ai 4.1.0 → 4.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +26 -0
- package/dist/index.d.mts +81 -13
- package/dist/index.d.ts +81 -13
- package/dist/index.js +307 -244
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +264 -202
- package/dist/index.mjs.map +1 -1
- package/package.json +5 -5
- package/rsc/dist/rsc-server.mjs.map +1 -1
package/dist/index.mjs
CHANGED
@@ -1,7 +1,7 @@
 var __defProp = Object.defineProperty;
 var __export = (target, all) => {
-for (var
-__defProp(target,
+for (var name15 in all)
+__defProp(target, name15, { get: all[name15], enumerable: true });
 };

 // core/index.ts

@@ -352,7 +352,7 @@ function getBaseTelemetryAttributes({
 telemetry,
 headers
 }) {
-var
+var _a15;
 return {
 "ai.model.provider": model.provider,
 "ai.model.id": model.modelId,

@@ -362,7 +362,7 @@ function getBaseTelemetryAttributes({
 return attributes;
 }, {}),
 // add metadata as attributes:
-...Object.entries((
+...Object.entries((_a15 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a15 : {}).reduce(
 (attributes, [key, value]) => {
 attributes[`ai.telemetry.metadata.${key}`] = value;
 return attributes;

@@ -387,7 +387,7 @@ var noopTracer = {
 startSpan() {
 return noopSpan;
 },
-startActiveSpan(
+startActiveSpan(name15, arg1, arg2, arg3) {
 if (typeof arg1 === "function") {
 return arg1(noopSpan);
 }

@@ -457,13 +457,13 @@ function getTracer({
 // core/telemetry/record-span.ts
 import { SpanStatusCode } from "@opentelemetry/api";
 function recordSpan({
-name:
+name: name15,
 tracer,
 attributes,
 fn,
 endWhenDone = true
 }) {
-return tracer.startActiveSpan(
+return tracer.startActiveSpan(name15, { attributes }, async (span) => {
 try {
 const result = await fn(span);
 if (endWhenDone) {

@@ -571,14 +571,14 @@ async function embed({
 }),
 tracer,
 fn: async (doEmbedSpan) => {
-var
+var _a15;
 const modelResponse = await model.doEmbed({
 values: [value],
 abortSignal,
 headers
 });
 const embedding2 = modelResponse.embeddings[0];
-const usage2 = (
+const usage2 = (_a15 = modelResponse.usage) != null ? _a15 : { tokens: NaN };
 doEmbedSpan.setAttributes(
 selectTelemetryAttributes({
 telemetry,

@@ -688,14 +688,14 @@ async function embedMany({
 }),
 tracer,
 fn: async (doEmbedSpan) => {
-var
+var _a15;
 const modelResponse = await model.doEmbed({
 values,
 abortSignal,
 headers
 });
 const embeddings3 = modelResponse.embeddings;
-const usage2 = (
+const usage2 = (_a15 = modelResponse.usage) != null ? _a15 : { tokens: NaN };
 doEmbedSpan.setAttributes(
 selectTelemetryAttributes({
 telemetry,

@@ -747,14 +747,14 @@ async function embedMany({
 }),
 tracer,
 fn: async (doEmbedSpan) => {
-var
+var _a15;
 const modelResponse = await model.doEmbed({
 values: chunk,
 abortSignal,
 headers
 });
 const embeddings2 = modelResponse.embeddings;
-const usage2 = (
+const usage2 = (_a15 = modelResponse.usage) != null ? _a15 : { tokens: NaN };
 doEmbedSpan.setAttributes(
 selectTelemetryAttributes({
 telemetry,

@@ -805,6 +805,30 @@ import {
 convertBase64ToUint8Array,
 convertUint8ArrayToBase64
 } from "@ai-sdk/provider-utils";
+
+// errors/no-image-generated-error.ts
+import { AISDKError as AISDKError3 } from "@ai-sdk/provider";
+var name3 = "AI_NoImageGeneratedError";
+var marker3 = `vercel.ai.error.${name3}`;
+var symbol3 = Symbol.for(marker3);
+var _a3;
+var NoImageGeneratedError = class extends AISDKError3 {
+constructor({
+message = "No image generated.",
+cause,
+responses
+}) {
+super({ name: name3, message, cause });
+this[_a3] = true;
+this.responses = responses;
+}
+static isInstance(error) {
+return AISDKError3.hasMarker(error, marker3);
+}
+};
+_a3 = symbol3;
+
+// core/generate-image/generate-image.ts
 async function generateImage({
 model,
 prompt,

@@ -815,11 +839,14 @@ async function generateImage({
 providerOptions,
 maxRetries: maxRetriesArg,
 abortSignal,
-headers
+headers,
+_internal = {
+currentDate: () => /* @__PURE__ */ new Date()
+}
 }) {
-var
+var _a15;
 const { retry } = prepareRetries({ maxRetries: maxRetriesArg });
-const maxImagesPerCall = (
+const maxImagesPerCall = (_a15 = model.maxImagesPerCall) != null ? _a15 : 1;
 const callCount = Math.ceil(n / maxImagesPerCall);
 const callImageCounts = Array.from({ length: callCount }, (_, i) => {
 if (i < callCount - 1) {

@@ -846,18 +873,24 @@ async function generateImage({
 );
 const images = [];
 const warnings = [];
+const responses = [];
 for (const result of results) {
 images.push(
 ...result.images.map((image) => new DefaultGeneratedImage({ image }))
 );
 warnings.push(...result.warnings);
+responses.push(result.response);
 }
-
+if (!images.length) {
+throw new NoImageGeneratedError({ responses });
+}
+return new DefaultGenerateImageResult({ images, warnings, responses });
 }
 var DefaultGenerateImageResult = class {
 constructor(options) {
 this.images = options.images;
 this.warnings = options.warnings;
+this.responses = options.responses;
 }
 get image() {
 return this.images[0];
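The hunks above add error handling to image generation: `generateImage` now records each provider call's `response` metadata and throws the new `NoImageGeneratedError` (also added to the package exports, see the final hunk of this diff) when no call produced an image. A minimal consumer-side sketch, assuming the 4.1 `experimental_generateImage` export and an illustrative OpenAI image model:

```ts
import { experimental_generateImage as generateImage, NoImageGeneratedError } from "ai";
import { openai } from "@ai-sdk/openai";

try {
  const { images } = await generateImage({
    model: openai.image("dall-e-3"), // illustrative provider/model choice
    prompt: "A watercolor fox in the snow",
    n: 1,
  });
  console.log(images[0].base64.slice(0, 32)); // generated image as base64
} catch (error) {
  if (NoImageGeneratedError.isInstance(error)) {
    // `responses` carries the metadata collected from each call in the loop above
    console.error("no image generated:", error.responses);
  } else {
    throw error;
  }
}
```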
@@ -889,12 +922,12 @@ var DefaultGeneratedImage = class {
 import { createIdGenerator, safeParseJSON } from "@ai-sdk/provider-utils";

 // errors/no-object-generated-error.ts
-import { AISDKError as
-var
-var
-var
-var
-var NoObjectGeneratedError = class extends
+import { AISDKError as AISDKError4 } from "@ai-sdk/provider";
+var name4 = "AI_NoObjectGeneratedError";
+var marker4 = `vercel.ai.error.${name4}`;
+var symbol4 = Symbol.for(marker4);
+var _a4;
+var NoObjectGeneratedError = class extends AISDKError4 {
 constructor({
 message = "No object generated.",
 cause,

@@ -902,25 +935,25 @@ var NoObjectGeneratedError = class extends AISDKError3 {
 response,
 usage
 }) {
-super({ name:
-this[
+super({ name: name4, message, cause });
+this[_a4] = true;
 this.text = text2;
 this.response = response;
 this.usage = usage;
 }
 static isInstance(error) {
-return
+return AISDKError4.hasMarker(error, marker4);
 }
 };
-
+_a4 = symbol4;

 // util/download-error.ts
-import { AISDKError as
-var
-var
-var
-var
-var DownloadError = class extends
+import { AISDKError as AISDKError5 } from "@ai-sdk/provider";
+var name5 = "AI_DownloadError";
+var marker5 = `vercel.ai.error.${name5}`;
+var symbol5 = Symbol.for(marker5);
+var _a5;
+var DownloadError = class extends AISDKError5 {
 constructor({
 url,
 statusCode,

@@ -928,24 +961,24 @@ var DownloadError = class extends AISDKError4 {
 cause,
 message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
 }) {
-super({ name:
-this[
+super({ name: name5, message, cause });
+this[_a5] = true;
 this.url = url;
 this.statusCode = statusCode;
 this.statusText = statusText;
 }
 static isInstance(error) {
-return
+return AISDKError5.hasMarker(error, marker5);
 }
 };
-
+_a5 = symbol5;

 // util/download.ts
 async function download({
 url,
 fetchImplementation = fetch
 }) {
-var
+var _a15;
 const urlText = url.toString();
 try {
 const response = await fetchImplementation(urlText);

@@ -958,7 +991,7 @@ async function download({
 }
 return {
 data: new Uint8Array(await response.arrayBuffer()),
-mimeType: (
+mimeType: (_a15 = response.headers.get("content-type")) != null ? _a15 : void 0
 };
 } catch (error) {
 if (DownloadError.isInstance(error)) {

@@ -991,26 +1024,26 @@ import {
 } from "@ai-sdk/provider-utils";

 // core/prompt/invalid-data-content-error.ts
-import { AISDKError as
-var
-var
-var
-var
-var InvalidDataContentError = class extends
+import { AISDKError as AISDKError6 } from "@ai-sdk/provider";
+var name6 = "AI_InvalidDataContentError";
+var marker6 = `vercel.ai.error.${name6}`;
+var symbol6 = Symbol.for(marker6);
+var _a6;
+var InvalidDataContentError = class extends AISDKError6 {
 constructor({
 content,
 cause,
 message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
 }) {
-super({ name:
-this[
+super({ name: name6, message, cause });
+this[_a6] = true;
 this.content = content;
 }
 static isInstance(error) {
-return
+return AISDKError6.hasMarker(error, marker6);
 }
 };
-
+_a6 = symbol6;

 // core/prompt/data-content.ts
 import { z } from "zod";

@@ -1021,8 +1054,8 @@ var dataContentSchema = z.union([
 z.custom(
 // Buffer might not be available in some environments such as CloudFlare:
 (value) => {
-var
-return (_b = (
+var _a15, _b;
+return (_b = (_a15 = globalThis.Buffer) == null ? void 0 : _a15.isBuffer(value)) != null ? _b : false;
 },
 { message: "Must be a Buffer" }
 )

@@ -1065,25 +1098,25 @@ function convertUint8ArrayToText(uint8Array) {
 }

 // core/prompt/invalid-message-role-error.ts
-import { AISDKError as
-var
-var
-var
-var
-var InvalidMessageRoleError = class extends
+import { AISDKError as AISDKError7 } from "@ai-sdk/provider";
+var name7 = "AI_InvalidMessageRoleError";
+var marker7 = `vercel.ai.error.${name7}`;
+var symbol7 = Symbol.for(marker7);
+var _a7;
+var InvalidMessageRoleError = class extends AISDKError7 {
 constructor({
 role,
 message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
 }) {
-super({ name:
-this[
+super({ name: name7, message });
+this[_a7] = true;
 this.role = role;
 }
 static isInstance(error) {
-return
+return AISDKError7.hasMarker(error, marker7);
 }
 };
-
+_a7 = symbol7;

 // core/prompt/split-data-url.ts
 function splitDataUrl(dataUrl) {

@@ -1213,7 +1246,7 @@ async function downloadAssets(messages, downloadImplementation, modelSupportsIma
 );
 }
 function convertPartToLanguageModelPart(part, downloadedAssets) {
-var
+var _a15;
 if (part.type === "text") {
 return {
 type: "text",

@@ -1266,7 +1299,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
 switch (type) {
 case "image": {
 if (normalizedData instanceof Uint8Array) {
-mimeType = (
+mimeType = (_a15 = detectImageMimeType(normalizedData)) != null ? _a15 : mimeType;
 }
 return {
 type: "image",

@@ -1533,7 +1566,7 @@ function detectSingleMessageCharacteristics(message) {

 // core/prompt/attachments-to-parts.ts
 function attachmentsToParts(attachments) {
-var
+var _a15, _b, _c;
 const parts = [];
 for (const attachment of attachments) {
 let url;

@@ -1545,7 +1578,7 @@ function attachmentsToParts(attachments) {
 switch (url.protocol) {
 case "http:":
 case "https:": {
-if ((
+if ((_a15 = attachment.contentType) == null ? void 0 : _a15.startsWith("image/")) {
 parts.push({ type: "image", image: url });
 } else {
 if (!attachment.contentType) {

@@ -1609,30 +1642,30 @@ function attachmentsToParts(attachments) {
 }

 // core/prompt/message-conversion-error.ts
-import { AISDKError as
-var
-var
-var
-var
-var MessageConversionError = class extends
+import { AISDKError as AISDKError8 } from "@ai-sdk/provider";
+var name8 = "AI_MessageConversionError";
+var marker8 = `vercel.ai.error.${name8}`;
+var symbol8 = Symbol.for(marker8);
+var _a8;
+var MessageConversionError = class extends AISDKError8 {
 constructor({
 originalMessage,
 message
 }) {
-super({ name:
-this[
+super({ name: name8, message });
+this[_a8] = true;
 this.originalMessage = originalMessage;
 }
 static isInstance(error) {
-return
+return AISDKError8.hasMarker(error, marker8);
 }
 };
-
+_a8 = symbol8;

 // core/prompt/convert-to-core-messages.ts
 function convertToCoreMessages(messages, options) {
-var
-const tools = (
+var _a15;
+const tools = (_a15 = options == null ? void 0 : options.tools) != null ? _a15 : {};
 const coreMessages = [];
 for (const message of messages) {
 const { role, content, toolInvocations, experimental_attachments } = message;

@@ -1914,7 +1947,7 @@ var arrayOutputStrategy = (schema) => {
 additionalProperties: false
 },
 validatePartialResult({ value, latestObject, isFirstDelta, isFinalDelta }) {
-var
+var _a15;
 if (!isJSONObject(value) || !isJSONArray(value.elements)) {
 return {
 success: false,

@@ -1937,7 +1970,7 @@ var arrayOutputStrategy = (schema) => {
 }
 resultArray.push(result.value);
 }
-const publishedElementCount = (
+const publishedElementCount = (_a15 = latestObject == null ? void 0 : latestObject.length) != null ? _a15 : 0;
 let textDelta = "";
 if (isFirstDelta) {
 textDelta += "[";

@@ -2275,7 +2308,7 @@ async function generateObject({
 }),
 tracer,
 fn: async (span) => {
-var
+var _a15, _b;
 if (mode === "auto" || mode == null) {
 mode = model.defaultObjectGenerationMode;
 }

@@ -2337,7 +2370,7 @@ async function generateObject({
 }),
 tracer,
 fn: async (span2) => {
-var
+var _a16, _b2, _c, _d, _e, _f;
 const result2 = await model.doGenerate({
 mode: {
 type: "object-json",

@@ -2353,7 +2386,7 @@ async function generateObject({
 headers
 });
 const responseData = {
-id: (_b2 = (
+id: (_b2 = (_a16 = result2.response) == null ? void 0 : _a16.id) != null ? _b2 : generateId3(),
 timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
 modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId
 };

@@ -2395,7 +2428,7 @@ async function generateObject({
 rawResponse = generateResult.rawResponse;
 logprobs = generateResult.logprobs;
 resultProviderMetadata = generateResult.providerMetadata;
-request = (
+request = (_a15 = generateResult.request) != null ? _a15 : {};
 response = generateResult.responseData;
 break;
 }

@@ -2441,7 +2474,7 @@ async function generateObject({
 }),
 tracer,
 fn: async (span2) => {
-var
+var _a16, _b2, _c, _d, _e, _f, _g, _h;
 const result2 = await model.doGenerate({
 mode: {
 type: "object-tool",

@@ -2459,7 +2492,7 @@ async function generateObject({
 abortSignal,
 headers
 });
-const objectText = (_b2 = (
+const objectText = (_b2 = (_a16 = result2.toolCalls) == null ? void 0 : _a16[0]) == null ? void 0 : _b2.args;
 const responseData = {
 id: (_d = (_c = result2.response) == null ? void 0 : _c.id) != null ? _d : generateId3(),
 timestamp: (_f = (_e = result2.response) == null ? void 0 : _e.timestamp) != null ? _f : currentDate(),

@@ -2585,9 +2618,9 @@ var DefaultGenerateObjectResult = class {
 this.logprobs = options.logprobs;
 }
 toJsonResponse(init) {
-var
+var _a15;
 return new Response(JSON.stringify(this.object), {
-status: (
+status: (_a15 = init == null ? void 0 : init.status) != null ? _a15 : 200,
 headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
 contentType: "application/json; charset=utf-8"
 })

@@ -2625,17 +2658,17 @@ var DelayedPromise = class {
 return this.promise;
 }
 resolve(value) {
-var
+var _a15;
 this.status = { type: "resolved", value };
 if (this.promise) {
-(
+(_a15 = this._resolve) == null ? void 0 : _a15.call(this, value);
 }
 }
 reject(error) {
-var
+var _a15;
 this.status = { type: "rejected", error };
 if (this.promise) {
-(
+(_a15 = this._reject) == null ? void 0 : _a15.call(this, error);
 }
 }
 };

@@ -2739,8 +2772,8 @@ function createStitchableStream() {

 // core/util/now.ts
 function now() {
-var
-return (_b = (
+var _a15, _b;
+return (_b = (_a15 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a15.now()) != null ? _b : Date.now();
 }

 // core/generate-object/stream-object.ts

@@ -3029,7 +3062,7 @@ var DefaultStreamObjectResult = class {
 const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
 new TransformStream({
 async transform(chunk, controller) {
-var
+var _a15, _b, _c;
 if (isFirstChunk) {
 const msToFirstChunk = now2() - startTimestampMs;
 isFirstChunk = false;

@@ -3075,7 +3108,7 @@ var DefaultStreamObjectResult = class {
 switch (chunk.type) {
 case "response-metadata": {
 response = {
-id: (
+id: (_a15 = chunk.id) != null ? _a15 : response.id,
 timestamp: (_b = chunk.timestamp) != null ? _b : response.timestamp,
 modelId: (_c = chunk.modelId) != null ? _c : response.modelId
 };

@@ -3289,9 +3322,9 @@ var DefaultStreamObjectResult = class {
 });
 }
 toTextStreamResponse(init) {
-var
+var _a15;
 return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
-status: (
+status: (_a15 = init == null ? void 0 : init.status) != null ? _a15 : 200,
 headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
 contentType: "text/plain; charset=utf-8"
 })

@@ -3303,30 +3336,30 @@ var DefaultStreamObjectResult = class {
 import { createIdGenerator as createIdGenerator3 } from "@ai-sdk/provider-utils";

 // errors/no-output-specified-error.ts
-import { AISDKError as
-var
-var
-var
-var
-var NoOutputSpecifiedError = class extends
+import { AISDKError as AISDKError9 } from "@ai-sdk/provider";
+var name9 = "AI_NoOutputSpecifiedError";
+var marker9 = `vercel.ai.error.${name9}`;
+var symbol9 = Symbol.for(marker9);
+var _a9;
+var NoOutputSpecifiedError = class extends AISDKError9 {
 // used in isInstance
 constructor({ message = "No output specified." } = {}) {
-super({ name:
-this[
+super({ name: name9, message });
+this[_a9] = true;
 }
 static isInstance(error) {
-return
+return AISDKError9.hasMarker(error, marker9);
 }
 };
-
+_a9 = symbol9;

 // errors/tool-execution-error.ts
-import { AISDKError as
-var
-var
-var
-var
-var ToolExecutionError = class extends
+import { AISDKError as AISDKError10, getErrorMessage as getErrorMessage2 } from "@ai-sdk/provider";
+var name10 = "AI_ToolExecutionError";
+var marker10 = `vercel.ai.error.${name10}`;
+var symbol10 = Symbol.for(marker10);
+var _a10;
+var ToolExecutionError = class extends AISDKError10 {
 constructor({
 toolArgs,
 toolName,

@@ -3334,17 +3367,17 @@ var ToolExecutionError = class extends AISDKError9 {
 cause,
 message = `Error executing tool ${toolName}: ${getErrorMessage2(cause)}`
 }) {
-super({ name:
-this[
+super({ name: name10, message, cause });
+this[_a10] = true;
 this.toolArgs = toolArgs;
 this.toolName = toolName;
 this.toolCallId = toolCallId;
 }
 static isInstance(error) {
-return
+return AISDKError10.hasMarker(error, marker10);
 }
 };
-
+_a10 = symbol10;

 // core/prompt/prepare-tools-and-tool-choice.ts
 import { asSchema as asSchema2 } from "@ai-sdk/ui-utils";

@@ -3367,24 +3400,24 @@ function prepareToolsAndToolChoice({
 };
 }
 const filteredTools = activeTools != null ? Object.entries(tools).filter(
-([
+([name15]) => activeTools.includes(name15)
 ) : Object.entries(tools);
 return {
-tools: filteredTools.map(([
+tools: filteredTools.map(([name15, tool2]) => {
 const toolType = tool2.type;
 switch (toolType) {
 case void 0:
 case "function":
 return {
 type: "function",
-name:
+name: name15,
 description: tool2.description,
 parameters: asSchema2(tool2.parameters).jsonSchema
 };
 case "provider-defined":
 return {
 type: "provider-defined",
-name:
+name: name15,
 id: tool2.id,
 args: tool2.args
 };

@@ -3416,12 +3449,12 @@ import { safeParseJSON as safeParseJSON2, safeValidateTypes as safeValidateTypes
 import { asSchema as asSchema3 } from "@ai-sdk/ui-utils";

 // errors/invalid-tool-arguments-error.ts
-import { AISDKError as
-var
-var
-var
-var
-var InvalidToolArgumentsError = class extends
+import { AISDKError as AISDKError11, getErrorMessage as getErrorMessage3 } from "@ai-sdk/provider";
+var name11 = "AI_InvalidToolArgumentsError";
+var marker11 = `vercel.ai.error.${name11}`;
+var symbol11 = Symbol.for(marker11);
+var _a11;
+var InvalidToolArgumentsError = class extends AISDKError11 {
 constructor({
 toolArgs,
 toolName,

@@ -3430,61 +3463,61 @@ var InvalidToolArgumentsError = class extends AISDKError10 {
 cause
 )}`
 }) {
-super({ name:
-this[
+super({ name: name11, message, cause });
+this[_a11] = true;
 this.toolArgs = toolArgs;
 this.toolName = toolName;
 }
 static isInstance(error) {
-return
+return AISDKError11.hasMarker(error, marker11);
 }
 };
-
+_a11 = symbol11;

 // errors/no-such-tool-error.ts
-import { AISDKError as
-var
-var
-var
-var
-var NoSuchToolError = class extends
+import { AISDKError as AISDKError12 } from "@ai-sdk/provider";
+var name12 = "AI_NoSuchToolError";
+var marker12 = `vercel.ai.error.${name12}`;
+var symbol12 = Symbol.for(marker12);
+var _a12;
+var NoSuchToolError = class extends AISDKError12 {
 constructor({
 toolName,
 availableTools = void 0,
 message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
 }) {
-super({ name:
-this[
+super({ name: name12, message });
+this[_a12] = true;
 this.toolName = toolName;
 this.availableTools = availableTools;
 }
 static isInstance(error) {
-return
+return AISDKError12.hasMarker(error, marker12);
 }
 };
-
+_a12 = symbol12;

 // errors/tool-call-repair-error.ts
-import { AISDKError as
-var
-var
-var
-var
-var ToolCallRepairError = class extends
+import { AISDKError as AISDKError13, getErrorMessage as getErrorMessage4 } from "@ai-sdk/provider";
+var name13 = "AI_ToolCallRepairError";
+var marker13 = `vercel.ai.error.${name13}`;
+var symbol13 = Symbol.for(marker13);
+var _a13;
+var ToolCallRepairError = class extends AISDKError13 {
 constructor({
 cause,
 originalError,
 message = `Error repairing tool call: ${getErrorMessage4(cause)}`
 }) {
-super({ name:
-this[
+super({ name: name13, message, cause });
+this[_a13] = true;
 this.originalError = originalError;
 }
 static isInstance(error) {
-return
+return AISDKError13.hasMarker(error, marker13);
 }
 };
-
+_a13 = symbol13;

 // core/generate-text/parse-tool-call.ts
 async function parseToolCall({

@@ -3629,7 +3662,7 @@ async function generateText({
 onStepFinish,
 ...settings
 }) {
-var
+var _a15;
 if (maxSteps < 1) {
 throw new InvalidArgumentError({
 parameter: "maxSteps",

@@ -3646,7 +3679,7 @@ async function generateText({
 });
 const initialPrompt = standardizePrompt({
 prompt: {
-system: (
+system: (_a15 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a15 : system,
 prompt,
 messages
 },

@@ -3672,7 +3705,7 @@ async function generateText({
 }),
 tracer,
 fn: async (span) => {
-var
+var _a16, _b, _c, _d, _e, _f;
 const mode = {
 type: "regular",
 ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })

@@ -3724,8 +3757,8 @@ async function generateText({
 "ai.prompt.tools": {
 // convert the language model level tools:
 input: () => {
-var
-return (
+var _a17;
+return (_a17 = mode.tools) == null ? void 0 : _a17.map((tool2) => JSON.stringify(tool2));
 }
 },
 "ai.prompt.toolChoice": {

@@ -3745,7 +3778,7 @@ async function generateText({
 }),
 tracer,
 fn: async (span2) => {
-var
+var _a17, _b2, _c2, _d2, _e2, _f2;
 const result = await model.doGenerate({
 mode,
 ...callSettings,

@@ -3757,7 +3790,7 @@ async function generateText({
 headers
 });
 const responseData = {
-id: (_b2 = (
+id: (_b2 = (_a17 = result.response) == null ? void 0 : _a17.id) != null ? _b2 : generateId3(),
 timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
 modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId
 };

@@ -3791,7 +3824,7 @@ async function generateText({
 })
 );
 currentToolCalls = await Promise.all(
-((
+((_a16 = currentModelResponse.toolCalls) != null ? _a16 : []).map(
 (toolCall) => parseToolCall({
 toolCall,
 tools,

@@ -3856,6 +3889,7 @@ async function generateText({
 const currentStepResult = {
 stepType,
 text: stepText,
+reasoning: currentModelResponse.reasoning,
 toolCalls: currentToolCalls,
 toolResults: currentToolResults,
 finishReason: currentModelResponse.finishReason,

@@ -3894,6 +3928,7 @@ async function generateText({
 );
 return new DefaultGenerateTextResult({
 text: text2,
+reasoning: currentModelResponse.reasoning,
 outputResolver: () => {
 if (output == null) {
 throw new NoOutputSpecifiedError();

@@ -3999,6 +4034,7 @@ async function executeTools({
 var DefaultGenerateTextResult = class {
 constructor(options) {
 this.text = options.text;
+this.reasoning = options.reasoning;
 this.toolCalls = options.toolCalls;
 this.toolResults = options.toolResults;
 this.finishReason = options.finishReason;
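The three hunks above thread a `reasoning` field from the provider response through each step result into `DefaultGenerateTextResult`. A minimal sketch of reading it, assuming a provider model that actually emits reasoning (the model variable is a placeholder):

```ts
import { generateText } from "ai";
import type { LanguageModelV1 } from "ai";

declare const reasoningModel: LanguageModelV1; // placeholder: a model that returns reasoning

const result = await generateText({
  model: reasoningModel,
  prompt: "What is 19 * 23? Think it through.",
});

// new in this release: reasoning text recorded alongside the answer
// (undefined when the model reports none)
console.log(result.reasoning);
console.log(result.text);
```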
@@ -4030,7 +4066,7 @@ import {

 // errors/index.ts
 import {
-AISDKError as
+AISDKError as AISDKError14,
 APICallError as APICallError2,
 EmptyResponseBodyError,
 InvalidPromptError as InvalidPromptError2,

@@ -4300,6 +4336,7 @@ function runToolsTransformation({
 const chunkType = chunk.type;
 switch (chunkType) {
 case "text-delta":
+case "reasoning":
 case "response-metadata":
 case "error": {
 controller.enqueue(chunk);

@@ -4479,7 +4516,8 @@ function streamText({
 experimental_continueSteps: continueSteps = false,
 experimental_telemetry: telemetry,
 experimental_providerMetadata: providerMetadata,
-experimental_toolCallStreaming
+experimental_toolCallStreaming = false,
+toolCallStreaming = experimental_toolCallStreaming,
 experimental_activeTools: activeTools,
 experimental_repairToolCall: repairToolCall,
 experimental_transform: transform,
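The last hunk above promotes the tool-call streaming flag: `experimental_toolCallStreaming` now defaults to `false` and seeds a new `toolCallStreaming` option, so either spelling works. A sketch of the stabilized option, assuming a placeholder model and a stub tool:

```ts
import { streamText, tool } from "ai";
import type { LanguageModelV1 } from "ai";
import { z } from "zod";

declare const model: LanguageModelV1; // placeholder

const result = streamText({
  model,
  toolCallStreaming: true, // previously experimental_toolCallStreaming
  tools: {
    weather: tool({
      description: "Get the weather for a city",
      parameters: z.object({ city: z.string() }),
      execute: async ({ city }) => ({ city, tempC: 21 }), // stub implementation
    }),
  },
  prompt: "What's the weather in Berlin?",
});

for await (const part of result.fullStream) {
  // with the flag on, partial tool calls stream as deltas:
  if (part.type === "tool-call-streaming-start" || part.type === "tool-call-delta") {
    console.log(part.type);
  }
}
```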
@@ -4607,12 +4645,13 @@ var DefaultStreamTextResult = class {
 this.finishReasonPromise = new DelayedPromise();
 this.providerMetadataPromise = new DelayedPromise();
 this.textPromise = new DelayedPromise();
+this.reasoningPromise = new DelayedPromise();
 this.toolCallsPromise = new DelayedPromise();
 this.toolResultsPromise = new DelayedPromise();
 this.requestPromise = new DelayedPromise();
 this.responsePromise = new DelayedPromise();
 this.stepsPromise = new DelayedPromise();
-var
+var _a15;
 if (maxSteps < 1) {
 throw new InvalidArgumentError({
 parameter: "maxSteps",

@@ -4624,6 +4663,7 @@ var DefaultStreamTextResult = class {
 let recordedStepText = "";
 let recordedContinuationText = "";
 let recordedFullText = "";
+let recordedReasoningText = void 0;
 const recordedResponse = {
 id: generateId3(),
 timestamp: currentDate(),

@@ -4641,7 +4681,7 @@ var DefaultStreamTextResult = class {
 async transform(chunk, controller) {
 controller.enqueue(chunk);
 const { part } = chunk;
-if (part.type === "text-delta" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-call-streaming-start" || part.type === "tool-call-delta") {
+if (part.type === "text-delta" || part.type === "reasoning" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-call-streaming-start" || part.type === "tool-call-delta") {
 await (onChunk == null ? void 0 : onChunk({ chunk: part }));
 }
 if (part.type === "text-delta") {

@@ -4649,6 +4689,9 @@ var DefaultStreamTextResult = class {
 recordedContinuationText += part.textDelta;
 recordedFullText += part.textDelta;
 }
+if (part.type === "reasoning") {
+recordedReasoningText = (recordedReasoningText != null ? recordedReasoningText : "") + part.textDelta;
+}
 if (part.type === "tool-call") {
 recordedToolCalls.push(part);
 }

@@ -4681,6 +4724,7 @@ var DefaultStreamTextResult = class {
 const currentStepResult = {
 stepType,
 text: recordedStepText,
+reasoning: recordedReasoningText,
 toolCalls: recordedToolCalls,
 toolResults: recordedToolResults,
 finishReason: part.finishReason,

@@ -4718,7 +4762,7 @@ var DefaultStreamTextResult = class {
 }
 },
 async flush(controller) {
-var
+var _a16;
 try {
 if (recordedSteps.length === 0) {
 return;

@@ -4741,15 +4785,17 @@ var DefaultStreamTextResult = class {
 self.finishReasonPromise.resolve(finishReason);
 self.usagePromise.resolve(usage);
 self.textPromise.resolve(recordedFullText);
+self.reasoningPromise.resolve(recordedReasoningText);
 self.stepsPromise.resolve(recordedSteps);
 await (onFinish == null ? void 0 : onFinish({
 finishReason,
 logprobs: void 0,
 usage,
 text: recordedFullText,
+reasoning: recordedReasoningText,
 toolCalls: lastStep.toolCalls,
 toolResults: lastStep.toolResults,
-request: (
+request: (_a16 = lastStep.request) != null ? _a16 : {},
 response: lastStep.response,
 warnings: lastStep.warnings,
 experimental_providerMetadata: lastStep.experimental_providerMetadata,

@@ -4763,8 +4809,8 @@ var DefaultStreamTextResult = class {
 "ai.response.text": { output: () => recordedFullText },
 "ai.response.toolCalls": {
 output: () => {
-var
-return ((
+var _a17;
+return ((_a17 = lastStep.toolCalls) == null ? void 0 : _a17.length) ? JSON.stringify(lastStep.toolCalls) : void 0;
 }
 },
 "ai.usage.promptTokens": usage.promptTokens,

@@ -4806,7 +4852,7 @@ var DefaultStreamTextResult = class {
 });
 const initialPrompt = standardizePrompt({
 prompt: {
-system: (
+system: (_a15 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a15 : system,
 prompt,
 messages
 },

@@ -4882,8 +4928,8 @@ var DefaultStreamTextResult = class {
 "ai.prompt.tools": {
 // convert the language model level tools:
 input: () => {
-var
-return (
+var _a16;
+return (_a16 = mode.tools) == null ? void 0 : _a16.map((tool2) => JSON.stringify(tool2));
 }
 },
 "ai.prompt.toolChoice": {

@@ -4943,6 +4989,7 @@ var DefaultStreamTextResult = class {
 let stepProviderMetadata;
 let stepFirstChunk = true;
 let stepText = "";
+let stepReasoning = "";
 let fullStepText = stepType2 === "continue" ? previousStepText : "";
 let stepLogProbs;
 let stepResponse = {

@@ -4968,7 +5015,7 @@ var DefaultStreamTextResult = class {
 transformedStream.pipeThrough(
 new TransformStream({
 async transform(chunk, controller) {
-var
+var _a16, _b, _c;
 if (stepFirstChunk) {
 const msToFirstChunk = now2() - startTimestampMs;
 stepFirstChunk = false;

@@ -5014,6 +5061,11 @@ var DefaultStreamTextResult = class {
 }
 break;
 }
+case "reasoning": {
+controller.enqueue(chunk);
+stepReasoning += chunk.textDelta;
+break;
+}
 case "tool-call": {
 controller.enqueue(chunk);
 stepToolCalls.push(chunk);

@@ -5026,7 +5078,7 @@ var DefaultStreamTextResult = class {
 }
 case "response-metadata": {
 stepResponse = {
-id: (
+id: (_a16 = chunk.id) != null ? _a16 : stepResponse.id,
 timestamp: (_b = chunk.timestamp) != null ? _b : stepResponse.timestamp,
 modelId: (_c = chunk.modelId) != null ? _c : stepResponse.modelId
 };

@@ -5227,6 +5279,9 @@ var DefaultStreamTextResult = class {
 get text() {
 return this.textPromise.value;
 }
+get reasoning() {
+return this.reasoningPromise.value;
+}
 get toolCalls() {
 return this.toolCallsPromise.value;
 }

@@ -5319,6 +5374,12 @@ var DefaultStreamTextResult = class {
 controller.enqueue(formatDataStreamPart2("text", chunk.textDelta));
 break;
 }
+case "reasoning": {
+controller.enqueue(
+formatDataStreamPart2("reasoning", chunk.textDelta)
+);
+break;
+}
 case "tool-call-streaming-start": {
 controller.enqueue(
 formatDataStreamPart2("tool_call_streaming_start", {
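The hunks above complete the `reasoning` plumbing for streaming: reasoning parts now reach `onChunk`, accumulate into a `reasoningPromise` exposed as `result.reasoning`, surface in `fullStream`, and are forwarded to the data stream as a `reasoning` part. A consumer-side sketch, again with a placeholder reasoning-capable model:

```ts
import { streamText } from "ai";
import type { LanguageModelV1 } from "ai";

declare const reasoningModel: LanguageModelV1; // placeholder

const result = streamText({
  model: reasoningModel,
  prompt: "Solve 23 * 17 step by step.",
});

for await (const part of result.fullStream) {
  if (part.type === "reasoning") {
    process.stdout.write(part.textDelta); // reasoning deltas, new in this release
  }
}

console.log(await result.reasoning); // accumulated reasoning text for the run
```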
@@ -5470,9 +5531,9 @@ var DefaultStreamTextResult = class {
 );
 }
 toTextStreamResponse(init) {
-var
+var _a15;
 return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
-status: (
+status: (_a15 = init == null ? void 0 : init.status) != null ? _a15 : 200,
 headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
 contentType: "text/plain; charset=utf-8"
 })

@@ -5519,7 +5580,7 @@ function appendResponseMessages({
 messages,
 responseMessages
 }) {
-var
+var _a15;
 const clonedMessages = structuredClone(messages);
 for (const message of responseMessages) {
 const role = message.role;

@@ -5542,7 +5603,7 @@ function appendResponseMessages({
 }
 case "tool": {
 const previousMessage = clonedMessages[clonedMessages.length - 1];
-(
+(_a15 = previousMessage.toolInvocations) != null ? _a15 : previousMessage.toolInvocations = [];
 if (previousMessage.role !== "assistant") {
 throw new Error(
 `Tool result must follow an assistant message: ${previousMessage.role}`

@@ -5600,11 +5661,11 @@ function experimental_customProvider({
 }

 // core/registry/no-such-provider-error.ts
-import { AISDKError as
-var
-var
-var
-var
+import { AISDKError as AISDKError15, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
+var name14 = "AI_NoSuchProviderError";
+var marker14 = `vercel.ai.error.${name14}`;
+var symbol14 = Symbol.for(marker14);
+var _a14;
 var NoSuchProviderError = class extends NoSuchModelError3 {
 constructor({
 modelId,

@@ -5613,16 +5674,16 @@ var NoSuchProviderError = class extends NoSuchModelError3 {
 availableProviders,
 message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
 }) {
-super({ errorName:
-this[
+super({ errorName: name14, modelId, modelType, message });
+this[_a14] = true;
 this.providerId = providerId;
 this.availableProviders = availableProviders;
 }
 static isInstance(error) {
-return
+return AISDKError15.hasMarker(error, marker14);
 }
 };
-
+_a14 = symbol14;

 // core/registry/provider-registry.ts
 import { NoSuchModelError as NoSuchModelError4 } from "@ai-sdk/provider";

@@ -5664,19 +5725,19 @@ var DefaultProviderRegistry = class {
 return [id.slice(0, index), id.slice(index + 1)];
 }
 languageModel(id) {
-var
+var _a15, _b;
 const [providerId, modelId] = this.splitId(id, "languageModel");
-const model = (_b = (
+const model = (_b = (_a15 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a15, modelId);
 if (model == null) {
 throw new NoSuchModelError4({ modelId: id, modelType: "languageModel" });
 }
 return model;
 }
 textEmbeddingModel(id) {
-var
+var _a15;
 const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
 const provider = this.getProvider(providerId);
-const model = (
+const model = (_a15 = provider.textEmbeddingModel) == null ? void 0 : _a15.call(provider, modelId);
 if (model == null) {
 throw new NoSuchModelError4({
 modelId: id,

@@ -5739,8 +5800,8 @@ function simulateReadableStream({
 chunkDelayInMs = 0,
 _internal
 }) {
-var
-const delay2 = (
+var _a15;
+const delay2 = (_a15 = _internal == null ? void 0 : _internal.delay) != null ? _a15 : delay;
 let index = 0;
 return new ReadableStream({
 async pull(controller) {

@@ -5761,7 +5822,7 @@ import {
 function AssistantResponse({ threadId, messageId }, process2) {
 const stream = new ReadableStream({
 async start(controller) {
-var
+var _a15;
 const textEncoder = new TextEncoder();
 const sendMessage = (message) => {
 controller.enqueue(

@@ -5783,7 +5844,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
 );
 };
 const forwardStream = async (stream2) => {
-var
+var _a16, _b;
 let result = void 0;
 for await (const value of stream2) {
 switch (value.event) {

@@ -5800,7 +5861,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
 break;
 }
 case "thread.message.delta": {
-const content = (
+const content = (_a16 = value.data.delta.content) == null ? void 0 : _a16[0];
 if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
 controller.enqueue(
 textEncoder.encode(

@@ -5834,7 +5895,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
 forwardStream
 });
 } catch (error) {
-sendError((
+sendError((_a15 = error.message) != null ? _a15 : `${error}`);
 } finally {
 controller.close();
 }

@@ -5895,7 +5956,7 @@ function toDataStreamInternal(stream, callbacks) {
 return stream.pipeThrough(
 new TransformStream({
 transform: async (value, controller) => {
-var
+var _a15;
 if (typeof value === "string") {
 controller.enqueue(value);
 return;

@@ -5903,7 +5964,7 @@ function toDataStreamInternal(stream, callbacks) {
 if ("event" in value) {
 if (value.event === "on_chat_model_stream") {
 forwardAIMessageChunk(
-(
+(_a15 = value.data) == null ? void 0 : _a15.chunk,
 controller
 );
 }

@@ -5926,7 +5987,7 @@ function toDataStream(stream, callbacks) {
 );
 }
 function toDataStreamResponse(stream, options) {
-var
+var _a15;
 const dataStream = toDataStreamInternal(
 stream,
 options == null ? void 0 : options.callbacks

@@ -5935,7 +5996,7 @@ function toDataStreamResponse(stream, options) {
 const init = options == null ? void 0 : options.init;
 const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
 return new Response(responseStream, {
-status: (
+status: (_a15 = init == null ? void 0 : init.status) != null ? _a15 : 200,
 statusText: init == null ? void 0 : init.statusText,
 headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
 contentType: "text/plain; charset=utf-8",

@@ -5990,14 +6051,14 @@ function toDataStream2(stream, callbacks) {
 );
 }
 function toDataStreamResponse2(stream, options = {}) {
-var
+var _a15;
 const { init, data, callbacks } = options;
 const dataStream = toDataStreamInternal2(stream, callbacks).pipeThrough(
 new TextEncoderStream()
 );
 const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
 return new Response(responseStream, {
-status: (
+status: (_a15 = init == null ? void 0 : init.status) != null ? _a15 : 200,
 statusText: init == null ? void 0 : init.statusText,
 headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
 contentType: "text/plain; charset=utf-8",

@@ -6089,7 +6150,7 @@ var StreamData = class {
 }
 };
 export {
-
+AISDKError14 as AISDKError,
 APICallError2 as APICallError,
 AssistantResponse,
 DownloadError,

@@ -6106,6 +6167,7 @@ export {
 LoadAPIKeyError,
 MessageConversionError,
 NoContentGeneratedError,
+NoImageGeneratedError,
 NoObjectGeneratedError,
 NoOutputSpecifiedError,
 NoSuchModelError,