ai 6.0.0-beta.159 → 6.0.0-beta.161
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +17 -0
- package/dist/index.d.mts +41 -42
- package/dist/index.d.ts +41 -42
- package/dist/index.js +457 -430
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +249 -220
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +75 -100
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +46 -71
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/index.js
CHANGED
@@ -4,8 +4,8 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
 var __export = (target, all) => {
-for (var
-__defProp(target,
+for (var name14 in all)
+__defProp(target, name14, { get: all[name14], enumerable: true });
 };
 var __copyProps = (to, from, except, desc) => {
 if (from && typeof from === "object" || typeof from === "function") {
@@ -20,47 +20,47 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 // src/index.ts
 var src_exports = {};
 __export(src_exports, {
-AISDKError: () =>
-APICallError: () =>
+AISDKError: () => import_provider15.AISDKError,
+APICallError: () => import_provider15.APICallError,
 AbstractChat: () => AbstractChat,
 DefaultChatTransport: () => DefaultChatTransport,
-DownloadError: () => DownloadError,
-EmptyResponseBodyError: () =>
+DownloadError: () => import_provider_utils.DownloadError,
+EmptyResponseBodyError: () => import_provider15.EmptyResponseBodyError,
 Experimental_Agent: () => ToolLoopAgent,
 HttpChatTransport: () => HttpChatTransport,
 InvalidArgumentError: () => InvalidArgumentError,
 InvalidDataContentError: () => InvalidDataContentError,
 InvalidMessageRoleError: () => InvalidMessageRoleError,
-InvalidPromptError: () =>
-InvalidResponseDataError: () =>
+InvalidPromptError: () => import_provider15.InvalidPromptError,
+InvalidResponseDataError: () => import_provider15.InvalidResponseDataError,
 InvalidStreamPartError: () => InvalidStreamPartError,
 InvalidToolInputError: () => InvalidToolInputError,
-JSONParseError: () =>
+JSONParseError: () => import_provider15.JSONParseError,
 JsonToSseTransformStream: () => JsonToSseTransformStream,
-LoadAPIKeyError: () =>
-LoadSettingError: () =>
+LoadAPIKeyError: () => import_provider15.LoadAPIKeyError,
+LoadSettingError: () => import_provider15.LoadSettingError,
 MessageConversionError: () => MessageConversionError,
-NoContentGeneratedError: () =>
+NoContentGeneratedError: () => import_provider15.NoContentGeneratedError,
 NoImageGeneratedError: () => NoImageGeneratedError,
 NoObjectGeneratedError: () => NoObjectGeneratedError,
 NoOutputGeneratedError: () => NoOutputGeneratedError,
 NoSpeechGeneratedError: () => NoSpeechGeneratedError,
-NoSuchModelError: () =>
+NoSuchModelError: () => import_provider15.NoSuchModelError,
 NoSuchProviderError: () => NoSuchProviderError,
 NoSuchToolError: () => NoSuchToolError,
 Output: () => output_exports,
 RetryError: () => RetryError,
 SerialJobExecutor: () => SerialJobExecutor,
 TextStreamChatTransport: () => TextStreamChatTransport,
-TooManyEmbeddingValuesForCallError: () =>
+TooManyEmbeddingValuesForCallError: () => import_provider15.TooManyEmbeddingValuesForCallError,
 ToolCallRepairError: () => ToolCallRepairError,
 ToolLoopAgent: () => ToolLoopAgent,
-TypeValidationError: () =>
+TypeValidationError: () => import_provider15.TypeValidationError,
 UI_MESSAGE_STREAM_HEADERS: () => UI_MESSAGE_STREAM_HEADERS,
-UnsupportedFunctionalityError: () =>
+UnsupportedFunctionalityError: () => import_provider15.UnsupportedFunctionalityError,
 UnsupportedModelVersionError: () => UnsupportedModelVersionError,
 addToolInputExamplesMiddleware: () => addToolInputExamplesMiddleware,
-asSchema: () =>
+asSchema: () => import_provider_utils38.asSchema,
 assistantModelMessageSchema: () => assistantModelMessageSchema,
 callCompletionApi: () => callCompletionApi,
 consumeStream: () => consumeStream,
@@ -70,7 +70,7 @@ __export(src_exports, {
 createAgentUIStream: () => createAgentUIStream,
 createAgentUIStreamResponse: () => createAgentUIStreamResponse,
 createGateway: () => import_gateway3.createGateway,
-createIdGenerator: () =>
+createIdGenerator: () => import_provider_utils38.createIdGenerator,
 createProviderRegistry: () => createProviderRegistry,
 createTextStreamResponse: () => createTextStreamResponse,
 createUIMessageStream: () => createUIMessageStream,
@@ -78,17 +78,18 @@ __export(src_exports, {
 customProvider: () => customProvider,
 defaultEmbeddingSettingsMiddleware: () => defaultEmbeddingSettingsMiddleware,
 defaultSettingsMiddleware: () => defaultSettingsMiddleware,
-dynamicTool: () =>
+dynamicTool: () => import_provider_utils38.dynamicTool,
 embed: () => embed,
 embedMany: () => embedMany,
 experimental_createProviderRegistry: () => experimental_createProviderRegistry,
 experimental_customProvider: () => experimental_customProvider,
-experimental_generateImage: () =>
+experimental_generateImage: () => experimental_generateImage,
 experimental_generateSpeech: () => generateSpeech,
 experimental_transcribe: () => transcribe,
 extractReasoningMiddleware: () => extractReasoningMiddleware,
 gateway: () => import_gateway3.gateway,
-generateId: () =>
+generateId: () => import_provider_utils38.generateId,
+generateImage: () => generateImage,
 generateObject: () => generateObject,
 generateText: () => generateText,
 getStaticToolName: () => getStaticToolName,
@@ -104,11 +105,11 @@ __export(src_exports, {
 isTextUIPart: () => isTextUIPart,
 isToolOrDynamicToolUIPart: () => isToolOrDynamicToolUIPart,
 isToolUIPart: () => isToolUIPart,
-jsonSchema: () =>
+jsonSchema: () => import_provider_utils38.jsonSchema,
 lastAssistantMessageIsCompleteWithApprovalResponses: () => lastAssistantMessageIsCompleteWithApprovalResponses,
 lastAssistantMessageIsCompleteWithToolCalls: () => lastAssistantMessageIsCompleteWithToolCalls,
 modelMessageSchema: () => modelMessageSchema,
-parseJsonEventStream: () =>
+parseJsonEventStream: () => import_provider_utils38.parseJsonEventStream,
 parsePartialJson: () => parsePartialJson,
 pipeAgentUIStreamToResponse: () => pipeAgentUIStreamToResponse,
 pipeTextStreamToResponse: () => pipeTextStreamToResponse,
@@ -124,7 +125,7 @@ __export(src_exports, {
 streamObject: () => streamObject,
 streamText: () => streamText,
 systemModelMessageSchema: () => systemModelMessageSchema,
-tool: () =>
+tool: () => import_provider_utils38.tool,
 toolModelMessageSchema: () => toolModelMessageSchema,
 uiMessageChunkSchema: () => uiMessageChunkSchema,
 userModelMessageSchema: () => userModelMessageSchema,
@@ -132,14 +133,14 @@ __export(src_exports, {
 wrapEmbeddingModel: () => wrapEmbeddingModel,
 wrapLanguageModel: () => wrapLanguageModel,
 wrapProvider: () => wrapProvider,
-zodSchema: () =>
+zodSchema: () => import_provider_utils38.zodSchema
 });
 module.exports = __toCommonJS(src_exports);
 var import_gateway3 = require("@ai-sdk/gateway");
-var
+var import_provider_utils38 = require("@ai-sdk/provider-utils");

 // src/generate-text/generate-text.ts
-var
+var import_provider_utils15 = require("@ai-sdk/provider-utils");

 // src/logger/log-warnings.ts
 function formatWarning({
@@ -204,7 +205,7 @@ var logWarnings = (options) => {
 var import_gateway = require("@ai-sdk/gateway");

 // src/error/index.ts
-var
+var import_provider15 = require("@ai-sdk/provider");

 // src/error/invalid-argument-error.ts
 var import_provider = require("@ai-sdk/provider");
@@ -483,55 +484,32 @@ var MessageConversionError = class extends import_provider13.AISDKError {
 };
 _a11 = symbol11;

-// src/
+// src/error/index.ts
+var import_provider_utils = require("@ai-sdk/provider-utils");
+
+// src/util/retry-error.ts
 var import_provider14 = require("@ai-sdk/provider");
-var name12 = "
+var name12 = "AI_RetryError";
 var marker12 = `vercel.ai.error.${name12}`;
 var symbol12 = Symbol.for(marker12);
 var _a12;
-var
-constructor({
-url,
-statusCode,
-statusText,
-cause,
-message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
-}) {
-super({ name: name12, message, cause });
-this[_a12] = true;
-this.url = url;
-this.statusCode = statusCode;
-this.statusText = statusText;
-}
-static isInstance(error) {
-return import_provider14.AISDKError.hasMarker(error, marker12);
-}
-};
-_a12 = symbol12;
-
-// src/util/retry-error.ts
-var import_provider15 = require("@ai-sdk/provider");
-var name13 = "AI_RetryError";
-var marker13 = `vercel.ai.error.${name13}`;
-var symbol13 = Symbol.for(marker13);
-var _a13;
-var RetryError = class extends import_provider15.AISDKError {
+var RetryError = class extends import_provider14.AISDKError {
 constructor({
 message,
 reason,
 errors
 }) {
-super({ name:
-this[
+super({ name: name12, message });
+this[_a12] = true;
 this.reason = reason;
 this.errors = errors;
 this.lastError = errors[errors.length - 1];
 }
 static isInstance(error) {
-return
+return import_provider14.AISDKError.hasMarker(error, marker12);
 }
 };
-
+_a12 = symbol12;

 // src/util/log-v2-compatibility-warning.ts
 function logV2CompatibilityWarning({
@@ -724,7 +702,7 @@ function resolveEmbeddingModel(model) {
 return getGlobalProvider().embeddingModel(model);
 }
 function resolveTranscriptionModel(model) {
-var
+var _a14, _b;
 if (typeof model !== "string") {
 if (model.specificationVersion !== "v3" && model.specificationVersion !== "v2") {
 const unsupportedModel = model;
@@ -736,10 +714,10 @@ function resolveTranscriptionModel(model) {
 }
 return asTranscriptionModelV3(model);
 }
-return (_b = (
+return (_b = (_a14 = getGlobalProvider()).transcriptionModel) == null ? void 0 : _b.call(_a14, model);
 }
 function resolveSpeechModel(model) {
-var
+var _a14, _b;
 if (typeof model !== "string") {
 if (model.specificationVersion !== "v3" && model.specificationVersion !== "v2") {
 const unsupportedModel = model;
@@ -751,7 +729,7 @@ function resolveSpeechModel(model) {
 }
 return asSpeechModelV3(model);
 }
-return (_b = (
+return (_b = (_a14 = getGlobalProvider()).speechModel) == null ? void 0 : _b.call(_a14, model);
 }
 function resolveImageModel(model) {
 if (typeof model !== "string") {
@@ -768,15 +746,15 @@ function resolveImageModel(model) {
 return getGlobalProvider().imageModel(model);
 }
 function getGlobalProvider() {
-var
-return (
+var _a14;
+return (_a14 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a14 : import_gateway.gateway;
 }

 // src/prompt/convert-to-language-model-prompt.ts
-var
+var import_provider_utils6 = require("@ai-sdk/provider-utils");

 // src/util/detect-media-type.ts
-var
+var import_provider_utils2 = require("@ai-sdk/provider-utils");
 var imageMediaTypeSignatures = [
 {
 mediaType: "image/gif",
@@ -932,7 +910,7 @@ var audioMediaTypeSignatures = [
 }
 ];
 var stripID3 = (data) => {
-const bytes = typeof data === "string" ? (0,
+const bytes = typeof data === "string" ? (0, import_provider_utils2.convertBase64ToUint8Array)(data) : data;
 const id3Size = (bytes[6] & 127) << 21 | (bytes[7] & 127) << 14 | (bytes[8] & 127) << 7 | bytes[9] & 127;
 return bytes.slice(id3Size + 10);
 };
@@ -947,7 +925,7 @@ function detectMediaType({
 signatures
 }) {
 const processedData = stripID3TagsIfPresent(data);
-const bytes = typeof processedData === "string" ? (0,
+const bytes = typeof processedData === "string" ? (0, import_provider_utils2.convertBase64ToUint8Array)(
 processedData.substring(0, Math.min(processedData.length, 24))
 ) : processedData;
 for (const signature of signatures) {
@@ -961,25 +939,26 @@ function detectMediaType({
 }

 // src/util/download/download.ts
-var
+var import_provider_utils3 = require("@ai-sdk/provider-utils");
+var import_provider_utils4 = require("@ai-sdk/provider-utils");

 // src/version.ts
-var VERSION = true ? "6.0.0-beta.
+var VERSION = true ? "6.0.0-beta.161" : "0.0.0-test";

 // src/util/download/download.ts
 var download = async ({ url }) => {
-var
+var _a14;
 const urlText = url.toString();
 try {
 const response = await fetch(urlText, {
-headers: (0,
+headers: (0, import_provider_utils4.withUserAgentSuffix)(
 {},
 `ai-sdk/${VERSION}`,
-(0,
+(0, import_provider_utils4.getRuntimeEnvironmentUserAgent)()
 )
 });
 if (!response.ok) {
-throw new DownloadError({
+throw new import_provider_utils3.DownloadError({
 url: urlText,
 statusCode: response.status,
 statusText: response.statusText
@@ -987,13 +966,13 @@ var download = async ({ url }) => {
 }
 return {
 data: new Uint8Array(await response.arrayBuffer()),
-mediaType: (
+mediaType: (_a14 = response.headers.get("content-type")) != null ? _a14 : void 0
 };
 } catch (error) {
-if (DownloadError.isInstance(error)) {
+if (import_provider_utils3.DownloadError.isInstance(error)) {
 throw error;
 }
-throw new DownloadError({ url: urlText, cause: error });
+throw new import_provider_utils3.DownloadError({ url: urlText, cause: error });
 }
 };

@@ -1005,8 +984,8 @@ var createDefaultDownloadFunction = (download2 = download) => (requestedDownload
 );

 // src/prompt/data-content.ts
-var
-var
+var import_provider16 = require("@ai-sdk/provider");
+var import_provider_utils5 = require("@ai-sdk/provider-utils");
 var import_v4 = require("zod/v4");

 // src/prompt/split-data-url.ts
@@ -1033,8 +1012,8 @@ var dataContentSchema = import_v4.z.union([
 import_v4.z.custom(
 // Buffer might not be available in some environments such as CloudFlare:
 (value) => {
-var
-return (_b = (
+var _a14, _b;
+return (_b = (_a14 = globalThis.Buffer) == null ? void 0 : _a14.isBuffer(value)) != null ? _b : false;
 },
 { message: "Must be a Buffer" }
 )
@@ -1057,7 +1036,7 @@ function convertToLanguageModelV3DataContent(content) {
 content.toString()
 );
 if (dataUrlMediaType == null || base64Content == null) {
-throw new
+throw new import_provider16.AISDKError({
 name: "InvalidDataContentError",
 message: `Invalid data URL format in content ${content.toString()}`
 });
@@ -1071,9 +1050,9 @@ function convertDataContentToBase64String(content) {
 return content;
 }
 if (content instanceof ArrayBuffer) {
-return (0,
+return (0, import_provider_utils5.convertUint8ArrayToBase64)(new Uint8Array(content));
 }
-return (0,
+return (0, import_provider_utils5.convertUint8ArrayToBase64)(content);
 }
 function convertDataContentToUint8Array(content) {
 if (content instanceof Uint8Array) {
@@ -1081,7 +1060,7 @@ function convertDataContentToUint8Array(content) {
 }
 if (typeof content === "string") {
 try {
-return (0,
+return (0, import_provider_utils5.convertBase64ToUint8Array)(content);
 } catch (error) {
 throw new InvalidDataContentError({
 message: "Invalid data content. Content string is not a base64-encoded media.",
@@ -1257,8 +1236,8 @@ async function downloadAssets(messages, download2, supportedUrls) {
 ).flat().filter(
 (part) => part.type === "image" || part.type === "file"
 ).map((part) => {
-var
-const mediaType = (
+var _a14;
+const mediaType = (_a14 = part.mediaType) != null ? _a14 : part.type === "image" ? "image/*" : void 0;
 let data = part.type === "image" ? part.image : part.data;
 if (typeof data === "string") {
 try {
@@ -1271,7 +1250,7 @@ async function downloadAssets(messages, download2, supportedUrls) {
 (part) => part.data instanceof URL
 ).map((part) => ({
 url: part.data,
-isUrlSupportedByModel: part.mediaType != null && (0,
+isUrlSupportedByModel: part.mediaType != null && (0, import_provider_utils6.isUrlSupported)({
 url: part.data.toString(),
 mediaType: part.mediaType,
 supportedUrls
@@ -1288,7 +1267,7 @@ async function downloadAssets(messages, download2, supportedUrls) {
 );
 }
 function convertPartToLanguageModelPart(part, downloadedAssets) {
-var
+var _a14;
 if (part.type === "text") {
 return {
 type: "text",
@@ -1321,7 +1300,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
 switch (type) {
 case "image": {
 if (data instanceof Uint8Array || typeof data === "string") {
-mediaType = (
+mediaType = (_a14 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a14 : mediaType;
 }
 return {
 type: "file",
@@ -1373,7 +1352,7 @@ function mapToolResultOutput(output) {
 }

 // src/prompt/create-tool-model-output.ts
-var
+var import_provider17 = require("@ai-sdk/provider");
 async function createToolModelOutput({
 toolCallId,
 input,
@@ -1382,7 +1361,7 @@ async function createToolModelOutput({
 errorMode
 }) {
 if (errorMode === "text") {
-return { type: "error-text", value: (0,
+return { type: "error-text", value: (0, import_provider17.getErrorMessage)(output) };
 } else if (errorMode === "json") {
 return { type: "error-json", value: toJSONValue(output) };
 }
@@ -1489,7 +1468,7 @@ function prepareCallSettings({
 }

 // src/prompt/prepare-tools-and-tool-choice.ts
-var
+var import_provider_utils7 = require("@ai-sdk/provider-utils");

 // src/util/is-non-empty-object.ts
 function isNonEmptyObject(object2) {
@@ -1509,10 +1488,10 @@ async function prepareToolsAndToolChoice({
 };
 }
 const filteredTools = activeTools != null ? Object.entries(tools).filter(
-([
+([name14]) => activeTools.includes(name14)
 ) : Object.entries(tools);
 const languageModelTools = [];
-for (const [
+for (const [name14, tool2] of filteredTools) {
 const toolType = tool2.type;
 switch (toolType) {
 case void 0:
@@ -1520,9 +1499,9 @@ async function prepareToolsAndToolChoice({
 case "function":
 languageModelTools.push({
 type: "function",
-name:
+name: name14,
 description: tool2.description,
-inputSchema: await (0,
+inputSchema: await (0, import_provider_utils7.asSchema)(tool2.inputSchema).jsonSchema,
 ...tool2.inputExamples != null ? { inputExamples: tool2.inputExamples } : {},
 providerOptions: tool2.providerOptions,
 ...tool2.strict != null ? { strict: tool2.strict } : {}
@@ -1531,7 +1510,7 @@ async function prepareToolsAndToolChoice({
 case "provider":
 languageModelTools.push({
 type: "provider",
-name:
+name: name14,
 id: tool2.id,
 args: tool2.args
 });
@@ -1549,8 +1528,8 @@ async function prepareToolsAndToolChoice({
 }

 // src/prompt/standardize-prompt.ts
-var
-var
+var import_provider18 = require("@ai-sdk/provider");
+var import_provider_utils8 = require("@ai-sdk/provider-utils");
 var import_v46 = require("zod/v4");

 // src/prompt/message.ts
@@ -1762,13 +1741,13 @@ var modelMessageSchema = import_v45.z.union([
 // src/prompt/standardize-prompt.ts
 async function standardizePrompt(prompt) {
 if (prompt.prompt == null && prompt.messages == null) {
-throw new
+throw new import_provider18.InvalidPromptError({
 prompt,
 message: "prompt or messages must be defined"
 });
 }
 if (prompt.prompt != null && prompt.messages != null) {
-throw new
+throw new import_provider18.InvalidPromptError({
 prompt,
 message: "prompt and messages cannot be defined at the same time"
 });
@@ -1776,7 +1755,7 @@ async function standardizePrompt(prompt) {
 if (prompt.system != null && typeof prompt.system !== "string" && !asArray(prompt.system).every(
 (message) => typeof message === "object" && message !== null && "role" in message && message.role === "system"
 )) {
-throw new
+throw new import_provider18.InvalidPromptError({
 prompt,
 message: "system must be a string, SystemModelMessage, or array of SystemModelMessage"
 });
@@ -1789,23 +1768,23 @@ async function standardizePrompt(prompt) {
 } else if (prompt.messages != null) {
 messages = prompt.messages;
 } else {
-throw new
+throw new import_provider18.InvalidPromptError({
 prompt,
 message: "prompt or messages must be defined"
 });
 }
 if (messages.length === 0) {
-throw new
+throw new import_provider18.InvalidPromptError({
 prompt,
 message: "messages must not be empty"
 });
 }
-const validationResult = await (0,
+const validationResult = await (0, import_provider_utils8.safeValidateTypes)({
 value: messages,
 schema: import_v46.z.array(modelMessageSchema)
 });
 if (!validationResult.success) {
-throw new
+throw new import_provider18.InvalidPromptError({
 prompt,
 message: "The messages do not match the ModelMessage[] schema.",
 cause: validationResult.error
@@ -1819,14 +1798,14 @@ async function standardizePrompt(prompt) {

 // src/prompt/wrap-gateway-error.ts
 var import_gateway2 = require("@ai-sdk/gateway");
-var
+var import_provider19 = require("@ai-sdk/provider");
 function wrapGatewayError(error) {
 if (!import_gateway2.GatewayAuthenticationError.isInstance(error))
 return error;
 const isProductionEnv = (process == null ? void 0 : process.env.NODE_ENV) === "production";
 const moreInfoURL = "https://v6.ai-sdk.dev/unauthenticated-ai-gateway";
 if (isProductionEnv) {
-return new
+return new import_provider19.AISDKError({
 name: "GatewayError",
 message: `Unauthenticated. Configure AI_GATEWAY_API_KEY or use a provider module. Learn more: ${moreInfoURL}`
 });
@@ -1867,7 +1846,7 @@ function getBaseTelemetryAttributes({
 telemetry,
 headers
 }) {
-var
+var _a14;
 return {
 "ai.model.provider": model.provider,
 "ai.model.id": model.modelId,
@@ -1877,7 +1856,7 @@ function getBaseTelemetryAttributes({
 return attributes;
 }, {}),
 // add metadata as attributes:
-...Object.entries((
+...Object.entries((_a14 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a14 : {}).reduce(
 (attributes, [key, value]) => {
 attributes[`ai.telemetry.metadata.${key}`] = value;
 return attributes;
@@ -1902,7 +1881,7 @@ var noopTracer = {
 startSpan() {
 return noopSpan;
 },
-startActiveSpan(
+startActiveSpan(name14, arg1, arg2, arg3) {
 if (typeof arg1 === "function") {
 return arg1(noopSpan);
 }
@@ -1972,14 +1951,14 @@ function getTracer({
 // src/telemetry/record-span.ts
 var import_api2 = require("@opentelemetry/api");
 async function recordSpan({
-name:
+name: name14,
 tracer,
 attributes,
 fn,
 endWhenDone = true
 }) {
 return tracer.startActiveSpan(
-
+name14,
 { attributes: await attributes },
 async (span) => {
 try {
@@ -2109,12 +2088,12 @@ function createNullLanguageModelUsage() {
 };
 }
 function addLanguageModelUsage(usage1, usage2) {
-var
+var _a14, _b, _c, _d, _e, _f, _g, _h, _i, _j;
 return {
 inputTokens: addTokenCounts(usage1.inputTokens, usage2.inputTokens),
 inputTokenDetails: {
 noCacheTokens: addTokenCounts(
-(
+(_a14 = usage1.inputTokenDetails) == null ? void 0 : _a14.noCacheTokens,
 (_b = usage2.inputTokenDetails) == null ? void 0 : _b.noCacheTokens
 ),
 cacheReadTokens: addTokenCounts(
@@ -2160,8 +2139,8 @@ function addImageModelUsage(usage1, usage2) {
 }

 // src/util/retry-with-exponential-backoff.ts
-var
-var
+var import_provider20 = require("@ai-sdk/provider");
+var import_provider_utils9 = require("@ai-sdk/provider-utils");
 function getRetryDelayInMs({
 error,
 exponentialBackoffDelay
@@ -2211,13 +2190,13 @@ async function _retryWithExponentialBackoff(f, {
 try {
 return await f();
 } catch (error) {
-if ((0,
+if ((0, import_provider_utils9.isAbortError)(error)) {
 throw error;
 }
 if (maxRetries === 0) {
 throw error;
 }
-const errorMessage = (0,
+const errorMessage = (0, import_provider_utils9.getErrorMessage)(error);
 const newErrors = [...errors, error];
 const tryNumber = newErrors.length;
 if (tryNumber > maxRetries) {
@@ -2227,8 +2206,8 @@ async function _retryWithExponentialBackoff(f, {
 errors: newErrors
 });
 }
-if (error instanceof Error &&
-await (0,
+if (error instanceof Error && import_provider20.APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
+await (0, import_provider_utils9.delay)(
 getRetryDelayInMs({
 error,
 exponentialBackoffDelay: delayInMs
@@ -2352,7 +2331,7 @@ function collectToolApprovals({
 }

 // src/generate-text/execute-tool-call.ts
-var
+var import_provider_utils10 = require("@ai-sdk/provider-utils");
 async function executeToolCall({
 toolCall,
 tools,
@@ -2388,7 +2367,7 @@ async function executeToolCall({
 fn: async (span) => {
 let output;
 try {
-const stream = (0,
+const stream = (0, import_provider_utils10.executeTool)({
 execute: tool2.execute.bind(tool2),
 input,
 options: {
@@ -2460,7 +2439,7 @@ function extractTextContent(content) {
 }

 // src/generate-text/generated-file.ts
-var
+var import_provider_utils11 = require("@ai-sdk/provider-utils");
 var DefaultGeneratedFile = class {
 constructor({
 data,
@@ -2474,14 +2453,14 @@ var DefaultGeneratedFile = class {
 // lazy conversion with caching to avoid unnecessary conversion overhead:
 get base64() {
 if (this.base64Data == null) {
-this.base64Data = (0,
+this.base64Data = (0, import_provider_utils11.convertUint8ArrayToBase64)(this.uint8ArrayData);
 }
 return this.base64Data;
 }
 // lazy conversion with caching to avoid unnecessary conversion overhead:
 get uint8Array() {
 if (this.uint8ArrayData == null) {
-this.uint8ArrayData = (0,
+this.uint8ArrayData = (0, import_provider_utils11.convertBase64ToUint8Array)(this.base64Data);
 }
 return this.uint8ArrayData;
 }
@@ -2522,11 +2501,11 @@ __export(output_exports, {
 object: () => object,
 text: () => text
 });
-var
-var
+var import_provider21 = require("@ai-sdk/provider");
+var import_provider_utils13 = require("@ai-sdk/provider-utils");

 // src/util/parse-partial-json.ts
-var
+var import_provider_utils12 = require("@ai-sdk/provider-utils");

 // src/util/fix-json.ts
 function fixJson(input) {
@@ -2851,11 +2830,11 @@ async function parsePartialJson(jsonText) {
 if (jsonText === void 0) {
 return { value: void 0, state: "undefined-input" };
 }
-let result = await (0,
+let result = await (0, import_provider_utils12.safeParseJSON)({ text: jsonText });
 if (result.success) {
 return { value: result.value, state: "successful-parse" };
 }
-result = await (0,
+result = await (0, import_provider_utils12.safeParseJSON)({ text: fixJson(jsonText) });
 if (result.success) {
 return { value: result.value, state: "repaired-parse" };
 }
@@ -2874,19 +2853,19 @@ var text = () => ({
 });
 var object = ({
 schema: inputSchema,
-name:
+name: name14,
 description
 }) => {
-const schema = (0,
+const schema = (0, import_provider_utils13.asSchema)(inputSchema);
 return {
-responseFormat: (0,
+responseFormat: (0, import_provider_utils13.resolve)(schema.jsonSchema).then((jsonSchema2) => ({
 type: "json",
 schema: jsonSchema2,
-...
+...name14 != null && { name: name14 },
 ...description != null && { description }
 })),
 async parseCompleteOutput({ text: text2 }, context) {
-const parseResult = await (0,
+const parseResult = await (0, import_provider_utils13.safeParseJSON)({ text: text2 });
 if (!parseResult.success) {
 throw new NoObjectGeneratedError({
 message: "No object generated: could not parse the response.",
@@ -2897,7 +2876,7 @@ var object = ({
 finishReason: context.finishReason
 });
 }
-const validationResult = await (0,
+const validationResult = await (0, import_provider_utils13.safeValidateTypes)({
 value: parseResult.value,
 schema
 });
@@ -2933,13 +2912,13 @@ var object = ({
 };
 var array = ({
 element: inputElementSchema,
-name:
+name: name14,
 description
 }) => {
-const elementSchema = (0,
+const elementSchema = (0, import_provider_utils13.asSchema)(inputElementSchema);
 return {
 // JSON schema that describes an array of elements:
-responseFormat: (0,
+responseFormat: (0, import_provider_utils13.resolve)(elementSchema.jsonSchema).then((jsonSchema2) => {
 const { $schema, ...itemSchema } = jsonSchema2;
 return {
 type: "json",
@@ -2952,12 +2931,12 @@ var array = ({
 required: ["elements"],
 additionalProperties: false
 },
-...
+...name14 != null && { name: name14 },
 ...description != null && { description }
 };
 }),
 async parseCompleteOutput({ text: text2 }, context) {
-const parseResult = await (0,
+const parseResult = await (0, import_provider_utils13.safeParseJSON)({ text: text2 });
 if (!parseResult.success) {
 throw new NoObjectGeneratedError({
 message: "No object generated: could not parse the response.",
@@ -2972,7 +2951,7 @@ var array = ({
 if (outerValue == null || typeof outerValue !== "object" || !("elements" in outerValue) || !Array.isArray(outerValue.elements)) {
 throw new NoObjectGeneratedError({
 message: "No object generated: response did not match schema.",
-cause: new
+cause: new import_provider21.TypeValidationError({
 value: outerValue,
 cause: "response must be an object with an elements array"
 }),
@@ -2983,7 +2962,7 @@ var array = ({
 });
 }
 for (const element of outerValue.elements) {
-const validationResult = await (0,
+const validationResult = await (0, import_provider_utils13.safeValidateTypes)({
 value: element,
 schema: elementSchema
 });
@@ -3016,7 +2995,7 @@ var array = ({
 const rawElements = result.state === "repaired-parse" && outerValue.elements.length > 0 ? outerValue.elements.slice(0, -1) : outerValue.elements;
 const parsedElements = [];
 for (const rawElement of rawElements) {
-const validationResult = await (0,
+const validationResult = await (0, import_provider_utils13.safeValidateTypes)({
 value: rawElement,
 schema: elementSchema
 });
@@ -3032,7 +3011,7 @@ var array = ({
 };
 var choice = ({
 options: choiceOptions,
-name:
+name: name14,
 description
 }) => {
 return {
@@ -3048,11 +3027,11 @@ var choice = ({
 required: ["result"],
 additionalProperties: false
 },
-...
+...name14 != null && { name: name14 },
 ...description != null && { description }
 }),
 async parseCompleteOutput({ text: text2 }, context) {
-const parseResult = await (0,
+const parseResult = await (0, import_provider_utils13.safeParseJSON)({ text: text2 });
 if (!parseResult.success) {
 throw new NoObjectGeneratedError({
 message: "No object generated: could not parse the response.",
@@ -3067,7 +3046,7 @@ var choice = ({
 if (outerValue == null || typeof outerValue !== "object" || !("result" in outerValue) || typeof outerValue.result !== "string" || !choiceOptions.includes(outerValue.result)) {
 throw new NoObjectGeneratedError({
 message: "No object generated: response did not match schema.",
-cause: new
+cause: new import_provider21.TypeValidationError({
 value: outerValue,
 cause: "response must be an object that contains a choice value."
 }),
@@ -3106,17 +3085,17 @@ var choice = ({
 };
 };
 var json = ({
-name:
+name: name14,
 description
 } = {}) => {
 return {
 responseFormat: Promise.resolve({
 type: "json",
-...
+...name14 != null && { name: name14 },
 ...description != null && { description }
 }),
 async parseCompleteOutput({ text: text2 }, context) {
-const parseResult = await (0,
+const parseResult = await (0, import_provider_utils13.safeParseJSON)({ text: text2 });
 if (!parseResult.success) {
 throw new NoObjectGeneratedError({
 message: "No object generated: could not parse the response.",
@@ -3146,7 +3125,7 @@ var json = ({
 };

 // src/generate-text/parse-tool-call.ts
-var
+var import_provider_utils14 = require("@ai-sdk/provider-utils");
 async function parseToolCall({
 toolCall,
 tools,
@@ -3154,7 +3133,7 @@ async function parseToolCall({
 system,
 messages
 }) {
-var
+var _a14;
 try {
 if (tools == null) {
 if (toolCall.providerExecuted && toolCall.dynamic) {
@@ -3175,7 +3154,7 @@ async function parseToolCall({
 tools,
 inputSchema: async ({ toolName }) => {
 const { inputSchema } = tools[toolName];
-return await (0,
+return await (0, import_provider_utils14.asSchema)(inputSchema).jsonSchema;
 },
 system,
 messages,
@@ -3193,7 +3172,7 @@ async function parseToolCall({
 return await doParseToolCall({ toolCall: repairedToolCall, tools });
 }
 } catch (error) {
-const parsedInput = await (0,
+const parsedInput = await (0, import_provider_utils14.safeParseJSON)({ text: toolCall.input });
 const input = parsedInput.success ? parsedInput.value : toolCall.input;
 return {
 type: "tool-call",
@@ -3203,14 +3182,14 @@ async function parseToolCall({
 dynamic: true,
 invalid: true,
 error,
-title: (
+title: (_a14 = tools == null ? void 0 : tools[toolCall.toolName]) == null ? void 0 : _a14.title,
 providerExecuted: toolCall.providerExecuted,
 providerMetadata: toolCall.providerMetadata
 };
 }
 }
 async function parseProviderExecutedDynamicToolCall(toolCall) {
-const parseResult = toolCall.input.trim() === "" ? { success: true, value: {} } : await (0,
+const parseResult = toolCall.input.trim() === "" ? { success: true, value: {} } : await (0, import_provider_utils14.safeParseJSON)({ text: toolCall.input });
 if (parseResult.success === false) {
 throw new InvalidToolInputError({
 toolName: toolCall.toolName,
@@ -3243,8 +3222,8 @@ async function doParseToolCall({
 availableTools: Object.keys(tools)
 });
 }
-const schema = (0,
-const parseResult = toolCall.input.trim() === "" ? await (0,
+const schema = (0, import_provider_utils14.asSchema)(tool2.inputSchema);
+const parseResult = toolCall.input.trim() === "" ? await (0, import_provider_utils14.safeValidateTypes)({ value: {}, schema }) : await (0, import_provider_utils14.safeParseJSON)({ text: toolCall.input, schema });
 if (parseResult.success === false) {
 throw new InvalidToolInputError({
 toolName,
@@ -3340,8 +3319,8 @@ function stepCountIs(stepCount) {
 }
 function hasToolCall(toolName) {
 return ({ steps }) => {
-var
-return (_c = (_b = (
+var _a14, _b, _c;
+return (_c = (_b = (_a14 = steps[steps.length - 1]) == null ? void 0 : _a14.toolCalls) == null ? void 0 : _b.some(
 (toolCall) => toolCall.toolName === toolName
 )) != null ? _c : false;
 };
@@ -3476,7 +3455,7 @@ async function toResponseMessages({
 }

 // src/generate-text/generate-text.ts
-var originalGenerateId = (0,
+var originalGenerateId = (0, import_provider_utils15.createIdGenerator)({
 prefix: "aitxt",
 size: 24
 });
@@ -3517,7 +3496,7 @@ async function generateText({
 abortSignal
 });
 const callSettings = prepareCallSettings(settings);
-const headersWithUserAgent = (0,
+const headersWithUserAgent = (0, import_provider_utils15.withUserAgentSuffix)(
 headers != null ? headers : {},
 `ai/${VERSION}`
 );
@@ -3555,7 +3534,7 @@ async function generateText({
 }),
 tracer,
 fn: async (span) => {
-var
+var _a14, _b, _c, _d, _e, _f, _g, _h;
 const initialMessages = initialPrompt.messages;
 const responseMessages = [];
 const { approvedToolApprovals, deniedToolApprovals } = collectToolApprovals({ messages: initialMessages });
@@ -3618,7 +3597,7 @@ async function generateText({
 experimental_context
 }));
 const stepModel = resolveLanguageModel(
-(
+(_a14 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a14 : model
 );
 const promptMessages = await convertToLanguageModelPrompt({
 prompt: {
@@ -3636,7 +3615,7 @@ async function generateText({
 });
 currentModelResponse = await retry(
 () => {
-var
+var _a15;
 return recordSpan({
 name: "ai.generateText.doGenerate",
 attributes: selectTelemetryAttributes({
@@ -3668,14 +3647,14 @@ async function generateText({
 "gen_ai.request.max_tokens": settings.maxOutputTokens,
 "gen_ai.request.presence_penalty": settings.presencePenalty,
 "gen_ai.request.stop_sequences": settings.stopSequences,
-"gen_ai.request.temperature": (
+"gen_ai.request.temperature": (_a15 = settings.temperature) != null ? _a15 : void 0,
 "gen_ai.request.top_k": settings.topK,
 "gen_ai.request.top_p": settings.topP
 }
 }),
 tracer,
 fn: async (span2) => {
-var
+var _a16, _b2, _c2, _d2, _e2, _f2, _g2, _h2;
 const result = await stepModel.doGenerate({
 ...callSettings2,
 tools: stepTools,
@@ -3687,7 +3666,7 @@ async function generateText({
 headers: headersWithUserAgent
 });
 const responseData = {
-id: (_b2 = (
+id: (_b2 = (_a16 = result.response) == null ? void 0 : _a16.id) != null ? _b2 : generateId2(),
 timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
 modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : stepModel.modelId,
 headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
@@ -3784,7 +3763,7 @@ async function generateText({
 toolCallId: toolCall.toolCallId,
 toolName: toolCall.toolName,
 input: toolCall.input,
-error: (0,
+error: (0, import_provider_utils15.getErrorMessage)(toolCall.error),
 dynamic: true
 });
 }
@@ -4099,8 +4078,8 @@ function asContent({
 }

 // src/generate-text/stream-text.ts
-var
-var
+var import_provider22 = require("@ai-sdk/provider");
+var import_provider_utils19 = require("@ai-sdk/provider-utils");

 // src/util/prepare-headers.ts
 function prepareHeaders(headers, defaultHeaders) {
@@ -4247,13 +4226,13 @@ function getResponseUIMessageId({
 }

 // src/ui/process-ui-message-stream.ts
-var
+var import_provider_utils17 = require("@ai-sdk/provider-utils");

 // src/ui-message-stream/ui-message-chunks.ts
 var import_v47 = require("zod/v4");
-var
-var uiMessageChunkSchema = (0,
-() => (0,
+var import_provider_utils16 = require("@ai-sdk/provider-utils");
+var uiMessageChunkSchema = (0, import_provider_utils16.lazySchema)(
+() => (0, import_provider_utils16.zodSchema)(
 import_v47.z.union([
 import_v47.z.strictObject({
 type: import_v47.z.literal("text-start"),
@@ -4511,7 +4490,7 @@ function processUIMessageStream({
 new TransformStream({
 async transform(chunk, controller) {
 await runUpdateMessageJob(async ({ state, write }) => {
-var
+var _a14, _b, _c, _d;
 function getToolInvocation(toolCallId) {
 const toolInvocations = state.message.parts.filter(isToolUIPart);
 const toolInvocation = toolInvocations.find(
@@ -4525,7 +4504,7 @@ function processUIMessageStream({
 return toolInvocation;
 }
 function updateToolPart(options) {
-var
+var _a15;
 const part = state.message.parts.find(
 (part2) => isStaticToolUIPart(part2) && part2.toolCallId === options.toolCallId
 );
@@ -4541,7 +4520,7 @@ function processUIMessageStream({
 if (options.title !== void 0) {
 anyPart.title = options.title;
 }
-anyPart.providerExecuted = (
+anyPart.providerExecuted = (_a15 = anyOptions.providerExecuted) != null ? _a15 : part.providerExecuted;
 if (anyOptions.providerMetadata != null && part.state === "input-available") {
 part.callProviderMetadata = anyOptions.providerMetadata;
 }
@@ -4562,7 +4541,7 @@ function processUIMessageStream({
 }
 }
 function updateDynamicToolPart(options) {
-var
+var _a15, _b2;
 const part = state.message.parts.find(
 (part2) => part2.type === "dynamic-tool" && part2.toolCallId === options.toolCallId
 );
@@ -4574,7 +4553,7 @@ function processUIMessageStream({
 anyPart.input = anyOptions.input;
 anyPart.output = anyOptions.output;
 anyPart.errorText = anyOptions.errorText;
-anyPart.rawInput = (
+anyPart.rawInput = (_a15 = anyOptions.rawInput) != null ? _a15 : anyPart.rawInput;
 anyPart.preliminary = anyOptions.preliminary;
 if (options.title !== void 0) {
 anyPart.title = options.title;
@@ -4603,7 +4582,7 @@ function processUIMessageStream({
 if (metadata != null) {
 const mergedMetadata = state.message.metadata != null ? mergeObjects(state.message.metadata, metadata) : metadata;
 if (messageMetadataSchema != null) {
-await (0,
+await (0, import_provider_utils17.validateTypes)({
 value: mergedMetadata,
 schema: messageMetadataSchema
 });
@@ -4627,7 +4606,7 @@ function processUIMessageStream({
 case "text-delta": {
 const textPart = state.activeTextParts[chunk.id];
 textPart.text += chunk.delta;
-textPart.providerMetadata = (
+textPart.providerMetadata = (_a14 = chunk.providerMetadata) != null ? _a14 : textPart.providerMetadata;
 write();
 break;
 }
@@ -4922,7 +4901,7 @@ function processUIMessageStream({
 default: {
 if (isDataUIMessageChunk(chunk)) {
 if ((dataPartSchemas == null ? void 0 : dataPartSchemas[chunk.type]) != null) {
-await (0,
+await (0, import_provider_utils17.validateTypes)({
 value: chunk.data,
 schema: dataPartSchemas[chunk.type]
 });
@@ -5066,11 +5045,11 @@ function createAsyncIterableStream(source) {
 const reader = this.getReader();
 let finished = false;
 async function cleanup(cancelStream) {
-var
+var _a14;
 finished = true;
 try {
 if (cancelStream) {
-await ((
+await ((_a14 = reader.cancel) == null ? void 0 : _a14.call(reader));
 }
 } finally {
 try {
@@ -5236,12 +5215,12 @@ function createStitchableStream() {

 // src/util/now.ts
 function now() {
-var
-return (_b = (
+var _a14, _b;
+return (_b = (_a14 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a14.now()) != null ? _b : Date.now();
 }

 // src/generate-text/run-tools-transformation.ts
-var
+var import_provider_utils18 = require("@ai-sdk/provider-utils");
 function runToolsTransformation({
 tools,
 generatorStream,
@@ -5328,7 +5307,7 @@ function runToolsTransformation({
 toolCallId: toolCall.toolCallId,
 toolName: toolCall.toolName,
 input: toolCall.input,
-error: (0,
+error: (0, import_provider_utils18.getErrorMessage)(toolCall.error),
 dynamic: true,
 title: toolCall.title
 });
@@ -5450,7 +5429,7 @@ function runToolsTransformation({
 }

 // src/generate-text/stream-text.ts
-var originalGenerateId2 = (0,
+var originalGenerateId2 = (0, import_provider_utils19.createIdGenerator)({
 prefix: "aitxt",
 size: 24
 });
@@ -5546,7 +5525,7 @@ function createOutputTransformStream(output) {
 }
 return new TransformStream({
 async transform(chunk, controller) {
-var
+var _a14;
 if (chunk.type === "finish-step" && textChunk.length > 0) {
 publishTextChunk({ controller });
 }
@@ -5573,7 +5552,7 @@ function createOutputTransformStream(output) {
 }
 text2 += chunk.text;
 textChunk += chunk.text;
-textProviderMetadata = (
+textProviderMetadata = (_a14 = chunk.providerMetadata) != null ? _a14 : textProviderMetadata;
 const result = await output.parsePartialOutput({ text: text2 });
 if (result !== void 0) {
 const currentJson = JSON.stringify(result.partial);
@@ -5617,9 +5596,9 @@ var DefaultStreamTextResult = class {
 experimental_context,
 download: download2
 }) {
-this._totalUsage = new
-this._finishReason = new
-this._steps = new
+this._totalUsage = new import_provider_utils19.DelayedPromise();
+this._finishReason = new import_provider_utils19.DelayedPromise();
+this._steps = new import_provider_utils19.DelayedPromise();
 this.outputSpecification = output;
 this.includeRawChunks = includeRawChunks;
 this.tools = tools;
@@ -5636,7 +5615,7 @@ var DefaultStreamTextResult = class {
 let activeReasoningContent = {};
 const eventProcessor = new TransformStream({
 async transform(chunk, controller) {
-var
+var _a14, _b, _c, _d;
 controller.enqueue(chunk);
 const { part } = chunk;
 if (part.type === "text-delta" || part.type === "reasoning-delta" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-input-start" || part.type === "tool-input-delta" || part.type === "raw") {
@@ -5666,7 +5645,7 @@ var DefaultStreamTextResult = class {
 return;
 }
 activeText.text += part.text;
-activeText.providerMetadata = (
+activeText.providerMetadata = (_a14 = part.providerMetadata) != null ? _a14 : activeText.providerMetadata;
 }
 if (part.type === "text-end") {
 const activeText = activeTextContent[part.id];
@@ -5826,8 +5805,8 @@ var DefaultStreamTextResult = class {
 "ai.response.text": { output: () => finalStep.text },
 "ai.response.toolCalls": {
 output: () => {
-var
-return ((
+var _a14;
+return ((_a14 = finalStep.toolCalls) == null ? void 0 : _a14.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
 }
 },
 "ai.response.providerMetadata": JSON.stringify(
@@ -5874,7 +5853,7 @@ var DefaultStreamTextResult = class {
 }
 controller.enqueue(value);
 } catch (error) {
-if ((0,
+if ((0, import_provider_utils19.isAbortError)(error) && (abortSignal == null ? void 0 : abortSignal.aborted)) {
 abort();
 } else {
 controller.error(error);
@@ -6011,9 +5990,9 @@ var DefaultStreamTextResult = class {
 responseMessages,
 usage
 }) {
-var
+var _a14, _b, _c, _d, _e, _f;
 const includeRawChunks2 = self.includeRawChunks;
-stepFinish = new
+stepFinish = new import_provider_utils19.DelayedPromise();
 const stepInputMessages = [...initialMessages, ...responseMessages];
 const prepareStepResult = await (prepareStep == null ? void 0 : prepareStep({
 model,
@@ -6023,7 +6002,7 @@ var DefaultStreamTextResult = class {
 experimental_context
 }));
 const stepModel = resolveLanguageModel(
|
|
6026
|
-
(
|
|
6005
|
+
(_a14 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a14 : model
|
|
6027
6006
|
);
|
|
6028
6007
|
const promptMessages = await convertToLanguageModelPrompt({
|
|
6029
6008
|
prompt: {
|
|
@@ -6131,7 +6110,7 @@ var DefaultStreamTextResult = class {
|
|
|
6131
6110
|
streamWithToolResults.pipeThrough(
|
|
6132
6111
|
new TransformStream({
|
|
6133
6112
|
async transform(chunk, controller) {
|
|
6134
|
-
var
|
|
6113
|
+
var _a15, _b2, _c2, _d2, _e2;
|
|
6135
6114
|
if (chunk.type === "stream-start") {
|
|
6136
6115
|
warnings = chunk.warnings;
|
|
6137
6116
|
return;
|
|
@@ -6204,7 +6183,7 @@ var DefaultStreamTextResult = class {
|
|
|
6204
6183
|
}
|
|
6205
6184
|
case "response-metadata": {
|
|
6206
6185
|
stepResponse = {
|
|
6207
|
-
id: (
|
|
6186
|
+
id: (_a15 = chunk.id) != null ? _a15 : stepResponse.id,
|
|
6208
6187
|
timestamp: (_b2 = chunk.timestamp) != null ? _b2 : stepResponse.timestamp,
|
|
6209
6188
|
modelId: (_c2 = chunk.modelId) != null ? _c2 : stepResponse.modelId
|
|
6210
6189
|
};
|
|
@@ -6503,14 +6482,14 @@ var DefaultStreamTextResult = class {
|
|
|
6503
6482
|
);
|
|
6504
6483
|
}
|
|
6505
6484
|
async consumeStream(options) {
|
|
6506
|
-
var
|
|
6485
|
+
var _a14;
|
|
6507
6486
|
try {
|
|
6508
6487
|
await consumeStream({
|
|
6509
6488
|
stream: this.fullStream,
|
|
6510
6489
|
onError: options == null ? void 0 : options.onError
|
|
6511
6490
|
});
|
|
6512
6491
|
} catch (error) {
|
|
6513
|
-
(
|
|
6492
|
+
(_a14 = options == null ? void 0 : options.onError) == null ? void 0 : _a14.call(options, error);
|
|
6514
6493
|
}
|
|
6515
6494
|
}
|
|
6516
6495
|
get experimental_partialOutputStream() {
|
|
@@ -6531,8 +6510,8 @@ var DefaultStreamTextResult = class {
|
|
|
6531
6510
|
}
|
|
6532
6511
|
get output() {
|
|
6533
6512
|
return this.finalStep.then((step) => {
|
|
6534
|
-
var
|
|
6535
|
-
const output = (
|
|
6513
|
+
var _a14;
|
|
6514
|
+
const output = (_a14 = this.outputSpecification) != null ? _a14 : text();
|
|
6536
6515
|
return output.parseCompleteOutput(
|
|
6537
6516
|
{ text: step.text },
|
|
6538
6517
|
{
|
|
@@ -6552,15 +6531,15 @@ var DefaultStreamTextResult = class {
|
|
|
6552
6531
|
sendSources = false,
|
|
6553
6532
|
sendStart = true,
|
|
6554
6533
|
sendFinish = true,
|
|
6555
|
-
onError =
|
|
6534
|
+
onError = import_provider22.getErrorMessage
|
|
6556
6535
|
} = {}) {
|
|
6557
6536
|
const responseMessageId = generateMessageId != null ? getResponseUIMessageId({
|
|
6558
6537
|
originalMessages,
|
|
6559
6538
|
responseMessageId: generateMessageId
|
|
6560
6539
|
}) : void 0;
|
|
6561
6540
|
const isDynamic = (part) => {
|
|
6562
|
-
var
|
|
6563
|
-
const tool2 = (
|
|
6541
|
+
var _a14;
|
|
6542
|
+
const tool2 = (_a14 = this.tools) == null ? void 0 : _a14[part.toolName];
|
|
6564
6543
|
if (tool2 == null) {
|
|
6565
6544
|
return part.dynamic;
|
|
6566
6545
|
}
|
|
@@ -6898,10 +6877,10 @@ var ToolLoopAgent = class {
|
|
|
6898
6877
|
return this.settings.tools;
|
|
6899
6878
|
}
|
|
6900
6879
|
async prepareCall(options) {
|
|
6901
|
-
var
|
|
6880
|
+
var _a14, _b, _c, _d;
|
|
6902
6881
|
const baseCallArgs = {
|
|
6903
6882
|
...this.settings,
|
|
6904
|
-
stopWhen: (
|
|
6883
|
+
stopWhen: (_a14 = this.settings.stopWhen) != null ? _a14 : stepCountIs(20),
|
|
6905
6884
|
...options
|
|
6906
6885
|
};
|
|
6907
6886
|
const preparedCallArgs = (_d = await ((_c = (_b = this.settings).prepareCall) == null ? void 0 : _c.call(_b, baseCallArgs))) != null ? _d : baseCallArgs;
|
|
@@ -6941,13 +6920,13 @@ var ToolLoopAgent = class {
|
|
|
6941
6920
|
};
|
|
6942
6921
|
|
|
6943
6922
|
// src/ui-message-stream/create-ui-message-stream.ts
|
|
6944
|
-
var
|
|
6923
|
+
var import_provider_utils20 = require("@ai-sdk/provider-utils");
|
|
6945
6924
|
function createUIMessageStream({
|
|
6946
6925
|
execute,
|
|
6947
|
-
onError =
|
|
6926
|
+
onError = import_provider_utils20.getErrorMessage,
|
|
6948
6927
|
originalMessages,
|
|
6949
6928
|
onFinish,
|
|
6950
|
-
generateId: generateId2 =
|
|
6929
|
+
generateId: generateId2 = import_provider_utils20.generateId
|
|
6951
6930
|
}) {
|
|
6952
6931
|
let controller;
|
|
6953
6932
|
const ongoingStreamPromises = [];
|
|
@@ -7033,7 +7012,7 @@ function readUIMessageStream({
|
|
|
7033
7012
|
onError,
|
|
7034
7013
|
terminateOnError = false
|
|
7035
7014
|
}) {
|
|
7036
|
-
var
|
|
7015
|
+
var _a14;
|
|
7037
7016
|
let controller;
|
|
7038
7017
|
let hasErrored = false;
|
|
7039
7018
|
const outputStream = new ReadableStream({
|
|
@@ -7042,7 +7021,7 @@ function readUIMessageStream({
|
|
|
7042
7021
|
}
|
|
7043
7022
|
});
|
|
7044
7023
|
const state = createStreamingUIMessageState({
|
|
7045
|
-
messageId: (
|
|
7024
|
+
messageId: (_a14 = message == null ? void 0 : message.id) != null ? _a14 : "",
|
|
7046
7025
|
lastMessage: message
|
|
7047
7026
|
});
|
|
7048
7027
|
const handleError = (error) => {
|
|
@@ -7075,7 +7054,7 @@ function readUIMessageStream({
|
|
|
7075
7054
|
}
|
|
7076
7055
|
|
|
7077
7056
|
// src/ui/convert-to-model-messages.ts
|
|
7078
|
-
var
|
|
7057
|
+
var import_provider_utils21 = require("@ai-sdk/provider-utils");
|
|
7079
7058
|
async function convertToModelMessages(messages, options) {
|
|
7080
7059
|
const modelMessages = [];
|
|
7081
7060
|
if (options == null ? void 0 : options.ignoreIncompleteToolCalls) {
|
|
@@ -7109,7 +7088,7 @@ async function convertToModelMessages(messages, options) {
|
|
|
7109
7088
|
modelMessages.push({
|
|
7110
7089
|
role: "user",
|
|
7111
7090
|
content: message.parts.map((part) => {
|
|
7112
|
-
var
|
|
7091
|
+
var _a14;
|
|
7113
7092
|
if (isTextUIPart(part)) {
|
|
7114
7093
|
return {
|
|
7115
7094
|
type: "text",
|
|
@@ -7127,12 +7106,12 @@ async function convertToModelMessages(messages, options) {
|
|
|
7127
7106
|
};
|
|
7128
7107
|
}
|
|
7129
7108
|
if (isDataUIPart(part)) {
|
|
7130
|
-
return (
|
|
7109
|
+
return (_a14 = options == null ? void 0 : options.convertDataPart) == null ? void 0 : _a14.call(
|
|
7131
7110
|
options,
|
|
7132
7111
|
part
|
|
7133
7112
|
);
|
|
7134
7113
|
}
|
|
7135
|
-
}).filter(
|
|
7114
|
+
}).filter(import_provider_utils21.isNonNullable)
|
|
7136
7115
|
});
|
|
7137
7116
|
break;
|
|
7138
7117
|
}
|
|
@@ -7140,7 +7119,7 @@ async function convertToModelMessages(messages, options) {
|
|
|
7140
7119
|
if (message.parts != null) {
|
|
7141
7120
|
let block = [];
|
|
7142
7121
|
async function processBlock() {
|
|
7143
|
-
var
|
|
7122
|
+
var _a14, _b, _c, _d, _e, _f;
|
|
7144
7123
|
if (block.length === 0) {
|
|
7145
7124
|
return;
|
|
7146
7125
|
}
|
|
@@ -7172,7 +7151,7 @@ async function convertToModelMessages(messages, options) {
|
|
|
7172
7151
|
type: "tool-call",
|
|
7173
7152
|
toolCallId: part.toolCallId,
|
|
7174
7153
|
toolName,
|
|
7175
|
-
input: part.state === "output-error" ? (
|
|
7154
|
+
input: part.state === "output-error" ? (_a14 = part.input) != null ? _a14 : "rawInput" in part ? part.rawInput : void 0 : part.input,
|
|
7176
7155
|
providerExecuted: part.providerExecuted,
|
|
7177
7156
|
...part.callProviderMetadata != null ? { providerOptions: part.callProviderMetadata } : {}
|
|
7178
7157
|
});
|
|
@@ -7298,11 +7277,11 @@ async function convertToModelMessages(messages, options) {
|
|
|
7298
7277
|
}
|
|
7299
7278
|
|
|
7300
7279
|
// src/ui/validate-ui-messages.ts
|
|
7301
|
-
var
|
|
7302
|
-
var
|
|
7280
|
+
var import_provider23 = require("@ai-sdk/provider");
|
|
7281
|
+
var import_provider_utils22 = require("@ai-sdk/provider-utils");
|
|
7303
7282
|
var import_v48 = require("zod/v4");
|
|
7304
|
-
var uiMessagesSchema = (0,
|
|
7305
|
-
() => (0,
|
|
7283
|
+
var uiMessagesSchema = (0, import_provider_utils22.lazySchema)(
|
|
7284
|
+
() => (0, import_provider_utils22.zodSchema)(
|
|
7306
7285
|
import_v48.z.array(
|
|
7307
7286
|
import_v48.z.object({
|
|
7308
7287
|
id: import_v48.z.string(),
|
|
@@ -7576,13 +7555,13 @@ async function safeValidateUIMessages({
|
|
|
7576
7555
|
})
|
|
7577
7556
|
};
|
|
7578
7557
|
}
|
|
7579
|
-
const validatedMessages = await (0,
|
|
7558
|
+
const validatedMessages = await (0, import_provider_utils22.validateTypes)({
|
|
7580
7559
|
value: messages,
|
|
7581
7560
|
schema: uiMessagesSchema
|
|
7582
7561
|
});
|
|
7583
7562
|
if (metadataSchema) {
|
|
7584
7563
|
for (const message of validatedMessages) {
|
|
7585
|
-
await (0,
|
|
7564
|
+
await (0, import_provider_utils22.validateTypes)({
|
|
7586
7565
|
value: message.metadata,
|
|
7587
7566
|
schema: metadataSchema
|
|
7588
7567
|
});
|
|
@@ -7599,13 +7578,13 @@ async function safeValidateUIMessages({
|
|
|
7599
7578
|
if (!dataSchema) {
|
|
7600
7579
|
return {
|
|
7601
7580
|
success: false,
|
|
7602
|
-
error: new
|
|
7581
|
+
error: new import_provider23.TypeValidationError({
|
|
7603
7582
|
value: dataPart.data,
|
|
7604
7583
|
cause: `No data schema found for data part ${dataName}`
|
|
7605
7584
|
})
|
|
7606
7585
|
};
|
|
7607
7586
|
}
|
|
7608
|
-
await (0,
|
|
7587
|
+
await (0, import_provider_utils22.validateTypes)({
|
|
7609
7588
|
value: dataPart.data,
|
|
7610
7589
|
schema: dataSchema
|
|
7611
7590
|
});
|
|
@@ -7623,20 +7602,20 @@ async function safeValidateUIMessages({
|
|
|
7623
7602
|
if (!tool2) {
|
|
7624
7603
|
return {
|
|
7625
7604
|
success: false,
|
|
7626
|
-
error: new
|
|
7605
|
+
error: new import_provider23.TypeValidationError({
|
|
7627
7606
|
value: toolPart.input,
|
|
7628
7607
|
cause: `No tool schema found for tool part ${toolName}`
|
|
7629
7608
|
})
|
|
7630
7609
|
};
|
|
7631
7610
|
}
|
|
7632
7611
|
if (toolPart.state === "input-available" || toolPart.state === "output-available" || toolPart.state === "output-error") {
|
|
7633
|
-
await (0,
|
|
7612
|
+
await (0, import_provider_utils22.validateTypes)({
|
|
7634
7613
|
value: toolPart.input,
|
|
7635
7614
|
schema: tool2.inputSchema
|
|
7636
7615
|
});
|
|
7637
7616
|
}
|
|
7638
7617
|
if (toolPart.state === "output-available" && tool2.outputSchema) {
|
|
7639
|
-
await (0,
|
|
7618
|
+
await (0, import_provider_utils22.validateTypes)({
|
|
7640
7619
|
value: toolPart.output,
|
|
7641
7620
|
schema: tool2.outputSchema
|
|
7642
7621
|
});
|
|
@@ -7735,7 +7714,7 @@ async function pipeAgentUIStreamToResponse({
|
|
|
7735
7714
|
}
|
|
7736
7715
|
|
|
7737
7716
|
// src/embed/embed.ts
|
|
7738
|
-
var
|
|
7717
|
+
var import_provider_utils23 = require("@ai-sdk/provider-utils");
|
|
7739
7718
|
async function embed({
|
|
7740
7719
|
model: modelArg,
|
|
7741
7720
|
value,
|
|
@@ -7750,7 +7729,7 @@ async function embed({
|
|
|
7750
7729
|
maxRetries: maxRetriesArg,
|
|
7751
7730
|
abortSignal
|
|
7752
7731
|
});
|
|
7753
|
-
const headersWithUserAgent = (0,
|
|
7732
|
+
const headersWithUserAgent = (0, import_provider_utils23.withUserAgentSuffix)(
|
|
7754
7733
|
headers != null ? headers : {},
|
|
7755
7734
|
`ai/${VERSION}`
|
|
7756
7735
|
);
|
|
@@ -7792,7 +7771,7 @@ async function embed({
|
|
|
7792
7771
|
}),
|
|
7793
7772
|
tracer,
|
|
7794
7773
|
fn: async (doEmbedSpan) => {
|
|
7795
|
-
var
|
|
7774
|
+
var _a14;
|
|
7796
7775
|
const modelResponse = await model.doEmbed({
|
|
7797
7776
|
values: [value],
|
|
7798
7777
|
abortSignal,
|
|
@@ -7800,7 +7779,7 @@ async function embed({
|
|
|
7800
7779
|
providerOptions
|
|
7801
7780
|
});
|
|
7802
7781
|
const embedding2 = modelResponse.embeddings[0];
|
|
7803
|
-
const usage2 = (
|
|
7782
|
+
const usage2 = (_a14 = modelResponse.usage) != null ? _a14 : { tokens: NaN };
|
|
7804
7783
|
doEmbedSpan.setAttributes(
|
|
7805
7784
|
await selectTelemetryAttributes({
|
|
7806
7785
|
telemetry,
|
|
@@ -7858,7 +7837,7 @@ var DefaultEmbedResult = class {
|
|
|
7858
7837
|
};
|
|
7859
7838
|
|
|
7860
7839
|
// src/embed/embed-many.ts
|
|
7861
|
-
var
|
|
7840
|
+
var import_provider_utils24 = require("@ai-sdk/provider-utils");
|
|
7862
7841
|
|
|
7863
7842
|
// src/util/split-array.ts
|
|
7864
7843
|
function splitArray(array2, chunkSize) {
|
|
@@ -7888,7 +7867,7 @@ async function embedMany({
|
|
|
7888
7867
|
maxRetries: maxRetriesArg,
|
|
7889
7868
|
abortSignal
|
|
7890
7869
|
});
|
|
7891
|
-
const headersWithUserAgent = (0,
|
|
7870
|
+
const headersWithUserAgent = (0, import_provider_utils24.withUserAgentSuffix)(
|
|
7892
7871
|
headers != null ? headers : {},
|
|
7893
7872
|
`ai/${VERSION}`
|
|
7894
7873
|
);
|
|
@@ -7914,7 +7893,7 @@ async function embedMany({
|
|
|
7914
7893
|
}),
|
|
7915
7894
|
tracer,
|
|
7916
7895
|
fn: async (span) => {
|
|
7917
|
-
var
|
|
7896
|
+
var _a14;
|
|
7918
7897
|
const [maxEmbeddingsPerCall, supportsParallelCalls] = await Promise.all([
|
|
7919
7898
|
model.maxEmbeddingsPerCall,
|
|
7920
7899
|
model.supportsParallelCalls
|
|
@@ -7939,7 +7918,7 @@ async function embedMany({
|
|
|
7939
7918
|
}),
|
|
7940
7919
|
tracer,
|
|
7941
7920
|
fn: async (doEmbedSpan) => {
|
|
7942
|
-
var
|
|
7921
|
+
var _a15;
|
|
7943
7922
|
const modelResponse = await model.doEmbed({
|
|
7944
7923
|
values,
|
|
7945
7924
|
abortSignal,
|
|
@@ -7947,7 +7926,7 @@ async function embedMany({
|
|
|
7947
7926
|
providerOptions
|
|
7948
7927
|
});
|
|
7949
7928
|
const embeddings3 = modelResponse.embeddings;
|
|
7950
|
-
const usage2 = (
|
|
7929
|
+
const usage2 = (_a15 = modelResponse.usage) != null ? _a15 : { tokens: NaN };
|
|
7951
7930
|
doEmbedSpan.setAttributes(
|
|
7952
7931
|
await selectTelemetryAttributes({
|
|
7953
7932
|
telemetry,
|
|
@@ -8028,7 +8007,7 @@ async function embedMany({
|
|
|
8028
8007
|
}),
|
|
8029
8008
|
tracer,
|
|
8030
8009
|
fn: async (doEmbedSpan) => {
|
|
8031
|
-
var
|
|
8010
|
+
var _a15;
|
|
8032
8011
|
const modelResponse = await model.doEmbed({
|
|
8033
8012
|
values: chunk,
|
|
8034
8013
|
abortSignal,
|
|
@@ -8036,7 +8015,7 @@ async function embedMany({
|
|
|
8036
8015
|
providerOptions
|
|
8037
8016
|
});
|
|
8038
8017
|
const embeddings2 = modelResponse.embeddings;
|
|
8039
|
-
const usage = (
|
|
8018
|
+
const usage = (_a15 = modelResponse.usage) != null ? _a15 : { tokens: NaN };
|
|
8040
8019
|
doEmbedSpan.setAttributes(
|
|
8041
8020
|
await selectTelemetryAttributes({
|
|
8042
8021
|
telemetry,
|
|
@@ -8075,7 +8054,7 @@ async function embedMany({
|
|
|
8075
8054
|
result.providerMetadata
|
|
8076
8055
|
)) {
|
|
8077
8056
|
providerMetadata[providerName] = {
|
|
8078
|
-
...(
|
|
8057
|
+
...(_a14 = providerMetadata[providerName]) != null ? _a14 : {},
|
|
8079
8058
|
...metadata
|
|
8080
8059
|
};
|
|
8081
8060
|
}
|
|
@@ -8122,10 +8101,10 @@ var DefaultEmbedManyResult = class {
|
|
|
8122
8101
|
};
|
|
8123
8102
|
|
|
8124
8103
|
// src/generate-image/generate-image.ts
|
|
8125
|
-
var
|
|
8104
|
+
var import_provider_utils25 = require("@ai-sdk/provider-utils");
|
|
8126
8105
|
async function generateImage({
|
|
8127
8106
|
model: modelArg,
|
|
8128
|
-
prompt,
|
|
8107
|
+
prompt: promptArg,
|
|
8129
8108
|
n = 1,
|
|
8130
8109
|
maxImagesPerCall,
|
|
8131
8110
|
size,
|
|
@@ -8136,9 +8115,9 @@ async function generateImage({
|
|
|
8136
8115
|
abortSignal,
|
|
8137
8116
|
headers
|
|
8138
8117
|
}) {
|
|
8139
|
-
var
|
|
8118
|
+
var _a14, _b;
|
|
8140
8119
|
const model = resolveImageModel(modelArg);
|
|
8141
|
-
const headersWithUserAgent = (0,
|
|
8120
|
+
const headersWithUserAgent = (0, import_provider_utils25.withUserAgentSuffix)(
|
|
8142
8121
|
headers != null ? headers : {},
|
|
8143
8122
|
`ai/${VERSION}`
|
|
8144
8123
|
);
|
|
@@ -8146,7 +8125,7 @@ async function generateImage({
|
|
|
8146
8125
|
maxRetries: maxRetriesArg,
|
|
8147
8126
|
abortSignal
|
|
8148
8127
|
});
|
|
8149
|
-
const maxImagesPerCallWithDefault = (
|
|
8128
|
+
const maxImagesPerCallWithDefault = (_a14 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a14 : 1;
|
|
8150
8129
|
const callCount = Math.ceil(n / maxImagesPerCallWithDefault);
|
|
8151
8130
|
const callImageCounts = Array.from({ length: callCount }, (_, i) => {
|
|
8152
8131
|
if (i < callCount - 1) {
|
|
@@ -8157,9 +8136,12 @@ async function generateImage({
|
|
|
8157
8136
|
});
|
|
8158
8137
|
const results = await Promise.all(
|
|
8159
8138
|
callImageCounts.map(
|
|
8160
|
-
async (callImageCount) => retry(
|
|
8161
|
-
|
|
8139
|
+
async (callImageCount) => retry(() => {
|
|
8140
|
+
const { prompt, files, mask } = normalizePrompt(promptArg);
|
|
8141
|
+
return model.doGenerate({
|
|
8162
8142
|
prompt,
|
|
8143
|
+
files,
|
|
8144
|
+
mask,
|
|
8163
8145
|
n: callImageCount,
|
|
8164
8146
|
abortSignal,
|
|
8165
8147
|
headers: headersWithUserAgent,
|
|
@@ -8167,8 +8149,8 @@ async function generateImage({
|
|
|
8167
8149
|
aspectRatio,
|
|
8168
8150
|
seed,
|
|
8169
8151
|
providerOptions: providerOptions != null ? providerOptions : {}
|
|
8170
|
-
})
|
|
8171
|
-
)
|
|
8152
|
+
});
|
|
8153
|
+
})
|
|
8172
8154
|
)
|
|
8173
8155
|
);
|
|
8174
8156
|
const images = [];
|
|
@@ -8184,13 +8166,13 @@ async function generateImage({
|
|
|
8184
8166
|
images.push(
|
|
8185
8167
|
...result.images.map(
|
|
8186
8168
|
(image) => {
|
|
8187
|
-
var
|
|
8169
|
+
var _a15;
|
|
8188
8170
|
return new DefaultGeneratedFile({
|
|
8189
8171
|
data: image,
|
|
8190
|
-
mediaType: (
|
|
8172
|
+
mediaType: (_a15 = detectMediaType({
|
|
8191
8173
|
data: image,
|
|
8192
8174
|
signatures: imageMediaTypeSignatures
|
|
8193
|
-
})) != null ?
|
|
8175
|
+
})) != null ? _a15 : "image/png"
|
|
8194
8176
|
});
|
|
8195
8177
|
}
|
|
8196
8178
|
)
|
|
@@ -8258,9 +8240,53 @@ async function invokeModelMaxImagesPerCall(model) {
|
|
|
8258
8240
|
modelId: model.modelId
|
|
8259
8241
|
});
|
|
8260
8242
|
}
|
|
8243
|
+
function normalizePrompt(prompt) {
|
|
8244
|
+
if (typeof prompt === "string") {
|
|
8245
|
+
return { prompt, files: void 0, mask: void 0 };
|
|
8246
|
+
}
|
|
8247
|
+
return {
|
|
8248
|
+
prompt: prompt.text,
|
|
8249
|
+
files: prompt.images.map(toImageModelV3File),
|
|
8250
|
+
mask: prompt.mask ? toImageModelV3File(prompt.mask) : void 0
|
|
8251
|
+
};
|
|
8252
|
+
}
|
|
8253
|
+
function toImageModelV3File(dataContent) {
|
|
8254
|
+
if (typeof dataContent === "string" && dataContent.startsWith("http")) {
|
|
8255
|
+
return {
|
|
8256
|
+
type: "url",
|
|
8257
|
+
url: dataContent
|
|
8258
|
+
};
|
|
8259
|
+
}
|
|
8260
|
+
if (typeof dataContent === "string" && dataContent.startsWith("data:")) {
|
|
8261
|
+
const { mediaType: dataUrlMediaType, base64Content } = splitDataUrl(dataContent);
|
|
8262
|
+
if (base64Content != null) {
|
|
8263
|
+
const uint8Data2 = (0, import_provider_utils25.convertBase64ToUint8Array)(base64Content);
|
|
8264
|
+
return {
|
|
8265
|
+
type: "file",
|
|
8266
|
+
data: uint8Data2,
|
|
8267
|
+
mediaType: dataUrlMediaType || detectMediaType({
|
|
8268
|
+
data: uint8Data2,
|
|
8269
|
+
signatures: imageMediaTypeSignatures
|
|
8270
|
+
}) || "image/png"
|
|
8271
|
+
};
|
|
8272
|
+
}
|
|
8273
|
+
}
|
|
8274
|
+
const uint8Data = convertDataContentToUint8Array(dataContent);
|
|
8275
|
+
return {
|
|
8276
|
+
type: "file",
|
|
8277
|
+
data: uint8Data,
|
|
8278
|
+
mediaType: detectMediaType({
|
|
8279
|
+
data: uint8Data,
|
|
8280
|
+
signatures: imageMediaTypeSignatures
|
|
8281
|
+
}) || "image/png"
|
|
8282
|
+
};
|
|
8283
|
+
}
|
|
8284
|
+
|
|
8285
|
+
// src/generate-image/index.ts
|
|
8286
|
+
var experimental_generateImage = generateImage;
|
|
8261
8287
|
|
|
8262
8288
|
// src/generate-object/generate-object.ts
|
|
8263
|
-
var
|
|
8289
|
+
var import_provider_utils28 = require("@ai-sdk/provider-utils");
|
|
8264
8290
|
|
|
8265
8291
|
// src/generate-text/extract-reasoning-content.ts
|
|
8266
8292
|
function extractReasoningContent(content) {
|
|
@@ -8271,8 +8297,8 @@ function extractReasoningContent(content) {
|
|
|
8271
8297
|
}
|
|
8272
8298
|
|
|
8273
8299
|
// src/generate-object/output-strategy.ts
|
|
8274
|
-
var
|
|
8275
|
-
var
|
|
8300
|
+
var import_provider24 = require("@ai-sdk/provider");
|
|
8301
|
+
var import_provider_utils26 = require("@ai-sdk/provider-utils");
|
|
8276
8302
|
var noSchemaOutputStrategy = {
|
|
8277
8303
|
type: "no-schema",
|
|
8278
8304
|
jsonSchema: async () => void 0,
|
|
@@ -8292,7 +8318,7 @@ var noSchemaOutputStrategy = {
|
|
|
8292
8318
|
} : { success: true, value };
|
|
8293
8319
|
},
|
|
8294
8320
|
createElementStream() {
|
|
8295
|
-
throw new
|
|
8321
|
+
throw new import_provider24.UnsupportedFunctionalityError({
|
|
8296
8322
|
functionality: "element streams in no-schema mode"
|
|
8297
8323
|
});
|
|
8298
8324
|
}
|
|
@@ -8311,10 +8337,10 @@ var objectOutputStrategy = (schema) => ({
|
|
|
8311
8337
|
};
|
|
8312
8338
|
},
|
|
8313
8339
|
async validateFinalResult(value) {
|
|
8314
|
-
return (0,
|
|
8340
|
+
return (0, import_provider_utils26.safeValidateTypes)({ value, schema });
|
|
8315
8341
|
},
|
|
8316
8342
|
createElementStream() {
|
|
8317
|
-
throw new
|
|
8343
|
+
throw new import_provider24.UnsupportedFunctionalityError({
|
|
8318
8344
|
functionality: "element streams in object mode"
|
|
8319
8345
|
});
|
|
8320
8346
|
}
|
|
@@ -8343,11 +8369,11 @@ var arrayOutputStrategy = (schema) => {
|
|
|
8343
8369
|
isFirstDelta,
|
|
8344
8370
|
isFinalDelta
|
|
8345
8371
|
}) {
|
|
8346
|
-
var
|
|
8347
|
-
if (!(0,
|
|
8372
|
+
var _a14;
|
|
8373
|
+
if (!(0, import_provider24.isJSONObject)(value) || !(0, import_provider24.isJSONArray)(value.elements)) {
|
|
8348
8374
|
return {
|
|
8349
8375
|
success: false,
|
|
8350
|
-
error: new
|
|
8376
|
+
error: new import_provider24.TypeValidationError({
|
|
8351
8377
|
value,
|
|
8352
8378
|
cause: "value must be an object that contains an array of elements"
|
|
8353
8379
|
})
|
|
@@ -8357,7 +8383,7 @@ var arrayOutputStrategy = (schema) => {
|
|
|
8357
8383
|
const resultArray = [];
|
|
8358
8384
|
for (let i = 0; i < inputArray.length; i++) {
|
|
8359
8385
|
const element = inputArray[i];
|
|
8360
|
-
const result = await (0,
|
|
8386
|
+
const result = await (0, import_provider_utils26.safeValidateTypes)({ value: element, schema });
|
|
8361
8387
|
if (i === inputArray.length - 1 && !isFinalDelta) {
|
|
8362
8388
|
continue;
|
|
8363
8389
|
}
|
|
@@ -8366,7 +8392,7 @@ var arrayOutputStrategy = (schema) => {
|
|
|
8366
8392
|
}
|
|
8367
8393
|
resultArray.push(result.value);
|
|
8368
8394
|
}
|
|
8369
|
-
const publishedElementCount = (
|
|
8395
|
+
const publishedElementCount = (_a14 = latestObject == null ? void 0 : latestObject.length) != null ? _a14 : 0;
|
|
8370
8396
|
let textDelta = "";
|
|
8371
8397
|
if (isFirstDelta) {
|
|
8372
8398
|
textDelta += "[";
|
|
@@ -8387,10 +8413,10 @@ var arrayOutputStrategy = (schema) => {
|
|
|
8387
8413
|
};
|
|
8388
8414
|
},
|
|
8389
8415
|
async validateFinalResult(value) {
|
|
8390
|
-
if (!(0,
|
|
8416
|
+
if (!(0, import_provider24.isJSONObject)(value) || !(0, import_provider24.isJSONArray)(value.elements)) {
|
|
8391
8417
|
return {
|
|
8392
8418
|
success: false,
|
|
8393
|
-
error: new
|
|
8419
|
+
error: new import_provider24.TypeValidationError({
|
|
8394
8420
|
value,
|
|
8395
8421
|
cause: "value must be an object that contains an array of elements"
|
|
8396
8422
|
})
|
|
@@ -8398,7 +8424,7 @@ var arrayOutputStrategy = (schema) => {
|
|
|
8398
8424
|
}
|
|
8399
8425
|
const inputArray = value.elements;
|
|
8400
8426
|
for (const element of inputArray) {
|
|
8401
|
-
const result = await (0,
|
|
8427
|
+
const result = await (0, import_provider_utils26.safeValidateTypes)({ value: element, schema });
|
|
8402
8428
|
if (!result.success) {
|
|
8403
8429
|
return result;
|
|
8404
8430
|
}
|
|
@@ -8453,10 +8479,10 @@ var enumOutputStrategy = (enumValues) => {
|
|
|
8453
8479
|
additionalProperties: false
|
|
8454
8480
|
}),
|
|
8455
8481
|
async validateFinalResult(value) {
|
|
8456
|
-
if (!(0,
|
|
8482
|
+
if (!(0, import_provider24.isJSONObject)(value) || typeof value.result !== "string") {
|
|
8457
8483
|
return {
|
|
8458
8484
|
success: false,
|
|
8459
|
-
error: new
|
|
8485
|
+
error: new import_provider24.TypeValidationError({
|
|
8460
8486
|
value,
|
|
8461
8487
|
cause: 'value must be an object that contains a string in the "result" property.'
|
|
8462
8488
|
})
|
|
@@ -8465,17 +8491,17 @@ var enumOutputStrategy = (enumValues) => {
|
|
|
8465
8491
|
const result = value.result;
|
|
8466
8492
|
return enumValues.includes(result) ? { success: true, value: result } : {
|
|
8467
8493
|
success: false,
|
|
8468
|
-
error: new
|
|
8494
|
+
error: new import_provider24.TypeValidationError({
|
|
8469
8495
|
value,
|
|
8470
8496
|
cause: "value must be a string in the enum"
|
|
8471
8497
|
})
|
|
8472
8498
|
};
|
|
8473
8499
|
},
|
|
8474
8500
|
async validatePartialResult({ value, textDelta }) {
|
|
8475
|
-
if (!(0,
|
|
8501
|
+
if (!(0, import_provider24.isJSONObject)(value) || typeof value.result !== "string") {
|
|
8476
8502
|
return {
|
|
8477
8503
|
success: false,
|
|
8478
|
-
error: new
|
|
8504
|
+
error: new import_provider24.TypeValidationError({
|
|
8479
8505
|
value,
|
|
8480
8506
|
cause: 'value must be an object that contains a string in the "result" property.'
|
|
8481
8507
|
})
|
|
@@ -8488,7 +8514,7 @@ var enumOutputStrategy = (enumValues) => {
|
|
|
8488
8514
|
if (value.result.length === 0 || possibleEnumValues.length === 0) {
|
|
8489
8515
|
return {
|
|
8490
8516
|
success: false,
|
|
8491
|
-
error: new
|
|
8517
|
+
error: new import_provider24.TypeValidationError({
|
|
8492
8518
|
value,
|
|
8493
8519
|
cause: "value must be a string in the enum"
|
|
8494
8520
|
})
|
|
@@ -8503,7 +8529,7 @@ var enumOutputStrategy = (enumValues) => {
|
|
|
8503
8529
|
};
|
|
8504
8530
|
},
|
|
8505
8531
|
createElementStream() {
|
|
8506
|
-
throw new
|
|
8532
|
+
throw new import_provider24.UnsupportedFunctionalityError({
|
|
8507
8533
|
functionality: "element streams in enum mode"
|
|
8508
8534
|
});
|
|
8509
8535
|
}
|
|
@@ -8516,9 +8542,9 @@ function getOutputStrategy({
|
|
|
8516
8542
|
}) {
|
|
8517
8543
|
switch (output) {
|
|
8518
8544
|
case "object":
|
|
8519
|
-
return objectOutputStrategy((0,
|
|
8545
|
+
return objectOutputStrategy((0, import_provider_utils26.asSchema)(schema));
|
|
8520
8546
|
case "array":
|
|
8521
|
-
return arrayOutputStrategy((0,
|
|
8547
|
+
return arrayOutputStrategy((0, import_provider_utils26.asSchema)(schema));
|
|
8522
8548
|
case "enum":
|
|
8523
8549
|
return enumOutputStrategy(enumValues);
|
|
8524
8550
|
case "no-schema":
|
|
@@ -8531,10 +8557,10 @@ function getOutputStrategy({
|
|
|
8531
8557
|
}
|
|
8532
8558
|
|
|
8533
8559
|
// src/generate-object/parse-and-validate-object-result.ts
|
|
8534
|
-
var
|
|
8535
|
-
var
|
|
8560
|
+
var import_provider25 = require("@ai-sdk/provider");
|
|
8561
|
+
var import_provider_utils27 = require("@ai-sdk/provider-utils");
|
|
8536
8562
|
async function parseAndValidateObjectResult(result, outputStrategy, context) {
|
|
8537
|
-
const parseResult = await (0,
|
|
8563
|
+
const parseResult = await (0, import_provider_utils27.safeParseJSON)({ text: result });
|
|
8538
8564
|
if (!parseResult.success) {
|
|
8539
8565
|
throw new NoObjectGeneratedError({
|
|
8540
8566
|
message: "No object generated: could not parse the response.",
|
|
@@ -8569,7 +8595,7 @@ async function parseAndValidateObjectResultWithRepair(result, outputStrategy, re
|
|
|
8569
8595
|
try {
|
|
8570
8596
|
return await parseAndValidateObjectResult(result, outputStrategy, context);
|
|
8571
8597
|
} catch (error) {
|
|
8572
|
-
if (repairText != null && NoObjectGeneratedError.isInstance(error) && (
|
|
8598
|
+
if (repairText != null && NoObjectGeneratedError.isInstance(error) && (import_provider25.JSONParseError.isInstance(error.cause) || import_provider25.TypeValidationError.isInstance(error.cause))) {
|
|
8573
8599
|
const repairedText = await repairText({
|
|
8574
8600
|
text: result,
|
|
8575
8601
|
error: error.cause
|
|
@@ -8706,7 +8732,7 @@ function validateObjectGenerationInput({
|
|
|
8706
8732
|
}
|
|
8707
8733
|
|
|
8708
8734
|
// src/generate-object/generate-object.ts
|
|
8709
|
-
var originalGenerateId3 = (0,
|
|
8735
|
+
var originalGenerateId3 = (0, import_provider_utils28.createIdGenerator)({ prefix: "aiobj", size: 24 });
|
|
8710
8736
|
async function generateObject(options) {
|
|
8711
8737
|
const {
|
|
8712
8738
|
model: modelArg,
|
|
@@ -8751,7 +8777,7 @@ async function generateObject(options) {
|
|
|
8751
8777
|
enumValues
|
|
8752
8778
|
});
|
|
8753
8779
|
const callSettings = prepareCallSettings(settings);
|
|
8754
|
-
const headersWithUserAgent = (0,
|
|
8780
|
+
const headersWithUserAgent = (0, import_provider_utils28.withUserAgentSuffix)(
|
|
8755
8781
|
headers != null ? headers : {},
|
|
8756
8782
|
`ai/${VERSION}`
|
|
8757
8783
|
);
|
|
@@ -8786,7 +8812,7 @@ async function generateObject(options) {
|
|
|
8786
8812
|
}),
|
|
8787
8813
|
tracer,
|
|
8788
8814
|
fn: async (span) => {
|
|
8789
|
-
var
|
|
8815
|
+
var _a14;
|
|
8790
8816
|
let result;
|
|
8791
8817
|
let finishReason;
|
|
8792
8818
|
let usage;
|
|
@@ -8832,7 +8858,7 @@ async function generateObject(options) {
|
|
|
8832
8858
|
}),
|
|
8833
8859
|
tracer,
|
|
8834
8860
|
fn: async (span2) => {
|
|
8835
|
-
var
|
|
8861
|
+
var _a15, _b, _c, _d, _e, _f, _g, _h;
|
|
8836
8862
|
const result2 = await model.doGenerate({
|
|
8837
8863
|
responseFormat: {
|
|
8838
8864
|
type: "json",
|
|
@@ -8847,7 +8873,7 @@ async function generateObject(options) {
|
|
|
8847
8873
|
headers: headersWithUserAgent
|
|
8848
8874
|
});
|
|
8849
8875
|
const responseData = {
|
|
8850
|
-
id: (_b = (
|
|
8876
|
+
id: (_b = (_a15 = result2.response) == null ? void 0 : _a15.id) != null ? _b : generateId2(),
|
|
8851
8877
|
timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
|
|
8852
8878
|
modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId,
|
|
8853
8879
|
headers: (_g = result2.response) == null ? void 0 : _g.headers,
|
|
@@ -8901,7 +8927,7 @@ async function generateObject(options) {
|
|
|
8901
8927
|
usage = asLanguageModelUsage(generateResult.usage);
|
|
8902
8928
|
warnings = generateResult.warnings;
|
|
8903
8929
|
resultProviderMetadata = generateResult.providerMetadata;
|
|
8904
|
-
request = (
|
|
8930
|
+
request = (_a14 = generateResult.request) != null ? _a14 : {};
|
|
8905
8931
|
response = generateResult.responseData;
|
|
8906
8932
|
reasoning = generateResult.reasoning;
|
|
8907
8933
|
logWarnings({
|
|
@@ -8964,9 +8990,9 @@ var DefaultGenerateObjectResult = class {
|
|
|
8964
8990
|
this.reasoning = options.reasoning;
|
|
8965
8991
|
}
|
|
8966
8992
|
toJsonResponse(init) {
|
|
8967
|
-
var
|
|
8993
|
+
var _a14;
|
|
8968
8994
|
return new Response(JSON.stringify(this.object), {
|
|
8969
|
-
status: (
|
|
8995
|
+
status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
|
|
8970
8996
|
headers: prepareHeaders(init == null ? void 0 : init.headers, {
|
|
8971
8997
|
"content-type": "application/json; charset=utf-8"
|
|
8972
8998
|
})
|
|
@@ -8975,7 +9001,7 @@ var DefaultGenerateObjectResult = class {
|
|
|
8975
9001
|
};
|
|
8976
9002
|
|
|
8977
9003
|
// src/generate-object/stream-object.ts
|
|
8978
|
-
var
|
|
9004
|
+
var import_provider_utils30 = require("@ai-sdk/provider-utils");
|
|
8979
9005
|
|
|
8980
9006
|
// src/util/cosine-similarity.ts
|
|
8981
9007
|
function cosineSimilarity(vector1, vector2) {
|
|
@@ -9085,15 +9111,15 @@ var SerialJobExecutor = class {
|
|
|
9085
9111
|
};
|
|
9086
9112
|
|
|
9087
9113
|
// src/util/simulate-readable-stream.ts
|
|
9088
|
-
var
|
|
9114
|
+
var import_provider_utils29 = require("@ai-sdk/provider-utils");
|
|
9089
9115
|
function simulateReadableStream({
|
|
9090
9116
|
chunks,
|
|
9091
9117
|
initialDelayInMs = 0,
|
|
9092
9118
|
chunkDelayInMs = 0,
|
|
9093
9119
|
_internal
|
|
9094
9120
|
}) {
|
|
9095
|
-
var
|
|
9096
|
-
const delay2 = (
|
|
9121
|
+
var _a14;
|
|
9122
|
+
const delay2 = (_a14 = _internal == null ? void 0 : _internal.delay) != null ? _a14 : import_provider_utils29.delay;
|
|
9097
9123
|
let index = 0;
|
|
9098
9124
|
return new ReadableStream({
|
|
9099
9125
|
async pull(controller) {
|
|
@@ -9108,7 +9134,7 @@ function simulateReadableStream({
|
|
|
9108
9134
|
}
|
|
9109
9135
|
|
|
9110
9136
|
// src/generate-object/stream-object.ts
|
|
9111
|
-
var originalGenerateId4 = (0,
|
|
9137
|
+
var originalGenerateId4 = (0, import_provider_utils30.createIdGenerator)({ prefix: "aiobj", size: 24 });
|
|
9112
9138
|
function streamObject(options) {
|
|
9113
9139
|
const {
|
|
9114
9140
|
model,
|
|
@@ -9198,13 +9224,13 @@ var DefaultStreamObjectResult = class {
|
|
|
9198
9224
|
currentDate,
|
|
9199
9225
|
now: now2
|
|
9200
9226
|
}) {
|
|
9201
|
-
this._object = new
|
|
9202
|
-
this._usage = new
|
|
9203
|
-
this._providerMetadata = new
|
|
9204
|
-
this._warnings = new
|
|
9205
|
-
this._request = new
|
|
9206
|
-
this._response = new
|
|
9207
|
-
this._finishReason = new
|
|
9227
|
+
this._object = new import_provider_utils30.DelayedPromise();
|
|
9228
|
+
this._usage = new import_provider_utils30.DelayedPromise();
|
|
9229
|
+
this._providerMetadata = new import_provider_utils30.DelayedPromise();
|
|
9230
|
+
this._warnings = new import_provider_utils30.DelayedPromise();
|
|
9231
|
+
this._request = new import_provider_utils30.DelayedPromise();
|
|
9232
|
+
this._response = new import_provider_utils30.DelayedPromise();
|
|
9233
|
+
this._finishReason = new import_provider_utils30.DelayedPromise();
|
|
9208
9234
|
const model = resolveLanguageModel(modelArg);
|
|
9209
9235
|
const { maxRetries, retry } = prepareRetries({
|
|
9210
9236
|
maxRetries: maxRetriesArg,
|
|
@@ -9351,7 +9377,7 @@ var DefaultStreamObjectResult = class {
|
|
|
9351
9377
|
const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
|
|
9352
9378
|
new TransformStream({
|
|
9353
9379
|
async transform(chunk, controller) {
|
|
9354
|
-
var
|
|
9380
|
+
var _a14, _b, _c;
|
|
9355
9381
|
if (typeof chunk === "object" && chunk.type === "stream-start") {
|
|
9356
9382
|
warnings = chunk.warnings;
|
|
9357
9383
|
return;
|
|
@@ -9401,7 +9427,7 @@ var DefaultStreamObjectResult = class {
|
|
|
9401
9427
|
switch (chunk.type) {
|
|
9402
9428
|
case "response-metadata": {
|
|
9403
9429
|
fullResponse = {
|
|
9404
|
-
id: (
|
|
9430
|
+
id: (_a14 = chunk.id) != null ? _a14 : fullResponse.id,
|
|
9405
9431
|
timestamp: (_b = chunk.timestamp) != null ? _b : fullResponse.timestamp,
|
|
9406
9432
|
modelId: (_c = chunk.modelId) != null ? _c : fullResponse.modelId
|
|
9407
9433
|
};
|
|
@@ -9631,7 +9657,7 @@ var DefaultStreamObjectResult = class {
|
|
|
9631
9657
|
};
|
|
9632
9658
|
|
|
9633
9659
|
// src/generate-speech/generate-speech.ts
|
|
9634
|
-
var
|
|
9660
|
+
var import_provider_utils31 = require("@ai-sdk/provider-utils");
|
|
9635
9661
|
|
|
9636
9662
|
// src/generate-speech/generated-audio-file.ts
|
|
9637
9663
|
var DefaultGeneratedAudioFile = class extends DefaultGeneratedFile {
|
|
@@ -9672,12 +9698,12 @@ async function generateSpeech({
|
|
|
9672
9698
|
abortSignal,
|
|
9673
9699
|
headers
|
|
9674
9700
|
}) {
|
|
9675
|
-
var
|
|
9701
|
+
var _a14;
|
|
9676
9702
|
const resolvedModel = resolveSpeechModel(model);
|
|
9677
9703
|
if (!resolvedModel) {
|
|
9678
9704
|
throw new Error("Model could not be resolved");
|
|
9679
9705
|
}
|
|
9680
|
-
const headersWithUserAgent = (0,
|
|
9706
|
+
const headersWithUserAgent = (0, import_provider_utils31.withUserAgentSuffix)(
|
|
9681
9707
|
headers != null ? headers : {},
|
|
9682
9708
|
`ai/${VERSION}`
|
|
9683
9709
|
);
|
|
@@ -9709,10 +9735,10 @@ async function generateSpeech({
|
|
|
9709
9735
|
return new DefaultSpeechResult({
|
|
9710
9736
|
audio: new DefaultGeneratedAudioFile({
|
|
9711
9737
|
data: result.audio,
|
|
9712
|
-
mediaType: (
|
|
9738
|
+
mediaType: (_a14 = detectMediaType({
|
|
9713
9739
|
data: result.audio,
|
|
9714
9740
|
signatures: audioMediaTypeSignatures
|
|
9715
|
-
})) != null ?
|
|
9741
|
+
})) != null ? _a14 : "audio/mp3"
|
|
9716
9742
|
}),
|
|
9717
9743
|
warnings: result.warnings,
|
|
9718
9744
|
responses: [result.response],
|
|
@@ -9721,11 +9747,11 @@ async function generateSpeech({
|
|
|
9721
9747
|
}
|
|
9722
9748
|
var DefaultSpeechResult = class {
|
|
9723
9749
|
constructor(options) {
|
|
9724
|
-
var
|
|
9750
|
+
var _a14;
|
|
9725
9751
|
this.audio = options.audio;
|
|
9726
9752
|
this.warnings = options.warnings;
|
|
9727
9753
|
this.responses = options.responses;
|
|
9728
|
-
this.providerMetadata = (
|
|
9754
|
+
this.providerMetadata = (_a14 = options.providerMetadata) != null ? _a14 : {};
|
|
9729
9755
|
}
|
|
9730
9756
|
};
|
|
9731
9757
|
|
|
@@ -9809,8 +9835,8 @@ function pruneMessages({
|
|
|
9809
9835
|
}
|
|
9810
9836
|
|
|
9811
9837
|
// src/generate-text/smooth-stream.ts
|
|
9812
|
-
var
|
|
9813
|
-
var
|
|
9838
|
+
var import_provider_utils32 = require("@ai-sdk/provider-utils");
|
|
9839
|
+
var import_provider26 = require("@ai-sdk/provider");
|
|
9814
9840
|
var CHUNKING_REGEXPS = {
|
|
9815
9841
|
word: /\S+\s+/m,
|
|
9816
9842
|
line: /\n+/m
|
|
@@ -9818,7 +9844,7 @@ var CHUNKING_REGEXPS = {
|
|
|
9818
9844
|
function smoothStream({
|
|
9819
9845
|
delayInMs = 10,
|
|
9820
9846
|
chunking = "word",
|
|
9821
|
-
_internal: { delay: delay2 =
|
|
9847
|
+
_internal: { delay: delay2 = import_provider_utils32.delay } = {}
|
|
9822
9848
|
} = {}) {
|
|
9823
9849
|
let detectChunk;
|
|
9824
9850
|
if (typeof chunking === "function") {
|
|
@@ -9840,7 +9866,7 @@ function smoothStream({
|
|
|
9840
9866
|
} else {
|
|
9841
9867
|
const chunkingRegex = typeof chunking === "string" ? CHUNKING_REGEXPS[chunking] : chunking;
|
|
9842
9868
|
if (chunkingRegex == null) {
|
|
9843
|
-
throw new
|
|
9869
|
+
throw new import_provider26.InvalidArgumentError({
|
|
9844
9870
|
argument: "chunking",
|
|
9845
9871
|
message: `Chunking must be "word" or "line" or a RegExp. Received: ${chunking}`
|
|
9846
9872
|
});
|
|
@@ -10157,13 +10183,13 @@ function addToolInputExamplesMiddleware({
|
|
|
10157
10183
|
return {
|
|
10158
10184
|
specificationVersion: "v3",
|
|
10159
10185
|
transformParams: async ({ params }) => {
|
|
10160
|
-
var
|
|
10161
|
-
if (!((
|
|
10186
|
+
var _a14;
|
|
10187
|
+
if (!((_a14 = params.tools) == null ? void 0 : _a14.length)) {
|
|
10162
10188
|
return params;
|
|
10163
10189
|
}
|
|
10164
10190
|
const transformedTools = params.tools.map((tool2) => {
|
|
10165
|
-
var
|
|
10166
|
-
if (tool2.type !== "function" || !((
|
|
10191
|
+
var _a15;
|
|
10192
|
+
if (tool2.type !== "function" || !((_a15 = tool2.inputExamples) == null ? void 0 : _a15.length)) {
|
|
10167
10193
|
return tool2;
|
|
10168
10194
|
}
|
|
10169
10195
|
const formattedExamples = tool2.inputExamples.map((example, index) => format(example, index)).join("\n");
|
|
@@ -10210,7 +10236,7 @@ var doWrap = ({
|
|
|
10210
10236
|
modelId,
|
|
10211
10237
|
providerId
|
|
10212
10238
|
}) => {
|
|
10213
|
-
var
|
|
10239
|
+
var _a14, _b, _c;
|
|
10214
10240
|
async function doTransform({
|
|
10215
10241
|
params,
|
|
10216
10242
|
type
|
|
@@ -10219,7 +10245,7 @@ var doWrap = ({
|
|
|
10219
10245
|
}
|
|
10220
10246
|
return {
|
|
10221
10247
|
specificationVersion: "v3",
|
|
10222
|
-
provider: (
|
|
10248
|
+
provider: (_a14 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a14 : model.provider,
|
|
10223
10249
|
modelId: (_b = modelId != null ? modelId : overrideModelId == null ? void 0 : overrideModelId({ model })) != null ? _b : model.modelId,
|
|
10224
10250
|
supportedUrls: (_c = overrideSupportedUrls == null ? void 0 : overrideSupportedUrls({ model })) != null ? _c : model.supportedUrls,
|
|
10225
10251
|
async doGenerate(params) {
|
|
@@ -10266,7 +10292,7 @@ var doWrap2 = ({
|
|
|
10266
10292
|
modelId,
|
|
10267
10293
|
providerId
|
|
10268
10294
|
}) => {
|
|
10269
|
-
var
|
|
10295
|
+
var _a14, _b, _c, _d;
|
|
10270
10296
|
async function doTransform({
|
|
10271
10297
|
params
|
|
10272
10298
|
}) {
|
|
@@ -10274,7 +10300,7 @@ var doWrap2 = ({
|
|
|
10274
10300
|
}
|
|
10275
10301
|
return {
|
|
10276
10302
|
specificationVersion: "v3",
|
|
10277
|
-
provider: (
|
|
10303
|
+
provider: (_a14 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a14 : model.provider,
|
|
10278
10304
|
modelId: (_b = modelId != null ? modelId : overrideModelId == null ? void 0 : overrideModelId({ model })) != null ? _b : model.modelId,
|
|
10279
10305
|
maxEmbeddingsPerCall: (_c = overrideMaxEmbeddingsPerCall == null ? void 0 : overrideMaxEmbeddingsPerCall({ model })) != null ? _c : model.maxEmbeddingsPerCall,
|
|
10280
10306
|
supportsParallelCalls: (_d = overrideSupportsParallelCalls == null ? void 0 : overrideSupportsParallelCalls({ model })) != null ? _d : model.supportsParallelCalls,
|
|
@@ -10329,7 +10355,7 @@ function wrapProvider({
|
|
|
10329
10355
|
}
|
|
10330
10356
|
|
|
10331
10357
|
// src/registry/custom-provider.ts
|
|
10332
|
-
var
|
|
10358
|
+
var import_provider27 = require("@ai-sdk/provider");
|
|
10333
10359
|
function customProvider({
|
|
10334
10360
|
languageModels,
|
|
10335
10361
|
embeddingModels,
|
|
@@ -10349,7 +10375,7 @@ function customProvider({
|
|
|
10349
10375
|
if (fallbackProvider) {
|
|
10350
10376
|
return fallbackProvider.languageModel(modelId);
|
|
10351
10377
|
}
|
|
10352
|
-
throw new
|
|
10378
|
+
throw new import_provider27.NoSuchModelError({ modelId, modelType: "languageModel" });
|
|
10353
10379
|
},
|
|
10354
10380
|
embeddingModel(modelId) {
|
|
10355
10381
|
if (embeddingModels != null && modelId in embeddingModels) {
|
|
@@ -10358,7 +10384,7 @@ function customProvider({
|
|
|
10358
10384
|
if (fallbackProvider) {
|
|
10359
10385
|
return fallbackProvider.embeddingModel(modelId);
|
|
10360
10386
|
}
|
|
10361
|
-
throw new
|
|
10387
|
+
throw new import_provider27.NoSuchModelError({ modelId, modelType: "embeddingModel" });
|
|
10362
10388
|
},
|
|
10363
10389
|
imageModel(modelId) {
|
|
10364
10390
|
if (imageModels != null && modelId in imageModels) {
|
|
@@ -10367,7 +10393,7 @@ function customProvider({
|
|
|
10367
10393
|
if (fallbackProvider == null ? void 0 : fallbackProvider.imageModel) {
|
|
10368
10394
|
return fallbackProvider.imageModel(modelId);
|
|
10369
10395
|
}
|
|
10370
|
-
throw new
|
|
10396
|
+
throw new import_provider27.NoSuchModelError({ modelId, modelType: "imageModel" });
|
|
10371
10397
|
},
|
|
10372
10398
|
transcriptionModel(modelId) {
|
|
10373
10399
|
if (transcriptionModels != null && modelId in transcriptionModels) {
|
|
@@ -10376,7 +10402,7 @@ function customProvider({
|
|
|
10376
10402
|
if (fallbackProvider == null ? void 0 : fallbackProvider.transcriptionModel) {
|
|
10377
10403
|
return fallbackProvider.transcriptionModel(modelId);
|
|
10378
10404
|
}
|
|
10379
|
-
throw new
|
|
10405
|
+
throw new import_provider27.NoSuchModelError({ modelId, modelType: "transcriptionModel" });
|
|
10380
10406
|
},
|
|
10381
10407
|
speechModel(modelId) {
|
|
10382
10408
|
if (speechModels != null && modelId in speechModels) {
|
|
@@ -10385,7 +10411,7 @@ function customProvider({
|
|
|
10385
10411
|
if (fallbackProvider == null ? void 0 : fallbackProvider.speechModel) {
|
|
10386
10412
|
return fallbackProvider.speechModel(modelId);
|
|
10387
10413
|
}
|
|
10388
|
-
throw new
|
|
10414
|
+
throw new import_provider27.NoSuchModelError({ modelId, modelType: "speechModel" });
|
|
10389
10415
|
},
|
|
10390
10416
|
rerankingModel(modelId) {
|
|
10391
10417
|
if (rerankingModels != null && modelId in rerankingModels) {
|
|
@@ -10394,19 +10420,19 @@ function customProvider({
|
|
|
10394
10420
|
if (fallbackProvider == null ? void 0 : fallbackProvider.rerankingModel) {
|
|
10395
10421
|
return fallbackProvider.rerankingModel(modelId);
|
|
10396
10422
|
}
|
|
10397
|
-
throw new
|
|
10423
|
+
throw new import_provider27.NoSuchModelError({ modelId, modelType: "rerankingModel" });
|
|
10398
10424
|
}
|
|
10399
10425
|
};
|
|
10400
10426
|
}
|
|
10401
10427
|
var experimental_customProvider = customProvider;
|
|
10402
10428
|
|
|
10403
10429
|
// src/registry/no-such-provider-error.ts
|
|
10404
|
-
var
|
|
10405
|
-
var
|
|
10406
|
-
var
|
|
10407
|
-
var
|
|
10408
|
-
var
|
|
10409
|
-
var NoSuchProviderError = class extends
|
|
10430
|
+
var import_provider28 = require("@ai-sdk/provider");
|
|
10431
|
+
var name13 = "AI_NoSuchProviderError";
|
|
10432
|
+
var marker13 = `vercel.ai.error.${name13}`;
|
|
10433
|
+
var symbol13 = Symbol.for(marker13);
|
|
10434
|
+
var _a13;
|
|
10435
|
+
var NoSuchProviderError = class extends import_provider28.NoSuchModelError {
|
|
10410
10436
|
constructor({
|
|
10411
10437
|
modelId,
|
|
10412
10438
|
modelType,
|
|
@@ -10414,19 +10440,19 @@ var NoSuchProviderError = class extends import_provider29.NoSuchModelError {
|
|
|
10414
10440
|
availableProviders,
|
|
10415
10441
|
message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
|
|
10416
10442
|
}) {
|
|
10417
|
-
super({ errorName:
|
|
10418
|
-
this[
|
|
10443
|
+
super({ errorName: name13, modelId, modelType, message });
|
|
10444
|
+
this[_a13] = true;
|
|
10419
10445
|
this.providerId = providerId;
|
|
10420
10446
|
this.availableProviders = availableProviders;
|
|
10421
10447
|
}
|
|
10422
10448
|
static isInstance(error) {
|
|
10423
|
-
return
|
|
10449
|
+
return import_provider28.AISDKError.hasMarker(error, marker13);
|
|
10424
10450
|
}
|
|
10425
10451
|
};
|
|
10426
|
-
|
|
10452
|
+
_a13 = symbol13;
|
|
10427
10453
|
|
|
10428
10454
|
// src/registry/provider-registry.ts
|
|
10429
|
-
var
|
|
10455
|
+
var import_provider29 = require("@ai-sdk/provider");
|
|
10430
10456
|
function createProviderRegistry(providers, {
|
|
10431
10457
|
separator = ":",
|
|
10432
10458
|
languageModelMiddleware
|
|
@@ -10471,7 +10497,7 @@ var DefaultProviderRegistry = class {
|
|
|
10471
10497
|
splitId(id, modelType) {
|
|
10472
10498
|
const index = id.indexOf(this.separator);
|
|
10473
10499
|
if (index === -1) {
|
|
10474
|
-
throw new
|
|
10500
|
+
throw new import_provider29.NoSuchModelError({
|
|
10475
10501
|
modelId: id,
|
|
10476
10502
|
modelType,
|
|
10477
10503
|
message: `Invalid ${modelType} id for registry: ${id} (must be in the format "providerId${this.separator}modelId")`
|
|
@@ -10480,14 +10506,14 @@ var DefaultProviderRegistry = class {
|
|
|
10480
10506
|
return [id.slice(0, index), id.slice(index + this.separator.length)];
|
|
10481
10507
|
}
|
|
10482
10508
|
languageModel(id) {
|
|
10483
|
-
var
|
|
10509
|
+
var _a14, _b;
|
|
10484
10510
|
const [providerId, modelId] = this.splitId(id, "languageModel");
|
|
10485
|
-
let model = (_b = (
|
|
10486
|
-
|
|
10511
|
+
let model = (_b = (_a14 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
|
|
10512
|
+
_a14,
|
|
10487
10513
|
modelId
|
|
10488
10514
|
);
|
|
10489
10515
|
if (model == null) {
|
|
10490
|
-
throw new
|
|
10516
|
+
throw new import_provider29.NoSuchModelError({ modelId: id, modelType: "languageModel" });
|
|
10491
10517
|
}
|
|
10492
10518
|
if (this.languageModelMiddleware != null) {
|
|
10493
10519
|
model = wrapLanguageModel({
|
|
@@ -10498,12 +10524,12 @@ var DefaultProviderRegistry = class {
|
|
|
10498
10524
|
return model;
|
|
10499
10525
|
}
|
|
10500
10526
|
embeddingModel(id) {
|
|
10501
|
-
var
|
|
10527
|
+
var _a14;
|
|
10502
10528
|
const [providerId, modelId] = this.splitId(id, "embeddingModel");
|
|
10503
10529
|
const provider = this.getProvider(providerId, "embeddingModel");
|
|
10504
|
-
const model = (
|
|
10530
|
+
const model = (_a14 = provider.embeddingModel) == null ? void 0 : _a14.call(provider, modelId);
|
|
10505
10531
|
if (model == null) {
|
|
10506
|
-
throw new
|
|
10532
|
+
throw new import_provider29.NoSuchModelError({
|
|
10507
10533
|
modelId: id,
|
|
10508
10534
|
modelType: "embeddingModel"
|
|
10509
10535
|
});
|
|
@@ -10511,22 +10537,22 @@ var DefaultProviderRegistry = class {
|
|
|
10511
10537
|
return model;
|
|
10512
10538
|
}
|
|
10513
10539
|
imageModel(id) {
|
|
10514
|
-
var
|
|
10540
|
+
var _a14;
|
|
10515
10541
|
const [providerId, modelId] = this.splitId(id, "imageModel");
|
|
10516
10542
|
const provider = this.getProvider(providerId, "imageModel");
|
|
10517
|
-
const model = (
|
|
10543
|
+
const model = (_a14 = provider.imageModel) == null ? void 0 : _a14.call(provider, modelId);
|
|
10518
10544
|
if (model == null) {
|
|
10519
|
-
throw new
|
|
10545
|
+
throw new import_provider29.NoSuchModelError({ modelId: id, modelType: "imageModel" });
|
|
10520
10546
|
}
|
|
10521
10547
|
return model;
|
|
10522
10548
|
}
|
|
10523
10549
|
transcriptionModel(id) {
|
|
10524
|
-
var
|
|
10550
|
+
var _a14;
|
|
10525
10551
|
const [providerId, modelId] = this.splitId(id, "transcriptionModel");
|
|
10526
10552
|
const provider = this.getProvider(providerId, "transcriptionModel");
|
|
10527
|
-
const model = (
|
|
10553
|
+
const model = (_a14 = provider.transcriptionModel) == null ? void 0 : _a14.call(provider, modelId);
|
|
10528
10554
|
if (model == null) {
|
|
10529
|
-
throw new
|
|
10555
|
+
throw new import_provider29.NoSuchModelError({
|
|
10530
10556
|
modelId: id,
|
|
10531
10557
|
modelType: "transcriptionModel"
|
|
10532
10558
|
});
|
|
@@ -10534,22 +10560,22 @@ var DefaultProviderRegistry = class {
|
|
|
10534
10560
|
return model;
|
|
10535
10561
|
}
|
|
10536
10562
|
speechModel(id) {
|
|
10537
|
-
var
|
|
10563
|
+
var _a14;
|
|
10538
10564
|
const [providerId, modelId] = this.splitId(id, "speechModel");
|
|
10539
10565
|
const provider = this.getProvider(providerId, "speechModel");
|
|
10540
|
-
const model = (
|
|
10566
|
+
const model = (_a14 = provider.speechModel) == null ? void 0 : _a14.call(provider, modelId);
|
|
10541
10567
|
if (model == null) {
|
|
10542
|
-
throw new
|
|
10568
|
+
throw new import_provider29.NoSuchModelError({ modelId: id, modelType: "speechModel" });
|
|
10543
10569
|
}
|
|
10544
10570
|
return model;
|
|
10545
10571
|
}
|
|
10546
10572
|
rerankingModel(id) {
|
|
10547
|
-
var
|
|
10573
|
+
var _a14;
|
|
10548
10574
|
const [providerId, modelId] = this.splitId(id, "rerankingModel");
|
|
10549
10575
|
const provider = this.getProvider(providerId, "rerankingModel");
|
|
10550
|
-
const model = (
|
|
10576
|
+
const model = (_a14 = provider.rerankingModel) == null ? void 0 : _a14.call(provider, modelId);
|
|
10551
10577
|
if (model == null) {
|
|
10552
|
-
throw new
|
|
10578
|
+
throw new import_provider29.NoSuchModelError({ modelId: id, modelType: "rerankingModel" });
|
|
10553
10579
|
}
|
|
10554
10580
|
return model;
|
|
10555
10581
|
}
|
|
@@ -10604,7 +10630,7 @@ async function rerank({
|
|
|
10604
10630
|
}),
|
|
10605
10631
|
tracer,
|
|
10606
10632
|
fn: async () => {
|
|
10607
|
-
var
|
|
10633
|
+
var _a14, _b;
|
|
10608
10634
|
const { ranking, response, providerMetadata, warnings } = await retry(
|
|
10609
10635
|
() => recordSpan({
|
|
10610
10636
|
name: "ai.rerank.doRerank",
|
|
@@ -10668,7 +10694,7 @@ async function rerank({
|
|
|
10668
10694
|
providerMetadata,
|
|
10669
10695
|
response: {
|
|
10670
10696
|
id: response == null ? void 0 : response.id,
|
|
10671
|
-
timestamp: (
|
|
10697
|
+
timestamp: (_a14 = response == null ? void 0 : response.timestamp) != null ? _a14 : /* @__PURE__ */ new Date(),
|
|
10672
10698
|
modelId: (_b = response == null ? void 0 : response.modelId) != null ? _b : model.modelId,
|
|
10673
10699
|
headers: response == null ? void 0 : response.headers,
|
|
10674
10700
|
body: response == null ? void 0 : response.body
|
|
@@ -10690,11 +10716,11 @@ var DefaultRerankResult = class {
  };

  // src/transcribe/transcribe.ts
- var
+ var import_provider_utils33 = require("@ai-sdk/provider-utils");

  // src/error/no-transcript-generated-error.ts
- var
- var NoTranscriptGeneratedError = class extends
+ var import_provider30 = require("@ai-sdk/provider");
+ var NoTranscriptGeneratedError = class extends import_provider30.AISDKError {
    constructor(options) {
      super({
        name: "AI_NoTranscriptGeneratedError",
@@ -10721,23 +10747,23 @@ async function transcribe({
      maxRetries: maxRetriesArg,
      abortSignal
    });
-   const headersWithUserAgent = (0,
+   const headersWithUserAgent = (0, import_provider_utils33.withUserAgentSuffix)(
      headers != null ? headers : {},
      `ai/${VERSION}`
    );
    const audioData = audio instanceof URL ? (await download({ url: audio })).data : convertDataContentToUint8Array(audio);
    const result = await retry(
      () => {
-       var
+       var _a14;
        return resolvedModel.doGenerate({
          audio: audioData,
          abortSignal,
          headers: headersWithUserAgent,
          providerOptions,
-         mediaType: (
+         mediaType: (_a14 = detectMediaType({
            data: audioData,
            signatures: audioMediaTypeSignatures
-         })) != null ?
+         })) != null ? _a14 : "audio/wav"
        });
      }
    );
@@ -10761,19 +10787,19 @@ async function transcribe({
  }
  var DefaultTranscriptionResult = class {
    constructor(options) {
-     var
+     var _a14;
      this.text = options.text;
      this.segments = options.segments;
      this.language = options.language;
      this.durationInSeconds = options.durationInSeconds;
      this.warnings = options.warnings;
      this.responses = options.responses;
-     this.providerMetadata = (
+     this.providerMetadata = (_a14 = options.providerMetadata) != null ? _a14 : {};
    }
  };

  // src/ui/call-completion-api.ts
- var
+ var import_provider_utils34 = require("@ai-sdk/provider-utils");

  // src/ui/process-text-stream.ts
  async function processTextStream({
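The `transcribe` function patched above accepts audio either as a `URL`, which it downloads, or as raw data, and detects the media type from the bytes with an `audio/wav` fallback. A minimal sketch; the export name (it may still carry an `experimental_` prefix in this beta), the OpenAI provider, and the model id are assumptions:

import { experimental_transcribe as transcribe } from "ai"; // export name assumed
import { openai } from "@ai-sdk/openai"; // placeholder provider and model id
import { readFile } from "node:fs/promises";

const result = await transcribe({
  model: openai.transcription("whisper-1"),
  audio: await readFile("recording.wav"), // a URL would be downloaded instead
});

// Fields set by DefaultTranscriptionResult in the hunk above.
console.log(result.text, result.language, result.durationInSeconds);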
@@ -10807,7 +10833,7 @@ async function callCompletionApi({
    onError,
    fetch: fetch2 = getOriginalFetch()
  }) {
-   var
+   var _a14;
    try {
      setLoading(true);
      setError(void 0);
@@ -10821,13 +10847,13 @@ async function callCompletionApi({
        ...body
      }),
      credentials,
-     headers: (0,
+     headers: (0, import_provider_utils34.withUserAgentSuffix)(
        {
          "Content-Type": "application/json",
          ...headers
        },
        `ai-sdk/${VERSION}`,
-       (0,
+       (0, import_provider_utils34.getRuntimeEnvironmentUserAgent)()
      ),
      signal: abortController.signal
    }).catch((err) => {
@@ -10835,7 +10861,7 @@ async function callCompletionApi({
    });
    if (!response.ok) {
      throw new Error(
-       (
+       (_a14 = await response.text()) != null ? _a14 : "Failed to fetch the chat response."
      );
    }
    if (!response.body) {
@@ -10855,7 +10881,7 @@ async function callCompletionApi({
        }
        case "data": {
          await consumeStream({
-           stream: (0,
+           stream: (0, import_provider_utils34.parseJsonEventStream)({
              stream: response.body,
              schema: uiMessageChunkSchema
            }).pipeThrough(
@@ -10907,7 +10933,7 @@ async function callCompletionApi({
  }

  // src/ui/chat.ts
- var
+ var import_provider_utils37 = require("@ai-sdk/provider-utils");

  // src/ui/convert-file-list-to-file-ui-parts.ts
  async function convertFileListToFileUIParts(files) {
@@ -10919,12 +10945,12 @@ async function convertFileListToFileUIParts(files) {
    }
    return Promise.all(
      Array.from(files).map(async (file) => {
-       const { name:
+       const { name: name14, type } = file;
        const dataUrl = await new Promise((resolve3, reject) => {
          const reader = new FileReader();
          reader.onload = (readerEvent) => {
-           var
-           resolve3((
+           var _a14;
+           resolve3((_a14 = readerEvent.target) == null ? void 0 : _a14.result);
          };
          reader.onerror = (error) => reject(error);
          reader.readAsDataURL(file);
@@ -10932,7 +10958,7 @@ async function convertFileListToFileUIParts(files) {
        return {
          type: "file",
          mediaType: type,
-         filename:
+         filename: name14,
          url: dataUrl
        };
      })
@@ -10940,10 +10966,10 @@ async function convertFileListToFileUIParts(files) {
  }

  // src/ui/default-chat-transport.ts
- var
+ var import_provider_utils36 = require("@ai-sdk/provider-utils");

  // src/ui/http-chat-transport.ts
- var
+ var import_provider_utils35 = require("@ai-sdk/provider-utils");
  var HttpChatTransport = class {
    constructor({
      api = "/api/chat",
@@ -10966,15 +10992,15 @@ var HttpChatTransport = class {
      abortSignal,
      ...options
    }) {
-     var
-     const resolvedBody = await (0,
-     const resolvedHeaders = await (0,
-     const resolvedCredentials = await (0,
+     var _a14, _b, _c, _d, _e;
+     const resolvedBody = await (0, import_provider_utils35.resolve)(this.body);
+     const resolvedHeaders = await (0, import_provider_utils35.resolve)(this.headers);
+     const resolvedCredentials = await (0, import_provider_utils35.resolve)(this.credentials);
      const baseHeaders = {
-       ...(0,
-       ...(0,
+       ...(0, import_provider_utils35.normalizeHeaders)(resolvedHeaders),
+       ...(0, import_provider_utils35.normalizeHeaders)(options.headers)
      };
-     const preparedRequest = await ((
+     const preparedRequest = await ((_a14 = this.prepareSendMessagesRequest) == null ? void 0 : _a14.call(this, {
        api: this.api,
        id: options.chatId,
        messages: options.messages,
@@ -10986,7 +11012,7 @@ var HttpChatTransport = class {
        messageId: options.messageId
      }));
      const api = (_b = preparedRequest == null ? void 0 : preparedRequest.api) != null ? _b : this.api;
-     const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? (0,
+     const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? (0, import_provider_utils35.normalizeHeaders)(preparedRequest.headers) : baseHeaders;
      const body = (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : {
        ...resolvedBody,
        ...options.body,
@@ -10999,13 +11025,13 @@ var HttpChatTransport = class {
      const fetch2 = (_d = this.fetch) != null ? _d : globalThis.fetch;
      const response = await fetch2(api, {
        method: "POST",
-       headers: (0,
+       headers: (0, import_provider_utils35.withUserAgentSuffix)(
          {
            "Content-Type": "application/json",
            ...headers
          },
          `ai-sdk/${VERSION}`,
-         (0,
+         (0, import_provider_utils35.getRuntimeEnvironmentUserAgent)()
        ),
        body: JSON.stringify(body),
        credentials,
@@ -11022,15 +11048,15 @@ var HttpChatTransport = class {
      return this.processResponseStream(response.body);
    }
    async reconnectToStream(options) {
-     var
-     const resolvedBody = await (0,
-     const resolvedHeaders = await (0,
-     const resolvedCredentials = await (0,
+     var _a14, _b, _c, _d, _e;
+     const resolvedBody = await (0, import_provider_utils35.resolve)(this.body);
+     const resolvedHeaders = await (0, import_provider_utils35.resolve)(this.headers);
+     const resolvedCredentials = await (0, import_provider_utils35.resolve)(this.credentials);
      const baseHeaders = {
-       ...(0,
-       ...(0,
+       ...(0, import_provider_utils35.normalizeHeaders)(resolvedHeaders),
+       ...(0, import_provider_utils35.normalizeHeaders)(options.headers)
      };
-     const preparedRequest = await ((
+     const preparedRequest = await ((_a14 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a14.call(this, {
        api: this.api,
        id: options.chatId,
        body: { ...resolvedBody, ...options.body },
@@ -11039,15 +11065,15 @@ var HttpChatTransport = class {
        requestMetadata: options.metadata
      }));
      const api = (_b = preparedRequest == null ? void 0 : preparedRequest.api) != null ? _b : `${this.api}/${options.chatId}/stream`;
-     const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? (0,
+     const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? (0, import_provider_utils35.normalizeHeaders)(preparedRequest.headers) : baseHeaders;
      const credentials = (_c = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _c : resolvedCredentials;
      const fetch2 = (_d = this.fetch) != null ? _d : globalThis.fetch;
      const response = await fetch2(api, {
        method: "GET",
-       headers: (0,
+       headers: (0, import_provider_utils35.withUserAgentSuffix)(
          headers,
          `ai-sdk/${VERSION}`,
-         (0,
+         (0, import_provider_utils35.getRuntimeEnvironmentUserAgent)()
        ),
        credentials
      });
@@ -11072,7 +11098,7 @@ var DefaultChatTransport = class extends HttpChatTransport {
      super(options);
    }
    processResponseStream(stream) {
-     return (0,
+     return (0, import_provider_utils36.parseJsonEventStream)({
        stream,
        schema: uiMessageChunkSchema
      }).pipeThrough(
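As the hunks above show, `HttpChatTransport.sendMessages` resolves `body`, `headers`, and `credentials`, merges per-call options, lets `prepareSendMessagesRequest` override any of them, and then issues the POST with an `ai-sdk/<version>` user-agent suffix. A small sketch of configuring the exported `DefaultChatTransport`; the endpoint and header value are placeholders:

import { DefaultChatTransport } from "ai";

const transport = new DefaultChatTransport({
  api: "/api/chat", // the default shown in the hunk above
  headers: { Authorization: "Bearer <token>" }, // placeholder value
  // Only `id` and `messages` are visibly passed to the callback in this diff;
  // returning a partial object keeps the defaults for everything omitted.
  prepareSendMessagesRequest: ({ id, messages }) => ({
    body: { id, messages: messages.slice(-10) },
  }),
});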
@@ -11091,7 +11117,7 @@ var DefaultChatTransport = class extends HttpChatTransport {
  // src/ui/chat.ts
  var AbstractChat = class {
    constructor({
-     generateId: generateId2 =
+     generateId: generateId2 = import_provider_utils37.generateId,
      id = generateId2(),
      transport = new DefaultChatTransport(),
      messageMetadataSchema,
@@ -11112,11 +11138,11 @@ var AbstractChat = class {
       * If a messageId is provided, the message will be replaced.
       */
      this.sendMessage = async (message, options) => {
-       var
+       var _a14, _b, _c, _d;
        if (message == null) {
          await this.makeRequest({
            trigger: "submit-message",
-           messageId: (
+           messageId: (_a14 = this.lastMessage) == null ? void 0 : _a14.id,
            ...options
          });
          return;
@@ -11209,7 +11235,7 @@ var AbstractChat = class {
        approved,
        reason
      }) => this.jobExecutor.run(async () => {
-       var
+       var _a14, _b;
        const messages = this.state.messages;
        const lastMessage = messages[messages.length - 1];
        const updatePart = (part) => isToolUIPart(part) && part.state === "approval-requested" && part.approval.id === id ? {
@@ -11224,7 +11250,7 @@ var AbstractChat = class {
        if (this.activeResponse) {
          this.activeResponse.state.message.parts = this.activeResponse.state.message.parts.map(updatePart);
        }
-       if (this.status !== "streaming" && this.status !== "submitted" && ((
+       if (this.status !== "streaming" && this.status !== "submitted" && ((_a14 = this.sendAutomaticallyWhen) == null ? void 0 : _a14.call(this, { messages: this.state.messages }))) {
          this.makeRequest({
            trigger: "submit-message",
            messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -11238,7 +11264,7 @@ var AbstractChat = class {
        output,
        errorText
      }) => this.jobExecutor.run(async () => {
-       var
+       var _a14, _b;
        const messages = this.state.messages;
        const lastMessage = messages[messages.length - 1];
        const updatePart = (part) => isToolUIPart(part) && part.toolCallId === toolCallId ? { ...part, state, output, errorText } : part;
@@ -11249,7 +11275,7 @@ var AbstractChat = class {
        if (this.activeResponse) {
          this.activeResponse.state.message.parts = this.activeResponse.state.message.parts.map(updatePart);
        }
-       if (this.status !== "streaming" && this.status !== "submitted" && ((
+       if (this.status !== "streaming" && this.status !== "submitted" && ((_a14 = this.sendAutomaticallyWhen) == null ? void 0 : _a14.call(this, { messages: this.state.messages }))) {
          this.makeRequest({
            trigger: "submit-message",
            messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
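Both tool-result paths above end with the same check: if no request is streaming or submitted and `sendAutomaticallyWhen` returns true for the current messages, a new `submit-message` request is issued. A sketch of wiring that option, assuming the `Chat` binding from `@ai-sdk/react` (any `AbstractChat` subclass would work) together with the `lastAssistantMessageIsCompleteWithToolCalls` helper:

import { lastAssistantMessageIsCompleteWithToolCalls } from "ai";
import { Chat } from "@ai-sdk/react"; // binding assumed; not part of this diff

const chat = new Chat({
  // Re-submit automatically once every tool call in the last assistant
  // message has a result, matching the check in the hunks above.
  sendAutomaticallyWhen: lastAssistantMessageIsCompleteWithToolCalls,
});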
@@ -11262,10 +11288,10 @@ var AbstractChat = class {
       * Abort the current request immediately, keep the generated tokens if any.
       */
      this.stop = async () => {
-       var
+       var _a14;
        if (this.status !== "streaming" && this.status !== "submitted")
          return;
-       if ((
+       if ((_a14 = this.activeResponse) == null ? void 0 : _a14.abortController) {
          this.activeResponse.abortController.abort();
        }
      };
@@ -11320,7 +11346,7 @@ var AbstractChat = class {
      body,
      messageId
    }) {
-     var
+     var _a14, _b, _c, _d;
      this.setStatus({ status: "submitted", error: void 0 });
      const lastMessage = this.lastMessage;
      let isAbort = false;
@@ -11369,9 +11395,9 @@ var AbstractChat = class {
        () => job({
          state: activeResponse.state,
          write: () => {
-           var
+           var _a15;
            this.setStatus({ status: "streaming" });
-           const replaceLastMessage = activeResponse.state.message.id === ((
+           const replaceLastMessage = activeResponse.state.message.id === ((_a15 = this.lastMessage) == null ? void 0 : _a15.id);
            if (replaceLastMessage) {
              this.state.replaceMessage(
                this.state.messages.length - 1,
@@ -11423,7 +11449,7 @@ var AbstractChat = class {
        isAbort,
        isDisconnect,
        isError,
-       finishReason: (
+       finishReason: (_a14 = this.activeResponse) == null ? void 0 : _a14.state.finishReason
      });
    } catch (err) {
      console.error(err);
@@ -11591,6 +11617,7 @@ var TextStreamChatTransport = class extends HttpChatTransport {
    extractReasoningMiddleware,
    gateway,
    generateId,
+   generateImage,
    generateObject,
    generateText,
    getStaticToolName,