@ai-sdk/amazon-bedrock 3.0.0-canary.13 → 3.0.0-canary.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/README.md +2 -2
- package/dist/index.d.mts +14 -14
- package/dist/index.d.ts +14 -14
- package/dist/index.js +55 -54
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +57 -54
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/index.mjs
CHANGED
@@ -44,9 +44,9 @@ var bedrockProviderOptions = z.object({
    */
   additionalModelRequestFields: z.record(z.any()).optional(),
   reasoningConfig: z.object({
-    type: z.union([z.literal("enabled"), z.literal("disabled")]).
-    budgetTokens: z.number().
-  }).
+    type: z.union([z.literal("enabled"), z.literal("disabled")]).optional(),
+    budgetTokens: z.number().optional()
+  }).optional()
 });

 // src/bedrock-error.ts
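In the provider-options schema, `reasoningConfig` and both of its fields (`type`, `budgetTokens`) are now marked `optional()`, so callers can omit any of them. A minimal sketch of passing these options through `generateText`; the model ID and token budget are illustrative, not taken from this diff:

```ts
import { bedrock } from '@ai-sdk/amazon-bedrock';
import { generateText } from 'ai';

// Hypothetical model ID and budget; with this change, reasoningConfig
// (and each of its fields) may also be omitted entirely.
const { text } = await generateText({
  model: bedrock('us.anthropic.claude-3-7-sonnet-20250219-v1:0'),
  prompt: 'Explain the halting problem in two sentences.',
  providerOptions: {
    bedrock: {
      reasoningConfig: { type: 'enabled', budgetTokens: 1024 },
    },
  },
});

console.log(text);
```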
@@ -618,7 +618,7 @@ var BedrockChatLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
     const { command: args, warnings } = await this.getArgs(options);
     const url = `${this.getUrl(this.modelId)}/converse`;
     const { value: response, responseHeaders } = await postJsonToApi({
@@ -685,10 +685,9 @@ var BedrockChatLanguageModel = class {
     const providerMetadata = response.trace || response.usage ? {
       bedrock: {
         ...response.trace && typeof response.trace === "object" ? { trace: response.trace } : {},
-        ...response.usage && {
+        ...((_h = response.usage) == null ? void 0 : _h.cacheWriteInputTokens) != null && {
           usage: {
-
-            cacheWriteInputTokens: (_k = (_j = response.usage) == null ? void 0 : _j.cacheWriteInputTokens) != null ? _k : Number.NaN
+            cacheWriteInputTokens: response.usage.cacheWriteInputTokens
           }
         }
       }
@@ -699,8 +698,10 @@ var BedrockChatLanguageModel = class {
         response.stopReason
       ),
       usage: {
-        inputTokens: (
-        outputTokens: (
+        inputTokens: (_i = response.usage) == null ? void 0 : _i.inputTokens,
+        outputTokens: (_j = response.usage) == null ? void 0 : _j.outputTokens,
+        totalTokens: ((_k = response.usage) == null ? void 0 : _k.inputTokens) + ((_l = response.usage) == null ? void 0 : _l.outputTokens),
+        cachedInputTokens: (_n = (_m = response.usage) == null ? void 0 : _m.cacheReadInputTokens) != null ? _n : void 0
       },
       response: {
         // TODO add id, timestamp, etc
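`doGenerate` now also reports `totalTokens` (the sum of input and output tokens) and `cachedInputTokens` (from Bedrock's `cacheReadInputTokens`), and it only attaches `bedrock.usage.cacheWriteInputTokens` to provider metadata when the response actually contains that field, instead of falling back to `Number.NaN`. A sketch of reading the new fields from a `generateText` call; the model ID is illustrative:

```ts
import { bedrock } from '@ai-sdk/amazon-bedrock';
import { generateText } from 'ai';

const { usage, providerMetadata } = await generateText({
  model: bedrock('anthropic.claude-3-5-sonnet-20240620-v1:0'),
  prompt: 'Summarize prompt caching in one sentence.',
});

console.log(usage.inputTokens, usage.outputTokens, usage.totalTokens);
console.log(usage.cachedInputTokens); // undefined unless cacheReadInputTokens was returned

// Only present when Bedrock reported cacheWriteInputTokens for this call.
console.log(providerMetadata?.bedrock?.usage);
```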
@@ -731,7 +732,8 @@ var BedrockChatLanguageModel = class {
     let finishReason = "unknown";
     const usage = {
       inputTokens: void 0,
-      outputTokens: void 0
+      outputTokens: void 0,
+      totalTokens: void 0
     };
     let providerMetadata = void 0;
     const toolCallContentBlocks = {};
@@ -742,7 +744,7 @@ var BedrockChatLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m
+        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
         function enqueueError(bedrockError) {
           finishReason = "error";
           controller.enqueue({ type: "error", error: bedrockError });
@@ -776,10 +778,11 @@ var BedrockChatLanguageModel = class {
         if (value.metadata) {
           usage.inputTokens = (_b = (_a = value.metadata.usage) == null ? void 0 : _a.inputTokens) != null ? _b : usage.inputTokens;
           usage.outputTokens = (_d = (_c = value.metadata.usage) == null ? void 0 : _c.outputTokens) != null ? _d : usage.outputTokens;
-
+          usage.totalTokens = ((_e = usage.inputTokens) != null ? _e : 0) + ((_f = usage.outputTokens) != null ? _f : 0);
+          usage.cachedInputTokens = (_h = (_g = value.metadata.usage) == null ? void 0 : _g.cacheReadInputTokens) != null ? _h : usage.cachedInputTokens;
+          const cacheUsage = ((_i = value.metadata.usage) == null ? void 0 : _i.cacheWriteInputTokens) != null ? {
            usage: {
-
-              cacheWriteInputTokens: (_j = (_i = value.metadata.usage) == null ? void 0 : _i.cacheWriteInputTokens) != null ? _j : Number.NaN
+              cacheWriteInputTokens: value.metadata.usage.cacheWriteInputTokens
            }
          } : void 0;
          const trace = value.metadata.trace ? {
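The streaming path gets the matching change: when the `metadata` event arrives, `totalTokens` is computed from the accumulated input and output counts, `cachedInputTokens` is taken from `cacheReadInputTokens`, and the cache-write figure is only added to provider metadata when present. A short sketch of observing the final usage from `streamText`, under the same illustrative model ID:

```ts
import { bedrock } from '@ai-sdk/amazon-bedrock';
import { streamText } from 'ai';

const result = streamText({
  model: bedrock('anthropic.claude-3-5-sonnet-20240620-v1:0'),
  prompt: 'Write a haiku about token caching.',
});

for await (const chunk of result.textStream) {
  process.stdout.write(chunk);
}

// Resolves after the stream finishes; totalTokens = inputTokens + outputTokens.
console.log(await result.usage);
```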
@@ -794,13 +797,13 @@ var BedrockChatLanguageModel = class {
             };
           }
         }
-        if (((
+        if (((_j = value.contentBlockDelta) == null ? void 0 : _j.delta) && "text" in value.contentBlockDelta.delta && value.contentBlockDelta.delta.text) {
           controller.enqueue({
             type: "text",
             text: value.contentBlockDelta.delta.text
           });
         }
-        if (((
+        if (((_k = value.contentBlockDelta) == null ? void 0 : _k.delta) && "reasoningContent" in value.contentBlockDelta.delta && value.contentBlockDelta.delta.reasoningContent) {
           const reasoningContent = value.contentBlockDelta.delta.reasoningContent;
           if ("text" in reasoningContent && reasoningContent.text) {
             controller.enqueue({
@@ -832,7 +835,7 @@ var BedrockChatLanguageModel = class {
           }
         }
         const contentBlockStart = value.contentBlockStart;
-        if (((
+        if (((_l = contentBlockStart == null ? void 0 : contentBlockStart.start) == null ? void 0 : _l.toolUse) != null) {
           const toolUse = contentBlockStart.start.toolUse;
           toolCallContentBlocks[contentBlockStart.contentBlockIndex] = {
             toolCallId: toolUse.toolUseId,
@@ -843,7 +846,7 @@ var BedrockChatLanguageModel = class {
         const contentBlockDelta = value.contentBlockDelta;
         if ((contentBlockDelta == null ? void 0 : contentBlockDelta.delta) && "toolUse" in contentBlockDelta.delta && contentBlockDelta.delta.toolUse) {
           const contentBlock = toolCallContentBlocks[contentBlockDelta.contentBlockIndex];
-          const delta = (
+          const delta = (_m = contentBlockDelta.delta.toolUse.input) != null ? _m : "";
           controller.enqueue({
             type: "tool-call-delta",
             toolCallType: "function",
@@ -989,6 +992,9 @@ var bedrockReasoningMetadataSchema = z3.object({
 });

 // src/bedrock-embedding-model.ts
+import {
+  TooManyEmbeddingValuesForCallError
+} from "@ai-sdk/provider";
 import {
   combineHeaders as combineHeaders2,
   createJsonErrorResponseHandler as createJsonErrorResponseHandler2,
@@ -1035,48 +1041,45 @@ var BedrockEmbeddingModel = class {
     providerOptions
   }) {
     var _a;
+    if (values.length > this.maxEmbeddingsPerCall) {
+      throw new TooManyEmbeddingValuesForCallError({
+        provider: this.provider,
+        modelId: this.modelId,
+        maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
+        values
+      });
+    }
     const bedrockOptions = (_a = await parseProviderOptions3({
       provider: "bedrock",
       providerOptions,
       schema: bedrockEmbeddingProviderOptions
     })) != null ? _a : {};
-    const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        inputTextTokenCount: response.inputTextTokenCount
-      };
+    const args = {
+      inputText: values[0],
+      dimensions: bedrockOptions.dimensions,
+      normalize: bedrockOptions.normalize
+    };
+    const url = this.getUrl(this.modelId);
+    const { value: response } = await postJsonToApi2({
+      url,
+      headers: await resolve2(
+        combineHeaders2(await resolve2(this.config.headers), headers)
+      ),
+      body: args,
+      failedResponseHandler: createJsonErrorResponseHandler2({
+        errorSchema: BedrockErrorSchema,
+        errorToMessage: (error) => `${error.type}: ${error.message}`
+      }),
+      successfulResponseHandler: createJsonResponseHandler2(
+        BedrockEmbeddingResponseSchema
+      ),
+      fetch: this.config.fetch,
+      abortSignal
+    });
+    return {
+      embeddings: [response.embedding],
+      usage: { tokens: response.inputTextTokenCount }
     };
-    const responses = await Promise.all(values.map(embedSingleText));
-    return responses.reduce(
-      (accumulated, response) => {
-        accumulated.embeddings.push(response.embedding);
-        accumulated.usage.tokens += response.inputTextTokenCount;
-        return accumulated;
-      },
-      { embeddings: [], usage: { tokens: 0 } }
-    );
   }
 };
 var BedrockEmbeddingResponseSchema = z5.object({
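On the embedding side, `doEmbed` now sends a single `inputText` per request and throws `TooManyEmbeddingValuesForCallError` when it receives more values than `maxEmbeddingsPerCall` allows, rather than fanning out with `Promise.all` internally; batching larger inputs is left to the caller (for example `embedMany`, which splits by `maxEmbeddingsPerCall`). A sketch under those assumptions; the model ID is illustrative, the `dimensions`/`normalize` option names come from `bedrockEmbeddingProviderOptions` in this file, and passing them via `embed`'s `providerOptions` is assumed to be forwarded as in the chat path:

```ts
import { bedrock } from '@ai-sdk/amazon-bedrock';
import { embed } from 'ai';

const { embedding, usage } = await embed({
  // Illustrative Bedrock embedding model ID.
  model: bedrock.textEmbeddingModel('amazon.titan-embed-text-v2:0'),
  value: 'sunny day at the beach',
  providerOptions: {
    bedrock: { dimensions: 512, normalize: true },
  },
});

console.log(embedding.length, usage.tokens);
```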