@ai-sdk/amazon-bedrock 3.0.55 → 3.0.57

package/CHANGELOG.md CHANGED
@@ -1,5 +1,39 @@
  # @ai-sdk/amazon-bedrock

+ ## 3.0.57
+
+ ### Patch Changes
+
+ - 708df55: feat(provider/amazon-bedrock,provider/google-vertex-anthropic): add support for tool calling with structured output
+
+ Added support for combining tool calling with structured outputs in both Amazon Bedrock and Google Vertex Anthropic providers. This allows developers to use tools (like weather lookups, web search, etc.) alongside structured JSON output schemas, enabling multi-step agentic workflows with structured final outputs.
+
+ **Amazon Bedrock Changes:**
+
+ - Removed incorrect warning that prevented using tools with JSON response format
+ - Updated tool choice to use `{ type: 'required' }` instead of specific tool selection when using structured outputs
+ - Added `isJsonResponseFromTool` parameter to finish reason mapping
+ - JSON tool responses are correctly converted to text content and finish reason is mapped from `tool_use` to `stop`
+ - Added comprehensive test coverage for combining tools with structured outputs
+ - Added example files demonstrating the feature
+
+ **Google Vertex Anthropic Changes:**
+
+ - Inherits support from underlying Anthropic provider implementation
+ - Added test coverage to verify the feature works correctly
+ - Added example files demonstrating the feature
+
+ This brings Anthropic provider's structured output capabilities to the Amazon Bedrock and Google Vertex Anthropic providers.
+
+ ## 3.0.56
+
+ ### Patch Changes
+
+ - f1f5804: fix(amazon-bedrock): clamp temperature to valid 0-1 range with warnings
+ - 9cb8436: fix(amazon-bedrock): move anthropic_beta to request body
+ - Updated dependencies [54b7c08]
+   - @ai-sdk/anthropic@2.0.45
+
  ## 3.0.55

  ### Patch Changes
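To make the 3.0.57 change concrete, here is a minimal sketch of combining a tool with a structured final output on Bedrock. It assumes the AI SDK's `generateText` with `Output.object` via `experimental_output` (verify the exact option names against your installed `ai` version); the model id, tool, and schema are illustrative only.

```ts
// Sketch only: tool calling plus structured output with the Bedrock provider.
// Model id, tool, and schema are illustrative; `experimental_output` / `Output`
// should be checked against the installed `ai` version.
import { generateText, tool, Output, stepCountIs } from 'ai';
import { bedrock } from '@ai-sdk/amazon-bedrock';
import { z } from 'zod';

const result = await generateText({
  model: bedrock('anthropic.claude-3-5-sonnet-20241022-v2:0'),
  tools: {
    weather: tool({
      description: 'Look up the current temperature for a city',
      inputSchema: z.object({ city: z.string() }),
      // Stubbed lookup so the example is self-contained.
      execute: async ({ city }) => ({ city, temperatureF: 72 }),
    }),
  },
  // Allow a few tool-calling steps before the structured final answer.
  stopWhen: stepCountIs(5),
  // As of 3.0.57 the internal "json" response tool is appended after user tools
  // and tool choice is set to { type: 'required' }, so both can be active.
  experimental_output: Output.object({
    schema: z.object({ city: z.string(), temperatureF: z.number() }),
  }),
  prompt: 'Look up the weather in Berlin and report it as structured data.',
});

console.log(result.experimental_output); // schema-validated final object
console.log(result.finishReason); // 'stop' when the json tool produced the output
```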
package/dist/index.d.mts CHANGED
@@ -10,6 +10,7 @@ declare const bedrockProviderOptions: z.ZodObject<{
  type: z.ZodOptional<z.ZodUnion<readonly [z.ZodLiteral<"enabled">, z.ZodLiteral<"disabled">]>>;
  budgetTokens: z.ZodOptional<z.ZodNumber>;
  }, z.core.$strip>>;
+ anthropicBeta: z.ZodOptional<z.ZodArray<z.ZodString>>;
  }, z.core.$strip>;
  type BedrockProviderOptions = z.infer<typeof bedrockProviderOptions>;

package/dist/index.d.ts CHANGED
@@ -10,6 +10,7 @@ declare const bedrockProviderOptions: z.ZodObject<{
  type: z.ZodOptional<z.ZodUnion<readonly [z.ZodLiteral<"enabled">, z.ZodLiteral<"disabled">]>>;
  budgetTokens: z.ZodOptional<z.ZodNumber>;
  }, z.core.$strip>>;
+ anthropicBeta: z.ZodOptional<z.ZodArray<z.ZodString>>;
  }, z.core.$strip>;
  type BedrockProviderOptions = z.infer<typeof bedrockProviderOptions>;

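The new `anthropicBeta` field in the provider options above pairs with the 3.0.56 fixes (temperature clamping and moving `anthropic_beta` into the request body). A small sketch of how these surface at the call site; the beta flag string below is a placeholder, not a real feature name.

```ts
// Sketch only: the new `anthropicBeta` provider option and the 0-1 temperature
// clamping from 3.0.56. The beta flag value is a placeholder.
import { generateText } from 'ai';
import { bedrock } from '@ai-sdk/amazon-bedrock';

const { text, warnings } = await generateText({
  model: bedrock('anthropic.claude-3-5-sonnet-20241022-v2:0'),
  prompt: 'Summarize this release in one sentence.',
  // Out-of-range values are now clamped to [0, 1] and reported as a call
  // warning instead of being passed through to Bedrock.
  temperature: 1.5,
  providerOptions: {
    bedrock: {
      // Merged into the request body as `anthropic_beta` (no longer sent as
      // an `anthropic-beta` HTTP header).
      anthropicBeta: ['example-beta-flag'],
    },
  },
});

console.log(warnings); // expect an 'unsupported-setting' warning for temperature
console.log(text);
```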
package/dist/index.js CHANGED
@@ -30,7 +30,7 @@ module.exports = __toCommonJS(src_exports);
  var import_provider_utils8 = require("@ai-sdk/provider-utils");

  // src/version.ts
- var VERSION = true ? "3.0.55" : "0.0.0-test";
+ var VERSION = true ? "3.0.57" : "0.0.0-test";

  // src/bedrock-provider.ts
  var import_internal2 = require("@ai-sdk/anthropic/internal");
@@ -97,7 +97,11 @@ var bedrockProviderOptions = import_v4.z.object({
  reasoningConfig: import_v4.z.object({
  type: import_v4.z.union([import_v4.z.literal("enabled"), import_v4.z.literal("disabled")]).optional(),
  budgetTokens: import_v4.z.number().optional()
- }).optional()
+ }).optional(),
+ /**
+ * Anthropic beta features to enable
+ */
+ anthropicBeta: import_v4.z.array(import_v4.z.string()).optional()
  });

  // src/bedrock-error.ts
@@ -645,7 +649,7 @@ function groupIntoBlocks(prompt) {
  }

  // src/map-bedrock-finish-reason.ts
- function mapBedrockFinishReason(finishReason) {
+ function mapBedrockFinishReason(finishReason, isJsonResponseFromTool) {
  switch (finishReason) {
  case "stop_sequence":
  case "end_turn":
@@ -656,7 +660,7 @@ function mapBedrockFinishReason(finishReason) {
  case "guardrail_intervened":
  return "content-filter";
  case "tool_use":
- return "tool-calls";
+ return isJsonResponseFromTool ? "stop" : "tool-calls";
  default:
  return "unknown";
  }
@@ -688,7 +692,7 @@ var BedrockChatLanguageModel = class {
  toolChoice,
  providerOptions
  }) {
- var _a, _b, _c, _d, _e, _f;
+ var _a, _b, _c, _d, _e, _f, _g;
  const bedrockOptions = (_a = await (0, import_provider_utils4.parseProviderOptions)({
  provider: "bedrock",
  providerOptions,
@@ -713,6 +717,21 @@ var BedrockChatLanguageModel = class {
  setting: "seed"
  });
  }
+ if (temperature != null && temperature > 1) {
+ warnings.push({
+ type: "unsupported-setting",
+ setting: "temperature",
+ details: `${temperature} exceeds bedrock maximum of 1.0. clamped to 1.0`
+ });
+ temperature = 1;
+ } else if (temperature != null && temperature < 0) {
+ warnings.push({
+ type: "unsupported-setting",
+ setting: "temperature",
+ details: `${temperature} is below bedrock minimum of 0. clamped to 0`
+ });
+ temperature = 0;
+ }
  if (responseFormat != null && responseFormat.type !== "text" && responseFormat.type !== "json") {
  warnings.push({
  type: "unsupported-setting",
@@ -720,14 +739,6 @@ var BedrockChatLanguageModel = class {
  details: "Only text and json response formats are supported."
  });
  }
- if (tools != null && (responseFormat == null ? void 0 : responseFormat.type) === "json") {
- if (tools.length > 0) {
- warnings.push({
- type: "other",
- message: "JSON response format does not support tools. The provided tools are ignored."
- });
- }
- }
  const jsonResponseTool = (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null ? {
  type: "function",
  name: "json",
@@ -735,8 +746,8 @@ var BedrockChatLanguageModel = class {
  inputSchema: responseFormat.schema
  } : void 0;
  const { toolConfig, additionalTools, toolWarnings, betas } = await prepareTools({
- tools: jsonResponseTool ? [jsonResponseTool, ...tools != null ? tools : []] : tools,
- toolChoice: jsonResponseTool != null ? { type: "tool", toolName: jsonResponseTool.name } : toolChoice,
+ tools: jsonResponseTool ? [...tools != null ? tools : [], jsonResponseTool] : tools,
+ toolChoice: jsonResponseTool != null ? { type: "required" } : toolChoice,
  modelId: this.modelId
  });
  warnings.push(...toolWarnings);
@@ -746,8 +757,16 @@ var BedrockChatLanguageModel = class {
  ...additionalTools
  };
  }
- const isThinking = ((_b = bedrockOptions.reasoningConfig) == null ? void 0 : _b.type) === "enabled";
- const thinkingBudget = (_c = bedrockOptions.reasoningConfig) == null ? void 0 : _c.budgetTokens;
+ if (betas.size > 0 || bedrockOptions.anthropicBeta) {
+ const existingBetas = (_b = bedrockOptions.anthropicBeta) != null ? _b : [];
+ const mergedBetas = betas.size > 0 ? [...existingBetas, ...Array.from(betas)] : existingBetas;
+ bedrockOptions.additionalModelRequestFields = {
+ ...bedrockOptions.additionalModelRequestFields,
+ anthropic_beta: mergedBetas
+ };
+ }
+ const isThinking = ((_c = bedrockOptions.reasoningConfig) == null ? void 0 : _c.type) === "enabled";
+ const thinkingBudget = (_d = bedrockOptions.reasoningConfig) == null ? void 0 : _d.budgetTokens;
  const inferenceConfig = {
  ...maxOutputTokens != null && { maxTokens: maxOutputTokens },
  ...temperature != null && { temperature },
@@ -764,7 +783,7 @@ var BedrockChatLanguageModel = class {
  bedrockOptions.additionalModelRequestFields = {
  ...bedrockOptions.additionalModelRequestFields,
  thinking: {
- type: (_d = bedrockOptions.reasoningConfig) == null ? void 0 : _d.type,
+ type: (_e = bedrockOptions.reasoningConfig) == null ? void 0 : _e.type,
  budget_tokens: thinkingBudget
  }
  };
@@ -793,7 +812,7 @@ var BedrockChatLanguageModel = class {
  details: "topK is not supported when thinking is enabled"
  });
  }
- const hasAnyTools = ((_f = (_e = toolConfig.tools) == null ? void 0 : _e.length) != null ? _f : 0) > 0 || additionalTools;
+ const hasAnyTools = ((_g = (_f = toolConfig.tools) == null ? void 0 : _f.length) != null ? _g : 0) > 0 || additionalTools;
  let filteredPrompt = prompt;
  if (!hasAnyTools) {
  const hasToolContent = prompt.some(
@@ -842,27 +861,21 @@ var BedrockChatLanguageModel = class {
  };
  }
  async getHeaders({
- betas,
  headers
  }) {
- return (0, import_provider_utils4.combineHeaders)(
- await (0, import_provider_utils4.resolve)(this.config.headers),
- betas.size > 0 ? { "anthropic-beta": Array.from(betas).join(",") } : {},
- headers
- );
+ return (0, import_provider_utils4.combineHeaders)(await (0, import_provider_utils4.resolve)(this.config.headers), headers);
  }
  async doGenerate(options) {
  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
  const {
  command: args,
  warnings,
- usesJsonResponseTool,
- betas
+ usesJsonResponseTool
  } = await this.getArgs(options);
  const url = `${this.getUrl(this.modelId)}/converse`;
  const { value: response, responseHeaders } = await (0, import_provider_utils4.postJsonToApi)({
  url,
- headers: await this.getHeaders({ betas, headers: options.headers }),
+ headers: await this.getHeaders({ headers: options.headers }),
  body: args,
  failedResponseHandler: (0, import_provider_utils4.createJsonErrorResponseHandler)({
  errorSchema: BedrockErrorSchema,
@@ -878,11 +891,10 @@ var BedrockChatLanguageModel = class {
  fetch: this.config.fetch
  });
  const content = [];
+ let isJsonResponseFromTool = false;
  for (const part of response.output.message.content) {
  if (part.text) {
- if (!usesJsonResponseTool) {
- content.push({ type: "text", text: part.text });
- }
+ content.push({ type: "text", text: part.text });
  }
  if (part.reasoningContent) {
  if ("reasoningText" in part.reasoningContent) {
@@ -911,21 +923,24 @@ var BedrockChatLanguageModel = class {
  }
  }
  if (part.toolUse) {
- content.push(
- // when a json response tool is used, the tool call becomes the text:
- usesJsonResponseTool ? {
+ const isJsonResponseTool = usesJsonResponseTool && part.toolUse.name === "json";
+ if (isJsonResponseTool) {
+ isJsonResponseFromTool = true;
+ content.push({
  type: "text",
  text: JSON.stringify(part.toolUse.input)
- } : {
+ });
+ } else {
+ content.push({
  type: "tool-call",
  toolCallId: (_c = (_b = part.toolUse) == null ? void 0 : _b.toolUseId) != null ? _c : this.config.generateId(),
  toolName: (_e = (_d = part.toolUse) == null ? void 0 : _d.name) != null ? _e : `tool-${this.config.generateId()}`,
  input: JSON.stringify((_g = (_f = part.toolUse) == null ? void 0 : _f.input) != null ? _g : "")
- }
- );
+ });
+ }
  }
  }
- const providerMetadata = response.trace || response.usage || usesJsonResponseTool ? {
+ const providerMetadata = response.trace || response.usage || isJsonResponseFromTool ? {
  bedrock: {
  ...response.trace && typeof response.trace === "object" ? { trace: response.trace } : {},
  ...((_h = response.usage) == null ? void 0 : _h.cacheWriteInputTokens) != null && {
@@ -933,13 +948,14 @@ var BedrockChatLanguageModel = class {
  cacheWriteInputTokens: response.usage.cacheWriteInputTokens
  }
  },
- ...usesJsonResponseTool && { isJsonResponseFromTool: true }
+ ...isJsonResponseFromTool && { isJsonResponseFromTool: true }
  }
  } : void 0;
  return {
  content,
  finishReason: mapBedrockFinishReason(
- response.stopReason
+ response.stopReason,
+ isJsonResponseFromTool
  ),
  usage: {
  inputTokens: (_i = response.usage) == null ? void 0 : _i.inputTokens,
@@ -959,13 +975,12 @@ var BedrockChatLanguageModel = class {
  const {
  command: args,
  warnings,
- usesJsonResponseTool,
- betas
+ usesJsonResponseTool
  } = await this.getArgs(options);
  const url = `${this.getUrl(this.modelId)}/converse-stream`;
  const { value: response, responseHeaders } = await (0, import_provider_utils4.postJsonToApi)({
  url,
- headers: await this.getHeaders({ betas, headers: options.headers }),
+ headers: await this.getHeaders({ headers: options.headers }),
  body: args,
  failedResponseHandler: (0, import_provider_utils4.createJsonErrorResponseHandler)({
  errorSchema: BedrockErrorSchema,
@@ -982,6 +997,7 @@ var BedrockChatLanguageModel = class {
  totalTokens: void 0
  };
  let providerMetadata = void 0;
+ let isJsonResponseFromTool = false;
  const contentBlocks = {};
  return {
  stream: response.pipeThrough(
@@ -1021,7 +1037,8 @@ var BedrockChatLanguageModel = class {
  }
  if (value.messageStop) {
  finishReason = mapBedrockFinishReason(
- value.messageStop.stopReason
+ value.messageStop.stopReason,
+ isJsonResponseFromTool
  );
  }
  if (value.metadata) {
@@ -1037,14 +1054,11 @@ var BedrockChatLanguageModel = class {
  const trace = value.metadata.trace ? {
  trace: value.metadata.trace
  } : void 0;
- if (cacheUsage || trace || usesJsonResponseTool) {
+ if (cacheUsage || trace) {
  providerMetadata = {
  bedrock: {
  ...cacheUsage,
- ...trace,
- ...usesJsonResponseTool && {
- isJsonResponseFromTool: true
- }
+ ...trace
  }
  };
  }
@@ -1061,20 +1075,16 @@ var BedrockChatLanguageModel = class {
  const blockIndex = value.contentBlockDelta.contentBlockIndex || 0;
  if (contentBlocks[blockIndex] == null) {
  contentBlocks[blockIndex] = { type: "text" };
- if (!usesJsonResponseTool) {
- controller.enqueue({
- type: "text-start",
- id: String(blockIndex)
- });
- }
- }
- if (!usesJsonResponseTool) {
  controller.enqueue({
- type: "text-delta",
- id: String(blockIndex),
- delta: value.contentBlockDelta.delta.text
+ type: "text-start",
+ id: String(blockIndex)
  });
  }
+ controller.enqueue({
+ type: "text-delta",
+ id: String(blockIndex),
+ delta: value.contentBlockDelta.delta.text
+ });
  }
  if (((_n = value.contentBlockStop) == null ? void 0 : _n.contentBlockIndex) != null) {
  const blockIndex = value.contentBlockStop.contentBlockIndex;
@@ -1086,14 +1096,13 @@ var BedrockChatLanguageModel = class {
  id: String(blockIndex)
  });
  } else if (contentBlock.type === "text") {
- if (!usesJsonResponseTool) {
- controller.enqueue({
- type: "text-end",
- id: String(blockIndex)
- });
- }
+ controller.enqueue({
+ type: "text-end",
+ id: String(blockIndex)
+ });
  } else if (contentBlock.type === "tool-call") {
- if (usesJsonResponseTool) {
+ if (contentBlock.isJsonResponseTool) {
+ isJsonResponseFromTool = true;
  controller.enqueue({
  type: "text-start",
  id: String(blockIndex)
@@ -1167,13 +1176,15 @@ var BedrockChatLanguageModel = class {
  if (((_p = contentBlockStart == null ? void 0 : contentBlockStart.start) == null ? void 0 : _p.toolUse) != null) {
  const toolUse = contentBlockStart.start.toolUse;
  const blockIndex = contentBlockStart.contentBlockIndex;
+ const isJsonResponseTool = usesJsonResponseTool && toolUse.name === "json";
  contentBlocks[blockIndex] = {
  type: "tool-call",
  toolCallId: toolUse.toolUseId,
  toolName: toolUse.name,
- jsonText: ""
+ jsonText: "",
+ isJsonResponseTool
  };
- if (!usesJsonResponseTool) {
+ if (!isJsonResponseTool) {
  controller.enqueue({
  type: "tool-input-start",
  id: toolUse.toolUseId,
@@ -1187,7 +1198,7 @@ var BedrockChatLanguageModel = class {
  const contentBlock = contentBlocks[blockIndex];
  if ((contentBlock == null ? void 0 : contentBlock.type) === "tool-call") {
  const delta = (_q = contentBlockDelta.delta.toolUse.input) != null ? _q : "";
- if (!usesJsonResponseTool) {
+ if (!contentBlock.isJsonResponseTool) {
  controller.enqueue({
  type: "tool-input-delta",
  id: contentBlock.toolCallId,
@@ -1199,6 +1210,20 @@ var BedrockChatLanguageModel = class {
  }
  },
  flush(controller) {
+ if (isJsonResponseFromTool) {
+ if (providerMetadata) {
+ providerMetadata.bedrock = {
+ ...providerMetadata.bedrock,
+ isJsonResponseFromTool: true
+ };
+ } else {
+ providerMetadata = {
+ bedrock: {
+ isJsonResponseFromTool: true
+ }
+ };
+ }
+ }
  controller.enqueue({
  type: "finish",
  finishReason,