@ai-sdk/amazon-bedrock 3.0.0-canary.5 → 3.0.0-canary.7

This diff compares the contents of publicly released package versions as published to a supported public registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in that registry.
package/dist/index.mjs CHANGED
@@ -7,17 +7,15 @@ import {
  } from "@ai-sdk/provider-utils";

  // src/bedrock-chat-language-model.ts
- import {
- InvalidArgumentError
- } from "@ai-sdk/provider";
  import {
  combineHeaders,
  createJsonErrorResponseHandler,
  createJsonResponseHandler,
  postJsonToApi,
- resolve
+ resolve,
+ parseProviderOptions
  } from "@ai-sdk/provider-utils";
- import { z as z2 } from "zod";
+ import { z as z3 } from "zod";

  // src/bedrock-api-types.ts
  var BEDROCK_CACHE_POINT = {
@@ -36,11 +34,26 @@ var BEDROCK_STOP_REASONS = [
  "tool_use"
  ];

- // src/bedrock-error.ts
+ // src/bedrock-chat-options.ts
  import { z } from "zod";
- var BedrockErrorSchema = z.object({
- message: z.string(),
- type: z.string().nullish()
+ var bedrockProviderOptions = z.object({
+ /**
+ * Additional inference parameters that the model supports,
+ * beyond the base set of inference parameters that Converse
+ * supports in the inferenceConfig field
+ */
+ additionalModelRequestFields: z.record(z.any()).optional(),
+ reasoningConfig: z.object({
+ type: z.union([z.literal("enabled"), z.literal("disabled")]).nullish(),
+ budgetTokens: z.number().nullish()
+ }).nullish()
+ });
+
+ // src/bedrock-error.ts
+ import { z as z2 } from "zod";
+ var BedrockErrorSchema = z2.object({
+ message: z2.string(),
+ type: z2.string().nullish()
  });

  // src/bedrock-event-stream-response-handler.ts
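
Note: reasoning options that were previously parsed ad hoc from providerOptions.bedrock.reasoning_config are now validated against this bedrockProviderOptions schema (camelCase reasoningConfig / budgetTokens), and additionalModelRequestFields moves here from the constructor settings. A minimal usage sketch, assuming AI SDK 5 canary call shapes; the model ID, region, and token numbers are illustrative only:

  import { createAmazonBedrock } from "@ai-sdk/amazon-bedrock";
  import { generateText } from "ai";

  const bedrock = createAmazonBedrock({ region: "us-east-1" });

  const { text } = await generateText({
    model: bedrock("anthropic.claude-3-7-sonnet-20250219-v1:0"), // illustrative model ID
    prompt: "Explain the Bedrock Converse API in one paragraph.",
    maxOutputTokens: 1024,
    providerOptions: {
      bedrock: {
        // validated by bedrockProviderOptions above
        reasoningConfig: { type: "enabled", budgetTokens: 2048 },
      },
    },
  });
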
@@ -194,7 +207,7 @@ function prepareTools({
  import {
  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
  } from "@ai-sdk/provider";
- import { createIdGenerator } from "@ai-sdk/provider-utils";
+ import { convertToBase64, createIdGenerator } from "@ai-sdk/provider-utils";
  var generateFileId = createIdGenerator({ prefix: "file", size: 16 });
  function getCachePoint(providerMetadata) {
  var _a;
@@ -250,7 +263,7 @@ function convertToBedrockChatMessages(prompt) {
  bedrockContent.push({
  image: {
  format: bedrockImageFormat,
- source: { bytes: part.data }
+ source: { bytes: convertToBase64(part.data) }
  }
  });
  } else {
@@ -260,9 +273,7 @@ function convertToBedrockChatMessages(prompt) {
  "/"
  )) == null ? void 0 : _d[1],
  name: generateFileId(),
- source: {
- bytes: part.data
- }
+ source: { bytes: convertToBase64(part.data) }
  }
  });
  }
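
Note: image and file parts are now passed through convertToBase64 before being written into the Converse content block, so binary prompt parts no longer reach the request body as raw bytes. A rough sketch of the effect, assuming convertToBase64 accepts a Uint8Array (and leaves base64 strings unchanged):

  import { convertToBase64 } from "@ai-sdk/provider-utils";

  // illustrative bytes only (the first four bytes of a PNG header)
  const data = new Uint8Array([0x89, 0x50, 0x4e, 0x47]);

  // shape of the block now produced by convertToBedrockChatMessages for an image part
  const imageBlock = {
    image: {
      format: "png",
      source: { bytes: convertToBase64(data) }, // base64 string rather than raw bytes
    },
  };
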
@@ -479,9 +490,8 @@ function mapBedrockFinishReason(finishReason) {

  // src/bedrock-chat-language-model.ts
  var BedrockChatLanguageModel = class {
- constructor(modelId, settings, config) {
+ constructor(modelId, config) {
  this.modelId = modelId;
- this.settings = settings;
  this.config = config;
  this.specificationVersion = "v2";
  this.provider = "amazon-bedrock";
@@ -490,7 +500,7 @@ var BedrockChatLanguageModel = class {
  }
  getArgs({
  prompt,
- maxTokens,
+ maxOutputTokens,
  temperature,
  topP,
  topK,
@@ -503,7 +513,12 @@ var BedrockChatLanguageModel = class {
  toolChoice,
  providerOptions
  }) {
- var _a, _b, _c, _d, _e, _f, _g;
+ var _a, _b, _c, _d, _e;
+ const bedrockOptions = (_a = parseProviderOptions({
+ provider: "bedrock",
+ providerOptions,
+ schema: bedrockProviderOptions
+ })) != null ? _a : {};
  const warnings = [];
  if (frequencyPenalty != null) {
  warnings.push({
@@ -537,34 +552,24 @@ var BedrockChatLanguageModel = class {
  });
  }
  const { system, messages } = convertToBedrockChatMessages(prompt);
- const reasoningConfigOptions = BedrockReasoningConfigOptionsSchema.safeParse(
- (_a = providerOptions == null ? void 0 : providerOptions.bedrock) == null ? void 0 : _a.reasoning_config
- );
- if (!reasoningConfigOptions.success) {
- throw new InvalidArgumentError({
- argument: "providerOptions.bedrock.reasoning_config",
- message: "invalid reasoning configuration options",
- cause: reasoningConfigOptions.error
- });
- }
- const isThinking = ((_b = reasoningConfigOptions.data) == null ? void 0 : _b.type) === "enabled";
- const thinkingBudget = (_e = (_c = reasoningConfigOptions.data) == null ? void 0 : _c.budgetTokens) != null ? _e : (_d = reasoningConfigOptions.data) == null ? void 0 : _d.budget_tokens;
+ const isThinking = ((_b = bedrockOptions.reasoningConfig) == null ? void 0 : _b.type) === "enabled";
+ const thinkingBudget = (_c = bedrockOptions.reasoningConfig) == null ? void 0 : _c.budgetTokens;
  const inferenceConfig = {
- ...maxTokens != null && { maxTokens },
+ ...maxOutputTokens != null && { maxOutputTokens },
  ...temperature != null && { temperature },
  ...topP != null && { topP },
  ...stopSequences != null && { stopSequences }
  };
  if (isThinking && thinkingBudget != null) {
- if (inferenceConfig.maxTokens != null) {
- inferenceConfig.maxTokens += thinkingBudget;
+ if (inferenceConfig.maxOutputTokens != null) {
+ inferenceConfig.maxOutputTokens += thinkingBudget;
  } else {
- inferenceConfig.maxTokens = thinkingBudget + 4096;
+ inferenceConfig.maxOutputTokens = thinkingBudget + 4096;
  }
- this.settings.additionalModelRequestFields = {
- ...this.settings.additionalModelRequestFields,
- reasoning_config: {
- type: (_f = reasoningConfigOptions.data) == null ? void 0 : _f.type,
+ bedrockOptions.additionalModelRequestFields = {
+ ...bedrockOptions.additionalModelRequestFields,
+ reasoningConfig: {
+ type: (_d = bedrockOptions.reasoningConfig) == null ? void 0 : _d.type,
  budget_tokens: thinkingBudget
  }
  };
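
Note: the thinking-budget arithmetic is unchanged in substance, but it now targets maxOutputTokens and the per-call bedrockOptions instead of mutating this.settings. A small illustrative helper (not part of the package) that mirrors the branch above:

  // Mirrors the budget handling in getArgs (illustrative only).
  function applyThinkingBudget(
    maxOutputTokens: number | undefined,
    budgetTokens: number,
  ): number {
    if (maxOutputTokens != null) {
      // explicit limit: the reasoning budget is added on top of it
      return maxOutputTokens + budgetTokens;
    }
    // no limit given: reserve a 4096-token default for the visible answer
    return budgetTokens + 4096;
  }

  applyThinkingBudget(1024, 2048);      // 3072
  applyThinkingBudget(undefined, 2048); // 6144
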
@@ -590,18 +595,18 @@ var BedrockChatLanguageModel = class {
  command: {
  system,
  messages,
- additionalModelRequestFields: this.settings.additionalModelRequestFields,
+ additionalModelRequestFields: bedrockOptions.additionalModelRequestFields,
  ...Object.keys(inferenceConfig).length > 0 && {
  inferenceConfig
  },
  ...providerOptions == null ? void 0 : providerOptions.bedrock,
- ...((_g = toolConfig.tools) == null ? void 0 : _g.length) ? { toolConfig } : {}
+ ...((_e = toolConfig.tools) == null ? void 0 : _e.length) ? { toolConfig } : {}
  },
  warnings: [...warnings, ...toolWarnings]
  };
  }
  async doGenerate(options) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
  const { command: args, warnings } = this.getArgs(options);
  const url = `${this.getUrl(this.modelId)}/converse`;
  const { value: response, responseHeaders } = await postJsonToApi({
@@ -635,33 +640,41 @@ var BedrockChatLanguageModel = class {
  }
  }
  } : void 0;
- const reasoning = response.output.message.content.filter((content) => content.reasoningContent).map((content) => {
- var _a2;
- if (content.reasoningContent && "reasoningText" in content.reasoningContent) {
- return {
- type: "text",
- text: content.reasoningContent.reasoningText.text,
- ...content.reasoningContent.reasoningText.signature && {
- signature: content.reasoningContent.reasoningText.signature
+ const reasoning = [];
+ for (const content of response.output.message.content) {
+ if (content.reasoningContent) {
+ if ("reasoningText" in content.reasoningContent) {
+ reasoning.push({
+ type: "reasoning",
+ reasoningType: "text",
+ text: content.reasoningContent.reasoningText.text
+ });
+ if (content.reasoningContent.reasoningText.signature) {
+ reasoning.push({
+ type: "reasoning",
+ reasoningType: "signature",
+ signature: content.reasoningContent.reasoningText.signature
+ });
  }
- };
- } else if (content.reasoningContent && "redactedReasoning" in content.reasoningContent) {
- return {
- type: "redacted",
- data: (_a2 = content.reasoningContent.redactedReasoning.data) != null ? _a2 : ""
- };
- } else {
- return void 0;
+ } else if ("redactedReasoning" in content.reasoningContent) {
+ reasoning.push({
+ type: "reasoning",
+ reasoningType: "redacted",
+ data: (_e = content.reasoningContent.redactedReasoning.data) != null ? _e : ""
+ });
+ }
  }
- }).filter((item) => item !== void 0);
+ }
+ const text = (_h = (_g = (_f = response.output) == null ? void 0 : _f.message) == null ? void 0 : _g.content) == null ? void 0 : _h.map((part) => {
+ var _a2;
+ return (_a2 = part.text) != null ? _a2 : "";
+ }).join("");
  return {
- text: (_h = (_g = (_f = (_e = response.output) == null ? void 0 : _e.message) == null ? void 0 : _f.content) == null ? void 0 : _g.map((part) => {
- var _a2;
- return (_a2 = part.text) != null ? _a2 : "";
- }).join("")) != null ? _h : void 0,
+ text: text != null ? { type: "text", text } : void 0,
  toolCalls: (_l = (_k = (_j = (_i = response.output) == null ? void 0 : _i.message) == null ? void 0 : _j.content) == null ? void 0 : _k.filter((part) => !!part.toolUse)) == null ? void 0 : _l.map((part) => {
  var _a2, _b2, _c2, _d2, _e2, _f2;
  return {
+ type: "tool-call",
  toolCallType: "function",
  toolCallId: (_b2 = (_a2 = part.toolUse) == null ? void 0 : _a2.toolUseId) != null ? _b2 : this.config.generateId(),
  toolName: (_d2 = (_c2 = part.toolUse) == null ? void 0 : _c2.name) != null ? _d2 : `tool-${this.config.generateId()}`,
@@ -672,8 +685,8 @@ var BedrockChatLanguageModel = class {
  response.stopReason
  ),
  usage: {
- promptTokens: (_n = (_m = response.usage) == null ? void 0 : _m.inputTokens) != null ? _n : Number.NaN,
- completionTokens: (_p = (_o = response.usage) == null ? void 0 : _o.outputTokens) != null ? _p : Number.NaN
+ inputTokens: (_m = response.usage) == null ? void 0 : _m.inputTokens,
+ outputTokens: (_n = response.usage) == null ? void 0 : _n.outputTokens
  },
  response: {
  // TODO add id, timestamp, etc
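
Note: doGenerate now emits structured parts and the v2 usage shape: reasoning entries carry a reasoningType discriminator, text is wrapped as { type: "text", text }, tool calls gain type: "tool-call", and usage is reported as inputTokens / outputTokens (left undefined instead of NaN when Bedrock omits them). A type-level sketch of the shapes built in the two hunks above; the type names are local to this note, not exports of the package:

  type BedrockReasoningPart =
    | { type: "reasoning"; reasoningType: "text"; text: string }
    | { type: "reasoning"; reasoningType: "signature"; signature: string }
    | { type: "reasoning"; reasoningType: "redacted"; data: string };

  type BedrockTextPart = { type: "text"; text: string };

  type BedrockUsage = {
    inputTokens: number | undefined;  // formerly promptTokens, defaulting to NaN
    outputTokens: number | undefined; // formerly completionTokens, defaulting to NaN
  };
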
@@ -703,9 +716,9 @@ var BedrockChatLanguageModel = class {
  fetch: this.config.fetch
  });
  let finishReason = "unknown";
- let usage = {
- promptTokens: Number.NaN,
- completionTokens: Number.NaN
+ const usage = {
+ inputTokens: void 0,
+ outputTokens: void 0
  };
  let providerMetadata = void 0;
  const toolCallContentBlocks = {};
@@ -745,10 +758,8 @@ var BedrockChatLanguageModel = class {
  );
  }
  if (value.metadata) {
- usage = {
- promptTokens: (_b = (_a = value.metadata.usage) == null ? void 0 : _a.inputTokens) != null ? _b : Number.NaN,
- completionTokens: (_d = (_c = value.metadata.usage) == null ? void 0 : _c.outputTokens) != null ? _d : Number.NaN
- };
+ usage.inputTokens = (_b = (_a = value.metadata.usage) == null ? void 0 : _a.inputTokens) != null ? _b : usage.inputTokens;
+ usage.outputTokens = (_d = (_c = value.metadata.usage) == null ? void 0 : _c.outputTokens) != null ? _d : usage.outputTokens;
  const cacheUsage = ((_e = value.metadata.usage) == null ? void 0 : _e.cacheReadInputTokens) != null || ((_f = value.metadata.usage) == null ? void 0 : _f.cacheWriteInputTokens) != null ? {
  usage: {
  cacheReadInputTokens: (_h = (_g = value.metadata.usage) == null ? void 0 : _g.cacheReadInputTokens) != null ? _h : Number.NaN,
@@ -769,8 +780,8 @@ var BedrockChatLanguageModel = class {
  }
  if (((_k = value.contentBlockDelta) == null ? void 0 : _k.delta) && "text" in value.contentBlockDelta.delta && value.contentBlockDelta.delta.text) {
  controller.enqueue({
- type: "text-delta",
- textDelta: value.contentBlockDelta.delta.text
+ type: "text",
+ text: value.contentBlockDelta.delta.text
  });
  }
  if (((_l = value.contentBlockDelta) == null ? void 0 : _l.delta) && "reasoningContent" in value.contentBlockDelta.delta && value.contentBlockDelta.delta.reasoningContent) {
@@ -778,16 +789,19 @@ var BedrockChatLanguageModel = class {
  if ("text" in reasoningContent && reasoningContent.text) {
  controller.enqueue({
  type: "reasoning",
- textDelta: reasoningContent.text
+ reasoningType: "text",
+ text: reasoningContent.text
  });
  } else if ("signature" in reasoningContent && reasoningContent.signature) {
  controller.enqueue({
- type: "reasoning-signature",
+ type: "reasoning",
+ reasoningType: "signature",
  signature: reasoningContent.signature
  });
  } else if ("data" in reasoningContent && reasoningContent.data) {
  controller.enqueue({
- type: "redacted-reasoning",
+ type: "reasoning",
+ reasoningType: "redacted",
  data: reasoningContent.data
  });
  }
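
Note: the stream path mirrors the same reshaping: text deltas are emitted as { type: "text", text } instead of { type: "text-delta", textDelta }, and the former "reasoning" / "reasoning-signature" / "redacted-reasoning" parts collapse into a single type: "reasoning" with a reasoningType discriminator. A hypothetical consumer of these stream parts, with the union written out from the hunks above:

  type BedrockStreamPart =
    | { type: "text"; text: string }
    | { type: "reasoning"; reasoningType: "text"; text: string }
    | { type: "reasoning"; reasoningType: "signature"; signature: string }
    | { type: "reasoning"; reasoningType: "redacted"; data: string };

  // Hypothetical handler; real consumers usually go through the AI SDK rather than raw parts.
  function handlePart(part: BedrockStreamPart): void {
    if (part.type === "text") {
      process.stdout.write(part.text);
    } else if (part.reasoningType === "text") {
      process.stdout.write(part.text);
    }
    // "signature" and "redacted" parts can be buffered and sent back on follow-up requests.
  }
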
@@ -849,105 +863,100 @@ var BedrockChatLanguageModel = class {
  return `${this.config.baseUrl()}/model/${encodedModelId}`;
  }
  };
- var BedrockReasoningConfigOptionsSchema = z2.object({
- type: z2.union([z2.literal("enabled"), z2.literal("disabled")]).nullish(),
- budget_tokens: z2.number().nullish(),
- budgetTokens: z2.number().nullish()
- }).nullish();
- var BedrockStopReasonSchema = z2.union([
- z2.enum(BEDROCK_STOP_REASONS),
- z2.string()
+ var BedrockStopReasonSchema = z3.union([
+ z3.enum(BEDROCK_STOP_REASONS),
+ z3.string()
  ]);
- var BedrockToolUseSchema = z2.object({
- toolUseId: z2.string(),
- name: z2.string(),
- input: z2.unknown()
+ var BedrockToolUseSchema = z3.object({
+ toolUseId: z3.string(),
+ name: z3.string(),
+ input: z3.unknown()
  });
- var BedrockReasoningTextSchema = z2.object({
- signature: z2.string().nullish(),
- text: z2.string()
+ var BedrockReasoningTextSchema = z3.object({
+ signature: z3.string().nullish(),
+ text: z3.string()
  });
- var BedrockRedactedReasoningSchema = z2.object({
- data: z2.string()
+ var BedrockRedactedReasoningSchema = z3.object({
+ data: z3.string()
  });
- var BedrockResponseSchema = z2.object({
- metrics: z2.object({
- latencyMs: z2.number()
+ var BedrockResponseSchema = z3.object({
+ metrics: z3.object({
+ latencyMs: z3.number()
  }).nullish(),
- output: z2.object({
- message: z2.object({
- content: z2.array(
- z2.object({
- text: z2.string().nullish(),
+ output: z3.object({
+ message: z3.object({
+ content: z3.array(
+ z3.object({
+ text: z3.string().nullish(),
  toolUse: BedrockToolUseSchema.nullish(),
- reasoningContent: z2.union([
- z2.object({
+ reasoningContent: z3.union([
+ z3.object({
  reasoningText: BedrockReasoningTextSchema
  }),
- z2.object({
+ z3.object({
  redactedReasoning: BedrockRedactedReasoningSchema
  })
  ]).nullish()
  })
  ),
- role: z2.string()
+ role: z3.string()
  })
  }),
  stopReason: BedrockStopReasonSchema,
- trace: z2.unknown().nullish(),
- usage: z2.object({
- inputTokens: z2.number(),
- outputTokens: z2.number(),
- totalTokens: z2.number(),
- cacheReadInputTokens: z2.number().nullish(),
- cacheWriteInputTokens: z2.number().nullish()
+ trace: z3.unknown().nullish(),
+ usage: z3.object({
+ inputTokens: z3.number(),
+ outputTokens: z3.number(),
+ totalTokens: z3.number(),
+ cacheReadInputTokens: z3.number().nullish(),
+ cacheWriteInputTokens: z3.number().nullish()
  })
  });
- var BedrockStreamSchema = z2.object({
- contentBlockDelta: z2.object({
- contentBlockIndex: z2.number(),
- delta: z2.union([
- z2.object({ text: z2.string() }),
- z2.object({ toolUse: z2.object({ input: z2.string() }) }),
- z2.object({
- reasoningContent: z2.object({ text: z2.string() })
+ var BedrockStreamSchema = z3.object({
+ contentBlockDelta: z3.object({
+ contentBlockIndex: z3.number(),
+ delta: z3.union([
+ z3.object({ text: z3.string() }),
+ z3.object({ toolUse: z3.object({ input: z3.string() }) }),
+ z3.object({
+ reasoningContent: z3.object({ text: z3.string() })
  }),
- z2.object({
- reasoningContent: z2.object({
- signature: z2.string()
+ z3.object({
+ reasoningContent: z3.object({
+ signature: z3.string()
  })
  }),
- z2.object({
- reasoningContent: z2.object({ data: z2.string() })
+ z3.object({
+ reasoningContent: z3.object({ data: z3.string() })
  })
  ]).nullish()
  }).nullish(),
- contentBlockStart: z2.object({
- contentBlockIndex: z2.number(),
- start: z2.object({
+ contentBlockStart: z3.object({
+ contentBlockIndex: z3.number(),
+ start: z3.object({
  toolUse: BedrockToolUseSchema.nullish()
  }).nullish()
  }).nullish(),
- contentBlockStop: z2.object({
- contentBlockIndex: z2.number()
+ contentBlockStop: z3.object({
+ contentBlockIndex: z3.number()
  }).nullish(),
- internalServerException: z2.record(z2.unknown()).nullish(),
- messageStop: z2.object({
- additionalModelResponseFields: z2.record(z2.unknown()).nullish(),
+ internalServerException: z3.record(z3.unknown()).nullish(),
+ messageStop: z3.object({
+ additionalModelResponseFields: z3.record(z3.unknown()).nullish(),
  stopReason: BedrockStopReasonSchema
  }).nullish(),
- metadata: z2.object({
- trace: z2.unknown().nullish(),
- usage: z2.object({
- cacheReadInputTokens: z2.number().nullish(),
- cacheWriteInputTokens: z2.number().nullish(),
- inputTokens: z2.number(),
- outputTokens: z2.number()
+ metadata: z3.object({
+ trace: z3.unknown().nullish(),
+ usage: z3.object({
+ cacheReadInputTokens: z3.number().nullish(),
+ cacheWriteInputTokens: z3.number().nullish(),
+ inputTokens: z3.number(),
+ outputTokens: z3.number()
  }).nullish()
  }).nullish(),
- modelStreamErrorException: z2.record(z2.unknown()).nullish(),
- throttlingException: z2.record(z2.unknown()).nullish(),
- validationException: z2.record(z2.unknown()).nullish()
+ modelStreamErrorException: z3.record(z3.unknown()).nullish(),
+ throttlingException: z3.record(z3.unknown()).nullish(),
+ validationException: z3.record(z3.unknown()).nullish()
  });

  // src/bedrock-embedding-model.ts
@@ -958,13 +967,13 @@ import {
  postJsonToApi as postJsonToApi2,
  resolve as resolve2
  } from "@ai-sdk/provider-utils";
- import { z as z3 } from "zod";
+ import { z as z4 } from "zod";
  var BedrockEmbeddingModel = class {
  constructor(modelId, settings, config) {
  this.modelId = modelId;
  this.settings = settings;
  this.config = config;
- this.specificationVersion = "v1";
+ this.specificationVersion = "v2";
  this.provider = "amazon-bedrock";
  this.maxEmbeddingsPerCall = void 0;
  this.supportsParallelCalls = true;
@@ -1017,9 +1026,9 @@ var BedrockEmbeddingModel = class {
  );
  }
  };
- var BedrockEmbeddingResponseSchema = z3.object({
- embedding: z3.array(z3.number()),
- inputTextTokenCount: z3.number()
+ var BedrockEmbeddingResponseSchema = z4.object({
+ embedding: z4.array(z4.number()),
+ inputTextTokenCount: z4.number()
  });

  // src/bedrock-image-model.ts
@@ -1037,7 +1046,7 @@ var modelMaxImagesPerCall = {
  };

  // src/bedrock-image-model.ts
- import { z as z4 } from "zod";
+ import { z as z5 } from "zod";
  var BedrockImageModel = class {
  constructor(modelId, settings, config) {
  this.modelId = modelId;
@@ -1119,8 +1128,8 @@ var BedrockImageModel = class {
  };
  }
  };
- var bedrockImageResponseSchema = z4.object({
- images: z4.array(z4.string())
+ var bedrockImageResponseSchema = z5.object({
+ images: z5.array(z5.string())
  });

  // src/headers-utils.ts
@@ -1245,22 +1254,22 @@ function createAmazonBedrock(options = {}) {
  })}.amazonaws.com`
  )) != null ? _b : `https://bedrock-runtime.us-east-1.amazonaws.com`;
  };
- const createChatModel = (modelId, settings = {}) => {
+ const createChatModel = (modelId) => {
  var _a;
- return new BedrockChatLanguageModel(modelId, settings, {
+ return new BedrockChatLanguageModel(modelId, {
  baseUrl: getBaseUrl,
  headers: (_a = options.headers) != null ? _a : {},
  fetch: sigv4Fetch,
  generateId
  });
  };
- const provider = function(modelId, settings) {
+ const provider = function(modelId) {
  if (new.target) {
  throw new Error(
  "The Amazon Bedrock model function cannot be called with the new keyword."
  );
  }
- return createChatModel(modelId, settings);
+ return createChatModel(modelId);
  };
  const createEmbeddingModel = (modelId, settings = {}) => {
  var _a;
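
Note: the chat model factory and the provider function no longer accept a per-model settings object; everything that used to live in chat settings now travels with each call via providerOptions.bedrock (see bedrockProviderOptions above). A before/after sketch, assuming an illustrative model ID and field values:

  import { createAmazonBedrock } from "@ai-sdk/amazon-bedrock";

  const bedrock = createAmazonBedrock({ region: "us-east-1" });

  // canary.5: settings were attached to the model instance
  // const model = bedrock("anthropic.claude-3-7-sonnet-20250219-v1:0", {
  //   additionalModelRequestFields: { top_k: 200 },
  // });

  // canary.7: the factory takes only a model ID...
  const model = bedrock("anthropic.claude-3-7-sonnet-20250219-v1:0");

  // ...and request-scoped fields are supplied per call, e.g.:
  //   providerOptions: { bedrock: { additionalModelRequestFields: { top_k: 200 } } }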