@ai-sdk/amazon-bedrock 3.0.0-canary.0 → 3.0.0-canary.2

This diff shows the changes between publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,23 @@
  # @ai-sdk/amazon-bedrock

+ ## 3.0.0-canary.2
+
+ ### Patch Changes
+
+ - Updated dependencies [c57e248]
+ - Updated dependencies [33f4a6a]
+   - @ai-sdk/provider@2.0.0-canary.1
+   - @ai-sdk/provider-utils@3.0.0-canary.2
+
+ ## 3.0.0-canary.1
+
+ ### Patch Changes
+
+ - Updated dependencies [060370c]
+ - Updated dependencies [0c0c0b3]
+ - Updated dependencies [63d791d]
+   - @ai-sdk/provider-utils@3.0.0-canary.1
+
  ## 3.0.0-canary.0

  ### Major Changes
package/dist/index.d.mts CHANGED
@@ -1,4 +1,4 @@
- import { ProviderV1, LanguageModelV1, EmbeddingModelV1, ImageModelV1 } from '@ai-sdk/provider';
+ import { ProviderV2, LanguageModelV2, EmbeddingModelV1, ImageModelV1 } from '@ai-sdk/provider';
  import { FetchFunction } from '@ai-sdk/provider-utils';

  type BedrockChatModelId = 'amazon.titan-tg1-large' | 'amazon.titan-text-express-v1' | 'anthropic.claude-v2' | 'anthropic.claude-v2:1' | 'anthropic.claude-instant-v1' | 'anthropic.claude-3-7-sonnet-20250219-v1:0' | 'anthropic.claude-3-5-sonnet-20240620-v1:0' | 'anthropic.claude-3-5-sonnet-20241022-v2:0' | 'anthropic.claude-3-5-haiku-20241022-v1:0' | 'anthropic.claude-3-sonnet-20240229-v1:0' | 'anthropic.claude-3-haiku-20240307-v1:0' | 'anthropic.claude-3-opus-20240229-v1:0' | 'cohere.command-text-v14' | 'cohere.command-light-text-v14' | 'cohere.command-r-v1:0' | 'cohere.command-r-plus-v1:0' | 'meta.llama3-70b-instruct-v1:0' | 'meta.llama3-8b-instruct-v1:0' | 'meta.llama3-1-405b-instruct-v1:0' | 'meta.llama3-1-70b-instruct-v1:0' | 'meta.llama3-1-8b-instruct-v1:0' | 'meta.llama3-2-11b-instruct-v1:0' | 'meta.llama3-2-1b-instruct-v1:0' | 'meta.llama3-2-3b-instruct-v1:0' | 'meta.llama3-2-90b-instruct-v1:0' | 'mistral.mistral-7b-instruct-v0:2' | 'mistral.mixtral-8x7b-instruct-v0:1' | 'mistral.mistral-large-2402-v1:0' | 'mistral.mistral-small-2402-v1:0' | 'amazon.titan-text-express-v1' | 'amazon.titan-text-lite-v1' | (string & {});
@@ -83,9 +83,9 @@ interface AmazonBedrockProviderSettings {
  credentialProvider?: () => PromiseLike<Omit<BedrockCredentials, 'region'>>;
  generateId?: () => string;
  }
- interface AmazonBedrockProvider extends ProviderV1 {
- (modelId: BedrockChatModelId, settings?: BedrockChatSettings): LanguageModelV1;
- languageModel(modelId: BedrockChatModelId, settings?: BedrockChatSettings): LanguageModelV1;
+ interface AmazonBedrockProvider extends ProviderV2 {
+ (modelId: BedrockChatModelId, settings?: BedrockChatSettings): LanguageModelV2;
+ languageModel(modelId: BedrockChatModelId, settings?: BedrockChatSettings): LanguageModelV2;
  embedding(modelId: BedrockEmbeddingModelId, settings?: BedrockEmbeddingSettings): EmbeddingModelV1<string>;
  image(modelId: BedrockImageModelId, settings?: BedrockImageSettings): ImageModelV1;
  imageModel(modelId: BedrockImageModelId, settings?: BedrockImageSettings): ImageModelV1;
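In practice, the V2 surface is consumed the same way as before. A minimal sketch, assuming the package's `createAmazonBedrock` factory (not part of this diff) and illustrative model ids:

```ts
import { createAmazonBedrock } from '@ai-sdk/amazon-bedrock';

// Settings shown are illustrative; credentials can also come from the
// credentialProvider declared above.
const bedrock = createAmazonBedrock({ region: 'us-east-1' });

// The callable form and languageModel() now return a LanguageModelV2.
const chat = bedrock('anthropic.claude-3-5-sonnet-20240620-v1:0');
const sameChat = bedrock.languageModel('anthropic.claude-3-5-sonnet-20240620-v1:0');

// embedding(), image() and imageModel() are unchanged and still return
// EmbeddingModelV1 / ImageModelV1 instances.
const embedder = bedrock.embedding('amazon.titan-embed-text-v2:0');
const images = bedrock.image('amazon.nova-canvas-v1:0');
```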
package/dist/index.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { ProviderV1, LanguageModelV1, EmbeddingModelV1, ImageModelV1 } from '@ai-sdk/provider';
+ import { ProviderV2, LanguageModelV2, EmbeddingModelV1, ImageModelV1 } from '@ai-sdk/provider';
  import { FetchFunction } from '@ai-sdk/provider-utils';

  type BedrockChatModelId = 'amazon.titan-tg1-large' | 'amazon.titan-text-express-v1' | 'anthropic.claude-v2' | 'anthropic.claude-v2:1' | 'anthropic.claude-instant-v1' | 'anthropic.claude-3-7-sonnet-20250219-v1:0' | 'anthropic.claude-3-5-sonnet-20240620-v1:0' | 'anthropic.claude-3-5-sonnet-20241022-v2:0' | 'anthropic.claude-3-5-haiku-20241022-v1:0' | 'anthropic.claude-3-sonnet-20240229-v1:0' | 'anthropic.claude-3-haiku-20240307-v1:0' | 'anthropic.claude-3-opus-20240229-v1:0' | 'cohere.command-text-v14' | 'cohere.command-light-text-v14' | 'cohere.command-r-v1:0' | 'cohere.command-r-plus-v1:0' | 'meta.llama3-70b-instruct-v1:0' | 'meta.llama3-8b-instruct-v1:0' | 'meta.llama3-1-405b-instruct-v1:0' | 'meta.llama3-1-70b-instruct-v1:0' | 'meta.llama3-1-8b-instruct-v1:0' | 'meta.llama3-2-11b-instruct-v1:0' | 'meta.llama3-2-1b-instruct-v1:0' | 'meta.llama3-2-3b-instruct-v1:0' | 'meta.llama3-2-90b-instruct-v1:0' | 'mistral.mistral-7b-instruct-v0:2' | 'mistral.mixtral-8x7b-instruct-v0:1' | 'mistral.mistral-large-2402-v1:0' | 'mistral.mistral-small-2402-v1:0' | 'amazon.titan-text-express-v1' | 'amazon.titan-text-lite-v1' | (string & {});
@@ -83,9 +83,9 @@ interface AmazonBedrockProviderSettings {
  credentialProvider?: () => PromiseLike<Omit<BedrockCredentials, 'region'>>;
  generateId?: () => string;
  }
- interface AmazonBedrockProvider extends ProviderV1 {
- (modelId: BedrockChatModelId, settings?: BedrockChatSettings): LanguageModelV1;
- languageModel(modelId: BedrockChatModelId, settings?: BedrockChatSettings): LanguageModelV1;
+ interface AmazonBedrockProvider extends ProviderV2 {
+ (modelId: BedrockChatModelId, settings?: BedrockChatSettings): LanguageModelV2;
+ languageModel(modelId: BedrockChatModelId, settings?: BedrockChatSettings): LanguageModelV2;
  embedding(modelId: BedrockEmbeddingModelId, settings?: BedrockEmbeddingSettings): EmbeddingModelV1<string>;
  image(modelId: BedrockImageModelId, settings?: BedrockImageSettings): ImageModelV1;
  imageModel(modelId: BedrockImageModelId, settings?: BedrockImageSettings): ImageModelV1;
package/dist/index.js CHANGED
@@ -130,9 +130,11 @@ var createBedrockEventStreamResponseHandler = (chunkSchema) => async ({ response

  // src/bedrock-prepare-tools.ts
  var import_provider2 = require("@ai-sdk/provider");
- function prepareTools(mode) {
- var _a;
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+ function prepareTools({
+ tools,
+ toolChoice
+ }) {
+ tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
  if (tools == null) {
  return {
  toolConfig: { tools: void 0, toolChoice: void 0 },
@@ -156,7 +158,6 @@ function prepareTools(mode) {
  });
  }
  }
- const toolChoice = mode.toolChoice;
  if (toolChoice == null) {
  return {
  toolConfig: { tools: bedrockTools, toolChoice: void 0 },
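The signature change above means `prepareTools` now receives `tools` and `toolChoice` directly instead of a `mode` object; the new call site is visible in the getArgs hunk further down. For orientation, a hedged sketch of how a tool definition maps onto Bedrock's Converse `toolSpec`, based on the mapping visible in this file; the field names on the input side are assumptions, not copied from the package:

```ts
// Sketch only: illustrative SDK-level tool; exact V2 tool field names are assumed.
const sdkTool = {
  type: 'function' as const,
  name: 'get_weather',
  description: 'Look up the current weather for a city',
  parameters: {
    type: 'object',
    properties: { city: { type: 'string' } },
    required: ['city'],
  },
};

// Roughly what prepareTools produces for it: a Bedrock ToolConfiguration entry.
const bedrockTool = {
  toolSpec: {
    name: sdkTool.name,
    description: sdkTool.description,
    inputSchema: { json: sdkTool.parameters },
  },
};
```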
@@ -223,7 +224,7 @@ function convertToBedrockChatMessages(prompt) {
  }
  for (const message of block.messages) {
  system.push({ text: message.content });
- if (getCachePoint(message.providerMetadata)) {
+ if (getCachePoint(message.providerOptions)) {
  system.push(BEDROCK_CACHE_POINT);
  }
  }
@@ -232,7 +233,7 @@ function convertToBedrockChatMessages(prompt) {
  case "user": {
  const bedrockContent = [];
  for (const message of block.messages) {
- const { role, content, providerMetadata } = message;
+ const { role, content, providerOptions } = message;
  switch (role) {
  case "user": {
  for (let j = 0; j < content.length; j++) {
@@ -332,7 +333,7 @@ function convertToBedrockChatMessages(prompt) {
  throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
  }
  }
- if (getCachePoint(providerMetadata)) {
+ if (getCachePoint(providerOptions)) {
  bedrockContent.push(BEDROCK_CACHE_POINT);
  }
  }
@@ -406,7 +407,7 @@ function convertToBedrockChatMessages(prompt) {
  }
  }
  }
- if (getCachePoint(message.providerMetadata)) {
+ if (getCachePoint(message.providerOptions)) {
  bedrockContent.push(BEDROCK_CACHE_POINT);
  }
  }
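The rename above is mechanical: cache points are now looked up on `providerOptions` rather than `providerMetadata` for system, user, and assistant messages. A hedged sketch of a prompt message carrying a Bedrock cache point; the `cachePoint` shape is an assumption, since `getCachePoint` itself is not part of this diff:

```ts
// Sketch only: the bedrock.cachePoint shape is assumed, not taken from this diff.
const prompt = [
  {
    role: 'system' as const,
    content: 'Long, reusable system instructions go here.',
    // previously message.providerMetadata; now message.providerOptions
    providerOptions: { bedrock: { cachePoint: { type: 'default' } } },
  },
  {
    role: 'user' as const,
    content: [{ type: 'text' as const, text: 'Summarize the instructions above.' }],
  },
];
```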
@@ -498,13 +499,12 @@ var BedrockChatLanguageModel = class {
  this.modelId = modelId;
  this.settings = settings;
  this.config = config;
- this.specificationVersion = "v1";
+ this.specificationVersion = "v2";
  this.provider = "amazon-bedrock";
  this.defaultObjectGenerationMode = "tool";
  this.supportsImageUrls = false;
  }
  getArgs({
- mode,
  prompt,
  maxTokens,
  temperature,
@@ -515,10 +515,11 @@ var BedrockChatLanguageModel = class {
  stopSequences,
  responseFormat,
  seed,
- providerMetadata
+ tools,
+ toolChoice,
+ providerOptions
  }) {
  var _a, _b, _c, _d, _e, _f, _g;
- const type = mode.type;
  const warnings = [];
  if (frequencyPenalty != null) {
  warnings.push({
@@ -553,7 +554,7 @@
  }
  const { system, messages } = convertToBedrockChatMessages(prompt);
  const reasoningConfigOptions = BedrockReasoningConfigOptionsSchema.safeParse(
- (_a = providerMetadata == null ? void 0 : providerMetadata.bedrock) == null ? void 0 : _a.reasoning_config
+ (_a = providerOptions == null ? void 0 : providerOptions.bedrock) == null ? void 0 : _a.reasoning_config
  );
  if (!reasoningConfigOptions.success) {
  throw new import_provider4.InvalidArgumentError({
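Same rename at the call-options level: the reasoning configuration is now read from `providerOptions.bedrock.reasoning_config` and validated with `BedrockReasoningConfigOptionsSchema`. A hedged sketch of such an options object; only the `reasoning_config` key is visible in this diff, so the inner fields are assumptions:

```ts
// Sketch only: inner field names are assumptions; this diff only shows that
// providerOptions.bedrock.reasoning_config is parsed and validated.
const providerOptions = {
  bedrock: {
    reasoning_config: {
      type: 'enabled',     // assumed: enables extended thinking
      budget_tokens: 1024, // assumed: token budget for the thinking phase
    },
  },
};
```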
@@ -600,58 +601,20 @@ var BedrockChatLanguageModel = class {
  details: "topP is not supported when thinking is enabled"
  });
  }
- const baseArgs = {
- system,
- additionalModelRequestFields: this.settings.additionalModelRequestFields,
- ...Object.keys(inferenceConfig).length > 0 && {
- inferenceConfig
+ const { toolConfig, toolWarnings } = prepareTools({ tools, toolChoice });
+ return {
+ command: {
+ system,
+ messages,
+ additionalModelRequestFields: this.settings.additionalModelRequestFields,
+ ...Object.keys(inferenceConfig).length > 0 && {
+ inferenceConfig
+ },
+ ...providerOptions == null ? void 0 : providerOptions.bedrock,
+ ...((_g = toolConfig.tools) == null ? void 0 : _g.length) ? { toolConfig } : {}
  },
- messages,
- ...providerMetadata == null ? void 0 : providerMetadata.bedrock
+ warnings: [...warnings, ...toolWarnings]
  };
- switch (type) {
- case "regular": {
- const { toolConfig, toolWarnings } = prepareTools(mode);
- return {
- command: {
- ...baseArgs,
- ...((_g = toolConfig.tools) == null ? void 0 : _g.length) ? { toolConfig } : {}
- },
- warnings: [...warnings, ...toolWarnings]
- };
- }
- case "object-json": {
- throw new import_provider4.UnsupportedFunctionalityError({
- functionality: "json-mode object generation"
- });
- }
- case "object-tool": {
- return {
- command: {
- ...baseArgs,
- toolConfig: {
- tools: [
- {
- toolSpec: {
- name: mode.tool.name,
- description: mode.tool.description,
- inputSchema: {
- json: mode.tool.parameters
- }
- }
- }
- ],
- toolChoice: { tool: { name: mode.tool.name } }
- }
- },
- warnings
- };
- }
- default: {
- const _exhaustiveCheck = type;
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
- }
- }
  }
  async doGenerate(options) {
  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
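Net effect of the rewritten getArgs: the regular / object-json / object-tool switch is gone, tool configuration always comes from `prepareTools`, and `providerOptions.bedrock` is spread straight into the Converse-style command. A sketch of the returned shape, reconstructed from the hunk above (field types are loose placeholders, not the package's declarations):

```ts
// Reconstructed shape of what getArgs now returns (sketch, not the real types).
type PreparedBedrockArgs = {
  command: {
    system?: unknown;                                        // Bedrock system blocks
    messages: unknown[];                                     // converted Bedrock messages
    additionalModelRequestFields?: Record<string, unknown>;  // from settings
    inferenceConfig?: Record<string, unknown>;               // only when any setting was given
    toolConfig?: unknown;                                    // only when tools were mapped
    // ...plus whatever was passed via providerOptions?.bedrock, spread as-is
  };
  warnings: unknown[];                                       // call warnings + toolWarnings
};
```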