bedrock-wrapper 2.7.2 → 2.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/bedrock-models.js CHANGED
@@ -6,6 +6,66 @@
6
6
  // https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/cross-region-inference
7
7
 
8
8
  export const bedrock_models = [
9
+ {
10
+ // ======================
11
+ // == Claude 4.5 Opus ==
12
+ // ======================
13
+ "modelName": "Claude-4-5-Opus",
14
+ // "modelId": "anthropic.claude-opus-4-5-20251101-v1:0", // single-region (us-east-1, us-east-2, us-west-1, us-west-2 only)
15
+ "modelId": "global.anthropic.claude-opus-4-5-20251101-v1:0", // cross-region inference profile
16
+ "vision": true,
17
+ "messages_api": true,
18
+ "system_as_separate_field": true,
19
+ "display_role_names": true,
20
+ "max_tokens_param_name": "max_tokens",
21
+ "max_supported_response_tokens": 131072,
22
+ "stop_sequences_param_name": "stop_sequences",
23
+ "response_chunk_element": "delta.text",
24
+ "response_nonchunk_element": "content[0].text",
25
+ "thinking_response_chunk_element": "delta.thinking",
26
+ "thinking_response_nonchunk_element": "content[0].thinking",
27
+ "special_request_schema": {
28
+ "anthropic_version": "bedrock-2023-05-31",
29
+ "anthropic_beta": ["output-128k-2025-02-19"],
30
+ },
31
+ "image_support": {
32
+ "max_image_size": 20971520, // 20MB
33
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
34
+ "max_images_per_request": 10
35
+ }
36
+ },
37
+ {
38
+ // ===============================
39
+ // == Claude 4.5 Opus Thinking ==
40
+ // ===============================
41
+ "modelName": "Claude-4-5-Opus-Thinking",
42
+ // "modelId": "anthropic.claude-opus-4-5-20251101-v1:0", // single-region (us-east-1, us-east-2, us-west-1, us-west-2 only)
43
+ "modelId": "global.anthropic.claude-opus-4-5-20251101-v1:0", // cross-region inference profile
44
+ "vision": true,
45
+ "messages_api": true,
46
+ "system_as_separate_field": true,
47
+ "display_role_names": true,
48
+ "max_tokens_param_name": "max_tokens",
49
+ "max_supported_response_tokens": 131072,
50
+ "stop_sequences_param_name": "stop_sequences",
51
+ "response_chunk_element": "delta.text",
52
+ "response_nonchunk_element": "content[0].text",
53
+ "thinking_response_chunk_element": "delta.thinking",
54
+ "thinking_response_nonchunk_element": "content[0].thinking",
55
+ "special_request_schema": {
56
+ "anthropic_version": "bedrock-2023-05-31",
57
+ "anthropic_beta": ["output-128k-2025-02-19"],
58
+ "thinking": {
59
+ "type": "enabled",
60
+ "budget_tokens": 16000
61
+ },
62
+ },
63
+ "image_support": {
64
+ "max_image_size": 20971520, // 20MB
65
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
66
+ "max_images_per_request": 10
67
+ }
68
+ },
9
69
  {
10
70
  // =====================
11
71
  // == Claude 4.1 Opus ==
@@ -191,8 +251,8 @@ export const bedrock_models = [
191
251
  // == Claude 4.5 Haiku ==
192
252
  // ======================
193
253
  "modelName": "Claude-4-5-Haiku",
194
- // "modelId": "anthropic.claude-haiku-4-5-20251001-v1:0",
195
- "modelId": "us.anthropic.claude-haiku-4-5-20251001-v1:0",
254
+ // "modelId": "us.anthropic.claude-haiku-4-5-20251001-v1:0",
255
+ "modelId": "global.anthropic.claude-haiku-4-5-20251001-v1:0",
196
256
  "vision": true,
197
257
  "messages_api": true,
198
258
  "system_as_separate_field": true,
@@ -219,8 +279,8 @@ export const bedrock_models = [
219
279
  // == Claude 4.5 Haiku Thinking ==
220
280
  // ===============================
221
281
  "modelName": "Claude-4-5-Haiku-Thinking",
222
- // "modelId": "anthropic.claude-haiku-4-5-20251001-v1:0",
223
- "modelId": "us.anthropic.claude-haiku-4-5-20251001-v1:0",
282
+ // "modelId": "us.anthropic.claude-haiku-4-5-20251001-v1:0",
283
+ "modelId": "global.anthropic.claude-haiku-4-5-20251001-v1:0",
224
284
  "vision": true,
225
285
  "messages_api": true,
226
286
  "system_as_separate_field": true,
@@ -369,7 +429,8 @@ export const bedrock_models = [
369
429
  // == Claude 3.5 Sonnet v2 ==
370
430
  // ==========================
371
431
  "modelName": "Claude-3-5-Sonnet-v2",
372
- "modelId": "anthropic.claude-3-5-sonnet-20241022-v2:0",
432
+ // "modelId": "anthropic.claude-3-5-sonnet-20241022-v2:0", // single-region
433
+ "modelId": "us.anthropic.claude-3-5-sonnet-20241022-v2:0", // cross-region inference profile
373
434
  "vision": true,
374
435
  "messages_api": true,
375
436
  "system_as_separate_field": true,
@@ -393,7 +454,8 @@ export const bedrock_models = [
393
454
  // == Claude 3.5 Sonnet ==
394
455
  // =======================
395
456
  "modelName": "Claude-3-5-Sonnet",
396
- "modelId": "anthropic.claude-3-5-sonnet-20240620-v1:0",
457
+ // "modelId": "anthropic.claude-3-5-sonnet-20240620-v1:0", // single-region
458
+ "modelId": "us.anthropic.claude-3-5-sonnet-20240620-v1:0", // cross-region inference profile
397
459
  "vision": true,
398
460
  "messages_api": true,
399
461
  "system_as_separate_field": true,
@@ -417,7 +479,8 @@ export const bedrock_models = [
417
479
  // == Claude 3.5 Haiku ==
418
480
  // ======================
419
481
  "modelName": "Claude-3-5-Haiku",
420
- "modelId": "anthropic.claude-3-5-haiku-20241022-v1:0",
482
+ // "modelId": "anthropic.claude-3-5-haiku-20241022-v1:0", // single-region
483
+ "modelId": "us.anthropic.claude-3-5-haiku-20241022-v1:0", // cross-region inference profile
421
484
  "vision": false,
422
485
  "messages_api": true,
423
486
  "system_as_separate_field": true,
@@ -436,7 +499,8 @@ export const bedrock_models = [
436
499
  // == Claude 3 Haiku ==
437
500
  // ====================
438
501
  "modelName": "Claude-3-Haiku",
439
- "modelId": "anthropic.claude-3-haiku-20240307-v1:0",
502
+ // "modelId": "anthropic.claude-3-haiku-20240307-v1:0", // single-region
503
+ "modelId": "us.anthropic.claude-3-haiku-20240307-v1:0", // cross-region inference profile
440
504
  "vision": true,
441
505
  "messages_api": true,
442
506
  "system_as_separate_field": true,
@@ -600,7 +664,8 @@ export const bedrock_models = [
600
664
  // == Llama 3.1 8b ==
601
665
  // ==================
602
666
  "modelName": "Llama-3-1-8b",
603
- "modelId": "meta.llama3-1-8b-instruct-v1:0",
667
+ // "modelId": "meta.llama3-1-8b-instruct-v1:0", // single-region
668
+ "modelId": "us.meta.llama3-1-8b-instruct-v1:0", // cross-region inference profile
604
669
  "vision": false,
605
670
  "messages_api": false,
606
671
  "bos_text": "<|begin_of_text|>",
@@ -627,7 +692,8 @@ export const bedrock_models = [
627
692
  // == Llama 3.1 70b ==
628
693
  // ===================
629
694
  "modelName": "Llama-3-1-70b",
630
- "modelId": "meta.llama3-1-70b-instruct-v1:0",
695
+ // "modelId": "meta.llama3-1-70b-instruct-v1:0", // single-region
696
+ "modelId": "us.meta.llama3-1-70b-instruct-v1:0", // cross-region inference profile
631
697
  "vision": false,
632
698
  "messages_api": false,
633
699
  "bos_text": "<|begin_of_text|>",
@@ -778,6 +844,30 @@ export const bedrock_models = [
778
844
  "max_images_per_request": 10
779
845
  }
780
846
  },
847
+ {
848
+ // ==================
849
+ // == Nova 2 Lite ==
850
+ // ==================
851
+ "modelName": "Nova-2-Lite",
852
+ "modelId": "us.amazon.nova-2-lite-v1:0",
853
+ "vision": true,
854
+ "messages_api": true,
855
+ "system_as_separate_field": true,
856
+ "display_role_names": true,
857
+ "max_tokens_param_name": "maxTokens",
858
+ "max_supported_response_tokens": 5000,
859
+ "stop_sequences_param_name": "stopSequences",
860
+ "response_chunk_element": "contentBlockDelta.delta.text",
861
+ "response_nonchunk_element": "output.message.content[0].text",
862
+ "special_request_schema": {
863
+ "schemaVersion": "messages-v1"
864
+ },
865
+ "image_support": {
866
+ "max_image_size": 5242880,
867
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
868
+ "max_images_per_request": 10
869
+ }
870
+ },
781
871
  {
782
872
  // =================
783
873
  // == Nova Micro ==
@@ -960,6 +1050,117 @@ export const bedrock_models = [
960
1050
  "stop_sequences_param_name": "stop",
961
1051
  "response_chunk_element": "outputs[0].text"
962
1052
  },
1053
+ {
1054
+ // =====================
1055
+ // == Mistral Large 3 ==
1056
+ // =====================
1057
+ "modelName": "Mistral-Large-3",
1058
+ "modelId": "mistral.mistral-large-3-675b-instruct",
1059
+ "vision": true,
1060
+ "messages_api": true,
1061
+ "system_as_separate_field": false,
1062
+ "display_role_names": true,
1063
+ "skip_empty_assistant_message": true,
1064
+ "max_tokens_param_name": "max_tokens",
1065
+ "max_supported_response_tokens": 32768,
1066
+ "stop_sequences_param_name": "stop",
1067
+ "response_chunk_element": "choices[0].delta.content",
1068
+ "response_nonchunk_element": "choices[0].message.content",
1069
+ "special_request_schema": {},
1070
+ "image_support": {
1071
+ "max_image_size": 20971520,
1072
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
1073
+ "max_images_per_request": 10
1074
+ }
1075
+ },
1076
+ {
1077
+ // ====================
1078
+ // == Ministral 3 3b ==
1079
+ // ====================
1080
+ "modelName": "Ministral-3-3b",
1081
+ "modelId": "mistral.ministral-3-3b-instruct",
1082
+ "vision": true,
1083
+ "messages_api": true,
1084
+ "system_as_separate_field": false,
1085
+ "display_role_names": true,
1086
+ "skip_empty_assistant_message": true,
1087
+ "max_tokens_param_name": "max_tokens",
1088
+ "max_supported_response_tokens": 8192,
1089
+ "stop_sequences_param_name": "stop",
1090
+ "response_chunk_element": "choices[0].delta.content",
1091
+ "response_nonchunk_element": "choices[0].message.content",
1092
+ "special_request_schema": {},
1093
+ "image_support": {
1094
+ "max_image_size": 20971520,
1095
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
1096
+ "max_images_per_request": 10
1097
+ }
1098
+ },
1099
+ {
1100
+ // ====================
1101
+ // == Ministral 3 8b ==
1102
+ // ====================
1103
+ "modelName": "Ministral-3-8b",
1104
+ "modelId": "mistral.ministral-3-8b-instruct",
1105
+ "vision": true,
1106
+ "messages_api": true,
1107
+ "system_as_separate_field": false,
1108
+ "display_role_names": true,
1109
+ "skip_empty_assistant_message": true,
1110
+ "max_tokens_param_name": "max_tokens",
1111
+ "max_supported_response_tokens": 8192,
1112
+ "stop_sequences_param_name": "stop",
1113
+ "response_chunk_element": "choices[0].delta.content",
1114
+ "response_nonchunk_element": "choices[0].message.content",
1115
+ "special_request_schema": {},
1116
+ "image_support": {
1117
+ "max_image_size": 20971520,
1118
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
1119
+ "max_images_per_request": 10
1120
+ }
1121
+ },
1122
+ {
1123
+ // =====================
1124
+ // == Ministral 3 14b ==
1125
+ // =====================
1126
+ "modelName": "Ministral-3-14b",
1127
+ "modelId": "mistral.ministral-3-14b-instruct",
1128
+ "vision": true,
1129
+ "messages_api": true,
1130
+ "system_as_separate_field": false,
1131
+ "display_role_names": true,
1132
+ "skip_empty_assistant_message": true,
1133
+ "max_tokens_param_name": "max_tokens",
1134
+ "max_supported_response_tokens": 16384,
1135
+ "stop_sequences_param_name": "stop",
1136
+ "response_chunk_element": "choices[0].delta.content",
1137
+ "response_nonchunk_element": "choices[0].message.content",
1138
+ "special_request_schema": {},
1139
+ "image_support": {
1140
+ "max_image_size": 20971520,
1141
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
1142
+ "max_images_per_request": 10
1143
+ }
1144
+ },
1145
+ {
1146
+ // ==========================
1147
+ // == Magistral Small 2509 ==
1148
+ // ==========================
1149
+ "modelName": "Magistral-Small-2509",
1150
+ "modelId": "mistral.magistral-small-2509",
1151
+ "vision": false,
1152
+ "messages_api": true,
1153
+ "converse_api_only": true,
1154
+ "system_as_separate_field": false,
1155
+ "display_role_names": true,
1156
+ "skip_empty_assistant_message": true,
1157
+ "max_tokens_param_name": "max_tokens",
1158
+ "max_supported_response_tokens": 8192,
1159
+ "stop_sequences_param_name": "stop",
1160
+ "response_chunk_element": "choices[0].delta.content",
1161
+ "response_nonchunk_element": "choices[0].message.content",
1162
+ "special_request_schema": {}
1163
+ },
963
1164
  {
964
1165
  // ================
965
1166
  // == Qwen3-32B ==
@@ -1028,6 +1229,23 @@ export const bedrock_models = [
1028
1229
  "response_nonchunk_element": "choices[0].message.content",
1029
1230
  "special_request_schema": {}
1030
1231
  },
1232
+ {
1233
+ // =========================
1234
+ // == Qwen3-Next-80B-A3B ==
1235
+ // =========================
1236
+ "modelName": "Qwen3-Next-80B-A3B",
1237
+ "modelId": "qwen.qwen3-next-80b-a3b",
1238
+ "vision": false,
1239
+ "messages_api": true,
1240
+ "system_as_separate_field": false,
1241
+ "display_role_names": true,
1242
+ "max_tokens_param_name": "max_tokens",
1243
+ "max_supported_response_tokens": 32768,
1244
+ "stop_sequences_param_name": "stop",
1245
+ "response_chunk_element": "choices[0].delta.content",
1246
+ "response_nonchunk_element": "choices[0].message.content",
1247
+ "special_request_schema": {}
1248
+ },
1031
1249
  {
1032
1250
  // =================
1033
1251
  // == DeepSeek-R1 ==
@@ -1076,4 +1294,128 @@ export const bedrock_models = [
1076
1294
  "response_nonchunk_element": "choices[0].message.content",
1077
1295
  "special_request_schema": {}
1078
1296
  },
1079
- ];
1297
+ {
1298
+ // ========================================
1299
+ // == Google Gemma Models ==
1300
+ // ========================================
1301
+ // == Gemma-3-4b ==
1302
+ // ================
1303
+ "modelName": "Gemma-3-4b",
1304
+ "modelId": "google.gemma-3-4b-it",
1305
+ "vision": true,
1306
+ "messages_api": true,
1307
+ "system_as_separate_field": false,
1308
+ "display_role_names": true,
1309
+ "max_tokens_param_name": "max_tokens",
1310
+ "max_supported_response_tokens": 8192,
1311
+ "stop_sequences_param_name": "stop",
1312
+ "response_chunk_element": "choices[0].delta.content",
1313
+ "response_nonchunk_element": "choices[0].message.content",
1314
+ "special_request_schema": {},
1315
+ "image_support": {
1316
+ "max_image_size": 20971520,
1317
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
1318
+ "max_images_per_request": 10
1319
+ }
1320
+ },
1321
+ {
1322
+ // =================
1323
+ // == Gemma-3-12b ==
1324
+ // =================
1325
+ "modelName": "Gemma-3-12b",
1326
+ "modelId": "google.gemma-3-12b-it",
1327
+ "vision": true,
1328
+ "messages_api": true,
1329
+ "system_as_separate_field": false,
1330
+ "display_role_names": true,
1331
+ "max_tokens_param_name": "max_tokens",
1332
+ "max_supported_response_tokens": 8192,
1333
+ "stop_sequences_param_name": "stop",
1334
+ "response_chunk_element": "choices[0].delta.content",
1335
+ "response_nonchunk_element": "choices[0].message.content",
1336
+ "special_request_schema": {},
1337
+ "image_support": {
1338
+ "max_image_size": 20971520,
1339
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
1340
+ "max_images_per_request": 10
1341
+ }
1342
+ },
1343
+ {
1344
+ // =================
1345
+ // == Gemma-3-27b ==
1346
+ // =================
1347
+ "modelName": "Gemma-3-27b",
1348
+ "modelId": "google.gemma-3-27b-it",
1349
+ "vision": true,
1350
+ "messages_api": true,
1351
+ "system_as_separate_field": false,
1352
+ "display_role_names": true,
1353
+ "max_tokens_param_name": "max_tokens",
1354
+ "max_supported_response_tokens": 8192,
1355
+ "stop_sequences_param_name": "stop",
1356
+ "response_chunk_element": "choices[0].delta.content",
1357
+ "response_nonchunk_element": "choices[0].message.content",
1358
+ "special_request_schema": {},
1359
+ "image_support": {
1360
+ "max_image_size": 20971520,
1361
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
1362
+ "max_images_per_request": 10
1363
+ }
1364
+ },
1365
+ {
1366
+ // ========================================
1367
+ // == Moonshot AI Kimi Models ==
1368
+ // ========================================
1369
+ // == Kimi-K2 ==
1370
+ // =============
1371
+ "modelName": "Kimi-K2",
1372
+ "modelId": "moonshot.kimi-k2-thinking", // NOTE(review): identical to the Kimi-K2-Thinking entry's modelId — the non-thinking "Kimi-K2" entry likely intends a different (non-thinking) model id; verify against the Bedrock model catalog
1373
+ "vision": false,
1374
+ "messages_api": true,
1375
+ "system_as_separate_field": false,
1376
+ "display_role_names": true,
1377
+ "max_tokens_param_name": "max_tokens",
1378
+ "max_supported_response_tokens": 32768,
1379
+ "stop_sequences_param_name": "stop",
1380
+ "response_chunk_element": "choices[0].delta.content",
1381
+ "response_nonchunk_element": "choices[0].message.content",
1382
+ "special_request_schema": {}
1383
+ },
1384
+ {
1385
+ // ======================
1386
+ // == Kimi-K2-Thinking ==
1387
+ // ======================
1388
+ "modelName": "Kimi-K2-Thinking",
1389
+ "modelId": "moonshot.kimi-k2-thinking",
1390
+ "vision": false,
1391
+ "messages_api": true,
1392
+ "system_as_separate_field": false,
1393
+ "display_role_names": true,
1394
+ "max_tokens_param_name": "max_tokens",
1395
+ "max_supported_response_tokens": 32768,
1396
+ "stop_sequences_param_name": "stop",
1397
+ "response_chunk_element": "choices[0].delta.content",
1398
+ "response_nonchunk_element": "choices[0].message.content",
1399
+ "preserve_reasoning": true,
1400
+ "special_request_schema": {}
1401
+ },
1402
+ {
1403
+ // ========================================
1404
+ // == MiniMax Models ==
1405
+ // ========================================
1406
+ // == MiniMax-M2 ==
1407
+ // ================
1408
+ "modelName": "MiniMax-M2",
1409
+ "modelId": "minimax.minimax-m2",
1410
+ "vision": false,
1411
+ "messages_api": true,
1412
+ "system_as_separate_field": false,
1413
+ "display_role_names": true,
1414
+ "max_tokens_param_name": "max_tokens",
1415
+ "max_supported_response_tokens": 32768,
1416
+ "stop_sequences_param_name": "stop",
1417
+ "response_chunk_element": "choices[0].delta.content",
1418
+ "response_nonchunk_element": "choices[0].message.content",
1419
+ "special_request_schema": {}
1420
+ },
1421
+ ];
@@ -222,7 +222,7 @@ async function processMessagesForInvoke(messages, awsModel) {
222
222
  message_cleaned.push(messages[i]);
223
223
  }
224
224
 
225
- if (i === (messages.length - 1) && messages[i].content !== "" && awsModel.display_role_names) {
225
+ if (i === (messages.length - 1) && messages[i].content !== "" && awsModel.display_role_names && !awsModel.skip_empty_assistant_message) {
226
226
  message_cleaned.push({role: "assistant", content: ""});
227
227
  }
228
228
  }
@@ -610,7 +610,8 @@ export async function* bedrockWrapper(awsCreds, openaiChatCompletionsCreateObjec
610
610
  const reasoningContent = event.contentBlockDelta.delta?.reasoningContent;
611
611
 
612
612
  // Handle Claude thinking data (streaming) - check both reasoningContent and thinking
613
- const thinkingText = reasoningContent?.reasoningText?.text || thinking;
613
+ // Note: streaming has delta.reasoningContent.text, non-streaming has reasoningContent.reasoningText.text
614
+ const thinkingText = reasoningContent?.reasoningText?.text || reasoningContent?.text || thinking;
614
615
  if (should_think && thinkingText) {
615
616
  if (!is_thinking) {
616
617
  is_thinking = true;
@@ -633,6 +634,11 @@ export async function* bedrockWrapper(awsCreds, openaiChatCompletionsCreateObjec
633
634
  }
634
635
  }
635
636
  }
637
+ // Fallback: if no text but reasoningContent exists and not in thinking mode,
638
+ // yield the reasoning content as the actual response (for models like Magistral)
639
+ else if (!should_think && thinkingText) {
640
+ yield thinkingText;
641
+ }
636
642
  }
637
643
  }
638
644
 
@@ -655,25 +661,35 @@ export async function* bedrockWrapper(awsCreds, openaiChatCompletionsCreateObjec
655
661
 
656
662
  for (const contentBlock of response.output.message.content) {
657
663
  // Extract thinking data for Claude models (from reasoningContent)
658
- if (include_thinking_data && contentBlock.reasoningContent &&
664
+ if (include_thinking_data && contentBlock.reasoningContent &&
659
665
  awsModel.special_request_schema?.thinking?.type === "enabled") {
660
666
  const reasoningText = contentBlock.reasoningContent.reasoningText?.text;
661
667
  if (reasoningText) {
662
668
  thinking_result += reasoningText;
663
669
  }
664
670
  }
665
-
671
+
666
672
  // Also check for legacy thinking field format
667
- if (include_thinking_data && contentBlock.thinking &&
673
+ if (include_thinking_data && contentBlock.thinking &&
668
674
  awsModel.special_request_schema?.thinking?.type === "enabled") {
669
675
  thinking_result += contentBlock.thinking;
670
676
  }
671
-
677
+
672
678
  // Extract regular text content
673
679
  if (contentBlock.text) {
674
680
  text_result += contentBlock.text;
675
681
  }
676
682
  }
683
+
684
+ // Fallback: if no regular text but reasoningContent exists (for models like Magistral),
685
+ // extract the reasoning text as the actual response
686
+ if (!text_result) {
687
+ for (const contentBlock of response.output.message.content) {
688
+ if (contentBlock.reasoningContent?.reasoningText?.text) {
689
+ text_result += contentBlock.reasoningContent.reasoningText.text;
690
+ }
691
+ }
692
+ }
677
693
 
678
694
  // Process reasoning tags for GPT-OSS models
679
695
  text_result = processReasoningTags(text_result, awsModel);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bedrock-wrapper",
3
- "version": "2.7.2",
3
+ "version": "2.8.0",
4
4
  "description": "🪨 Bedrock Wrapper is an npm package that simplifies the integration of existing OpenAI-compatible API objects with AWS Bedrock's serverless inference LLMs.",
5
5
  "homepage": "https://www.equilllabs.com/projects/bedrock-wrapper",
6
6
  "repository": {
@@ -41,7 +41,7 @@
41
41
  "author": "",
42
42
  "license": "ISC",
43
43
  "dependencies": {
44
- "@aws-sdk/client-bedrock-runtime": "^3.934.0",
44
+ "@aws-sdk/client-bedrock-runtime": "^3.943.0",
45
45
  "dotenv": "^17.2.3",
46
46
  "sharp": "^0.34.5"
47
47
  },