bedrock-wrapper 2.7.2 → 2.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/bedrock-models.js CHANGED
@@ -6,6 +6,66 @@
6
6
  // https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/cross-region-inference
7
7
 
8
8
  export const bedrock_models = [
9
+ {
10
+ // =====================
11
+ // == Claude 4.5 Opus ==
12
+ // =====================
13
+ "modelName": "Claude-4-5-Opus",
14
+ // "modelId": "anthropic.claude-opus-4-5-20251101-v1:0", // single-region (us-east-1, us-east-2, us-west-1, us-west-2 only)
15
+ "modelId": "global.anthropic.claude-opus-4-5-20251101-v1:0", // cross-region inference profile
16
+ "vision": true,
17
+ "messages_api": true,
18
+ "system_as_separate_field": true,
19
+ "display_role_names": true,
20
+ "max_tokens_param_name": "max_tokens",
21
+ "max_supported_response_tokens": 131072,
22
+ "stop_sequences_param_name": "stop_sequences",
23
+ "response_chunk_element": "delta.text",
24
+ "response_nonchunk_element": "content[0].text",
25
+ "thinking_response_chunk_element": "delta.thinking",
26
+ "thinking_response_nonchunk_element": "content[0].thinking",
27
+ "special_request_schema": {
28
+ "anthropic_version": "bedrock-2023-05-31",
29
+ "anthropic_beta": ["output-128k-2025-02-19"],
30
+ },
31
+ "image_support": {
32
+ "max_image_size": 20971520, // 20MB
33
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
34
+ "max_images_per_request": 10
35
+ }
36
+ },
37
+ {
38
+ // ==============================
39
+ // == Claude 4.5 Opus Thinking ==
40
+ // ==============================
41
+ "modelName": "Claude-4-5-Opus-Thinking",
42
+ // "modelId": "anthropic.claude-opus-4-5-20251101-v1:0", // single-region (us-east-1, us-east-2, us-west-1, us-west-2 only)
43
+ "modelId": "global.anthropic.claude-opus-4-5-20251101-v1:0", // cross-region inference profile
44
+ "vision": true,
45
+ "messages_api": true,
46
+ "system_as_separate_field": true,
47
+ "display_role_names": true,
48
+ "max_tokens_param_name": "max_tokens",
49
+ "max_supported_response_tokens": 131072,
50
+ "stop_sequences_param_name": "stop_sequences",
51
+ "response_chunk_element": "delta.text",
52
+ "response_nonchunk_element": "content[0].text",
53
+ "thinking_response_chunk_element": "delta.thinking",
54
+ "thinking_response_nonchunk_element": "content[0].thinking",
55
+ "special_request_schema": {
56
+ "anthropic_version": "bedrock-2023-05-31",
57
+ "anthropic_beta": ["output-128k-2025-02-19"],
58
+ "thinking": {
59
+ "type": "enabled",
60
+ "budget_tokens": 16000
61
+ },
62
+ },
63
+ "image_support": {
64
+ "max_image_size": 20971520, // 20MB
65
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
66
+ "max_images_per_request": 10
67
+ }
68
+ },
9
69
  {
10
70
  // =====================
11
71
  // == Claude 4.1 Opus ==
@@ -191,8 +251,8 @@ export const bedrock_models = [
191
251
  // == Claude 4.5 Haiku ==
192
252
  // ======================
193
253
  "modelName": "Claude-4-5-Haiku",
194
- // "modelId": "anthropic.claude-haiku-4-5-20251001-v1:0",
195
- "modelId": "us.anthropic.claude-haiku-4-5-20251001-v1:0",
254
+ // "modelId": "us.anthropic.claude-haiku-4-5-20251001-v1:0",
255
+ "modelId": "global.anthropic.claude-haiku-4-5-20251001-v1:0",
196
256
  "vision": true,
197
257
  "messages_api": true,
198
258
  "system_as_separate_field": true,
@@ -219,8 +279,8 @@ export const bedrock_models = [
219
279
  // == Claude 4.5 Haiku Thinking ==
220
280
  // ===============================
221
281
  "modelName": "Claude-4-5-Haiku-Thinking",
222
- // "modelId": "anthropic.claude-haiku-4-5-20251001-v1:0",
223
- "modelId": "us.anthropic.claude-haiku-4-5-20251001-v1:0",
282
+ // "modelId": "us.anthropic.claude-haiku-4-5-20251001-v1:0",
283
+ "modelId": "global.anthropic.claude-haiku-4-5-20251001-v1:0",
224
284
  "vision": true,
225
285
  "messages_api": true,
226
286
  "system_as_separate_field": true,
@@ -369,7 +429,8 @@ export const bedrock_models = [
369
429
  // == Claude 3.5 Sonnet v2 ==
370
430
  // ==========================
371
431
  "modelName": "Claude-3-5-Sonnet-v2",
372
- "modelId": "anthropic.claude-3-5-sonnet-20241022-v2:0",
432
+ // "modelId": "anthropic.claude-3-5-sonnet-20241022-v2:0", // single-region
433
+ "modelId": "us.anthropic.claude-3-5-sonnet-20241022-v2:0", // cross-region inference profile
373
434
  "vision": true,
374
435
  "messages_api": true,
375
436
  "system_as_separate_field": true,
@@ -393,7 +454,8 @@ export const bedrock_models = [
393
454
  // == Claude 3.5 Sonnet ==
394
455
  // =======================
395
456
  "modelName": "Claude-3-5-Sonnet",
396
- "modelId": "anthropic.claude-3-5-sonnet-20240620-v1:0",
457
+ // "modelId": "anthropic.claude-3-5-sonnet-20240620-v1:0", // single-region
458
+ "modelId": "us.anthropic.claude-3-5-sonnet-20240620-v1:0", // cross-region inference profile
397
459
  "vision": true,
398
460
  "messages_api": true,
399
461
  "system_as_separate_field": true,
@@ -417,7 +479,8 @@ export const bedrock_models = [
417
479
  // == Claude 3.5 Haiku ==
418
480
  // ======================
419
481
  "modelName": "Claude-3-5-Haiku",
420
- "modelId": "anthropic.claude-3-5-haiku-20241022-v1:0",
482
+ // "modelId": "anthropic.claude-3-5-haiku-20241022-v1:0", // single-region
483
+ "modelId": "us.anthropic.claude-3-5-haiku-20241022-v1:0", // cross-region inference profile
421
484
  "vision": false,
422
485
  "messages_api": true,
423
486
  "system_as_separate_field": true,
@@ -436,7 +499,8 @@ export const bedrock_models = [
436
499
  // == Claude 3 Haiku ==
437
500
  // ====================
438
501
  "modelName": "Claude-3-Haiku",
439
- "modelId": "anthropic.claude-3-haiku-20240307-v1:0",
502
+ // "modelId": "anthropic.claude-3-haiku-20240307-v1:0", // single-region
503
+ "modelId": "us.anthropic.claude-3-haiku-20240307-v1:0", // cross-region inference profile
440
504
  "vision": true,
441
505
  "messages_api": true,
442
506
  "system_as_separate_field": true,
@@ -483,6 +547,62 @@ export const bedrock_models = [
483
547
  "max_supported_response_tokens": 2048,
484
548
  "response_chunk_element": "generation"
485
549
  },
550
+ {
551
+ // =======================
552
+ // == Llama 4 Scout 17b ==
553
+ // =======================
554
+ "modelName": "Llama-4-Scout-17b",
555
+ // "modelId": "meta.llama4-scout-17b-instruct-v1:0",
556
+ "modelId": "us.meta.llama4-scout-17b-instruct-v1:0",
557
+ "vision": true,
558
+ "messages_api": false,
559
+ "bos_text": "<|begin_of_text|>",
560
+ "role_system_message_prefix": "",
561
+ "role_system_message_suffix": "",
562
+ "role_system_prefix": "<|start_header_id|>",
563
+ "role_system_suffix": "<|end_header_id|>",
564
+ "role_user_message_prefix": "",
565
+ "role_user_message_suffix": "",
566
+ "role_user_prefix": "<|start_header_id|>",
567
+ "role_user_suffix": "<|end_header_id|>",
568
+ "role_assistant_message_prefix": "",
569
+ "role_assistant_message_suffix": "",
570
+ "role_assistant_prefix": "<|start_header_id|>",
571
+ "role_assistant_suffix": "<|end_header_id|>",
572
+ "eom_text": "<|eot_id|>",
573
+ "display_role_names": true,
574
+ "max_tokens_param_name": "max_gen_len",
575
+ "max_supported_response_tokens": 2048,
576
+ "response_chunk_element": "generation"
577
+ },
578
+ {
579
+ // ==========================
580
+ // == Llama 4 Maverick 17b ==
581
+ // ==========================
582
+ "modelName": "Llama-4-Maverick-17b",
583
+ // "modelId": "meta.llama4-maverick-17b-instruct-v1:0",
584
+ "modelId": "us.meta.llama4-maverick-17b-instruct-v1:0",
585
+ "vision": true,
586
+ "messages_api": false,
587
+ "bos_text": "<|begin_of_text|>",
588
+ "role_system_message_prefix": "",
589
+ "role_system_message_suffix": "",
590
+ "role_system_prefix": "<|start_header_id|>",
591
+ "role_system_suffix": "<|end_header_id|>",
592
+ "role_user_message_prefix": "",
593
+ "role_user_message_suffix": "",
594
+ "role_user_prefix": "<|start_header_id|>",
595
+ "role_user_suffix": "<|end_header_id|>",
596
+ "role_assistant_message_prefix": "",
597
+ "role_assistant_message_suffix": "",
598
+ "role_assistant_prefix": "<|start_header_id|>",
599
+ "role_assistant_suffix": "<|end_header_id|>",
600
+ "eom_text": "<|eot_id|>",
601
+ "display_role_names": true,
602
+ "max_tokens_param_name": "max_gen_len",
603
+ "max_supported_response_tokens": 2048,
604
+ "response_chunk_element": "generation"
605
+ },
486
606
  {
487
607
  // ==================
488
608
  // == Llama 3.2 1b ==
@@ -600,7 +720,8 @@ export const bedrock_models = [
600
720
  // == Llama 3.1 8b ==
601
721
  // ==================
602
722
  "modelName": "Llama-3-1-8b",
603
- "modelId": "meta.llama3-1-8b-instruct-v1:0",
723
+ // "modelId": "meta.llama3-1-8b-instruct-v1:0", // single-region
724
+ "modelId": "us.meta.llama3-1-8b-instruct-v1:0", // cross-region inference profile
604
725
  "vision": false,
605
726
  "messages_api": false,
606
727
  "bos_text": "<|begin_of_text|>",
@@ -627,7 +748,8 @@ export const bedrock_models = [
627
748
  // == Llama 3.1 70b ==
628
749
  // ===================
629
750
  "modelName": "Llama-3-1-70b",
630
- "modelId": "meta.llama3-1-70b-instruct-v1:0",
751
+ // "modelId": "meta.llama3-1-70b-instruct-v1:0", // single-region
752
+ "modelId": "us.meta.llama3-1-70b-instruct-v1:0", // cross-region inference profile
631
753
  "vision": false,
632
754
  "messages_api": false,
633
755
  "bos_text": "<|begin_of_text|>",
@@ -778,6 +900,30 @@ export const bedrock_models = [
778
900
  "max_images_per_request": 10
779
901
  }
780
902
  },
903
+ {
904
+ // =================
905
+ // == Nova 2 Lite ==
906
+ // =================
907
+ "modelName": "Nova-2-Lite",
908
+ "modelId": "us.amazon.nova-2-lite-v1:0",
909
+ "vision": true,
910
+ "messages_api": true,
911
+ "system_as_separate_field": true,
912
+ "display_role_names": true,
913
+ "max_tokens_param_name": "maxTokens",
914
+ "max_supported_response_tokens": 5000,
915
+ "stop_sequences_param_name": "stopSequences",
916
+ "response_chunk_element": "contentBlockDelta.delta.text",
917
+ "response_nonchunk_element": "output.message.content[0].text",
918
+ "special_request_schema": {
919
+ "schemaVersion": "messages-v1"
920
+ },
921
+ "image_support": {
922
+ "max_image_size": 5242880,
923
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
924
+ "max_images_per_request": 10
925
+ }
926
+ },
781
927
  {
782
928
  // =================
783
929
  // == Nova Micro ==
@@ -960,6 +1106,117 @@ export const bedrock_models = [
960
1106
  "stop_sequences_param_name": "stop",
961
1107
  "response_chunk_element": "outputs[0].text"
962
1108
  },
1109
+ {
1110
+ // =====================
1111
+ // == Mistral Large 3 ==
1112
+ // =====================
1113
+ "modelName": "Mistral-Large-3",
1114
+ "modelId": "mistral.mistral-large-3-675b-instruct",
1115
+ "vision": true,
1116
+ "messages_api": true,
1117
+ "system_as_separate_field": false,
1118
+ "display_role_names": true,
1119
+ "skip_empty_assistant_message": true,
1120
+ "max_tokens_param_name": "max_tokens",
1121
+ "max_supported_response_tokens": 32768,
1122
+ "stop_sequences_param_name": "stop",
1123
+ "response_chunk_element": "choices[0].delta.content",
1124
+ "response_nonchunk_element": "choices[0].message.content",
1125
+ "special_request_schema": {},
1126
+ "image_support": {
1127
+ "max_image_size": 20971520,
1128
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
1129
+ "max_images_per_request": 10
1130
+ }
1131
+ },
1132
+ {
1133
+ // ====================
1134
+ // == Ministral 3 3b ==
1135
+ // ====================
1136
+ "modelName": "Ministral-3-3b",
1137
+ "modelId": "mistral.ministral-3-3b-instruct",
1138
+ "vision": true,
1139
+ "messages_api": true,
1140
+ "system_as_separate_field": false,
1141
+ "display_role_names": true,
1142
+ "skip_empty_assistant_message": true,
1143
+ "max_tokens_param_name": "max_tokens",
1144
+ "max_supported_response_tokens": 8192,
1145
+ "stop_sequences_param_name": "stop",
1146
+ "response_chunk_element": "choices[0].delta.content",
1147
+ "response_nonchunk_element": "choices[0].message.content",
1148
+ "special_request_schema": {},
1149
+ "image_support": {
1150
+ "max_image_size": 20971520,
1151
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
1152
+ "max_images_per_request": 10
1153
+ }
1154
+ },
1155
+ {
1156
+ // ====================
1157
+ // == Ministral 3 8b ==
1158
+ // ====================
1159
+ "modelName": "Ministral-3-8b",
1160
+ "modelId": "mistral.ministral-3-8b-instruct",
1161
+ "vision": true,
1162
+ "messages_api": true,
1163
+ "system_as_separate_field": false,
1164
+ "display_role_names": true,
1165
+ "skip_empty_assistant_message": true,
1166
+ "max_tokens_param_name": "max_tokens",
1167
+ "max_supported_response_tokens": 8192,
1168
+ "stop_sequences_param_name": "stop",
1169
+ "response_chunk_element": "choices[0].delta.content",
1170
+ "response_nonchunk_element": "choices[0].message.content",
1171
+ "special_request_schema": {},
1172
+ "image_support": {
1173
+ "max_image_size": 20971520,
1174
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
1175
+ "max_images_per_request": 10
1176
+ }
1177
+ },
1178
+ {
1179
+ // =====================
1180
+ // == Ministral 3 14b ==
1181
+ // =====================
1182
+ "modelName": "Ministral-3-14b",
1183
+ "modelId": "mistral.ministral-3-14b-instruct",
1184
+ "vision": true,
1185
+ "messages_api": true,
1186
+ "system_as_separate_field": false,
1187
+ "display_role_names": true,
1188
+ "skip_empty_assistant_message": true,
1189
+ "max_tokens_param_name": "max_tokens",
1190
+ "max_supported_response_tokens": 16384,
1191
+ "stop_sequences_param_name": "stop",
1192
+ "response_chunk_element": "choices[0].delta.content",
1193
+ "response_nonchunk_element": "choices[0].message.content",
1194
+ "special_request_schema": {},
1195
+ "image_support": {
1196
+ "max_image_size": 20971520,
1197
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
1198
+ "max_images_per_request": 10
1199
+ }
1200
+ },
1201
+ {
1202
+ // ==========================
1203
+ // == Magistral Small 2509 ==
1204
+ // ==========================
1205
+ "modelName": "Magistral-Small-2509",
1206
+ "modelId": "mistral.magistral-small-2509",
1207
+ "vision": false,
1208
+ "messages_api": true,
1209
+ "converse_api_only": true,
1210
+ "system_as_separate_field": false,
1211
+ "display_role_names": true,
1212
+ "skip_empty_assistant_message": true,
1213
+ "max_tokens_param_name": "max_tokens",
1214
+ "max_supported_response_tokens": 8192,
1215
+ "stop_sequences_param_name": "stop",
1216
+ "response_chunk_element": "choices[0].delta.content",
1217
+ "response_nonchunk_element": "choices[0].message.content",
1218
+ "special_request_schema": {}
1219
+ },
963
1220
  {
964
1221
  // ================
965
1222
  // == Qwen3-32B ==
@@ -1028,6 +1285,23 @@ export const bedrock_models = [
1028
1285
  "response_nonchunk_element": "choices[0].message.content",
1029
1286
  "special_request_schema": {}
1030
1287
  },
1288
+ {
1289
+ // ========================
1290
+ // == Qwen3-Next-80B-A3B ==
1291
+ // ========================
1292
+ "modelName": "Qwen3-Next-80B-A3B",
1293
+ "modelId": "qwen.qwen3-next-80b-a3b",
1294
+ "vision": false,
1295
+ "messages_api": true,
1296
+ "system_as_separate_field": false,
1297
+ "display_role_names": true,
1298
+ "max_tokens_param_name": "max_tokens",
1299
+ "max_supported_response_tokens": 32768,
1300
+ "stop_sequences_param_name": "stop",
1301
+ "response_chunk_element": "choices[0].delta.content",
1302
+ "response_nonchunk_element": "choices[0].message.content",
1303
+ "special_request_schema": {}
1304
+ },
1031
1305
  {
1032
1306
  // =================
1033
1307
  // == DeepSeek-R1 ==
@@ -1076,4 +1350,128 @@ export const bedrock_models = [
1076
1350
  "response_nonchunk_element": "choices[0].message.content",
1077
1351
  "special_request_schema": {}
1078
1352
  },
1079
- ];
1353
+ {
1354
+ // ========================================
1355
+ // == Google Gemma Models ==
1356
+ // ========================================
1357
+ // == Gemma-3-4b ==
1358
+ // ================
1359
+ "modelName": "Gemma-3-4b",
1360
+ "modelId": "google.gemma-3-4b-it",
1361
+ "vision": true,
1362
+ "messages_api": true,
1363
+ "system_as_separate_field": false,
1364
+ "display_role_names": true,
1365
+ "max_tokens_param_name": "max_tokens",
1366
+ "max_supported_response_tokens": 8192,
1367
+ "stop_sequences_param_name": "stop",
1368
+ "response_chunk_element": "choices[0].delta.content",
1369
+ "response_nonchunk_element": "choices[0].message.content",
1370
+ "special_request_schema": {},
1371
+ "image_support": {
1372
+ "max_image_size": 20971520,
1373
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
1374
+ "max_images_per_request": 10
1375
+ }
1376
+ },
1377
+ {
1378
+ // =================
1379
+ // == Gemma-3-12b ==
1380
+ // =================
1381
+ "modelName": "Gemma-3-12b",
1382
+ "modelId": "google.gemma-3-12b-it",
1383
+ "vision": true,
1384
+ "messages_api": true,
1385
+ "system_as_separate_field": false,
1386
+ "display_role_names": true,
1387
+ "max_tokens_param_name": "max_tokens",
1388
+ "max_supported_response_tokens": 8192,
1389
+ "stop_sequences_param_name": "stop",
1390
+ "response_chunk_element": "choices[0].delta.content",
1391
+ "response_nonchunk_element": "choices[0].message.content",
1392
+ "special_request_schema": {},
1393
+ "image_support": {
1394
+ "max_image_size": 20971520,
1395
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
1396
+ "max_images_per_request": 10
1397
+ }
1398
+ },
1399
+ {
1400
+ // =================
1401
+ // == Gemma-3-27b ==
1402
+ // =================
1403
+ "modelName": "Gemma-3-27b",
1404
+ "modelId": "google.gemma-3-27b-it",
1405
+ "vision": true,
1406
+ "messages_api": true,
1407
+ "system_as_separate_field": false,
1408
+ "display_role_names": true,
1409
+ "max_tokens_param_name": "max_tokens",
1410
+ "max_supported_response_tokens": 8192,
1411
+ "stop_sequences_param_name": "stop",
1412
+ "response_chunk_element": "choices[0].delta.content",
1413
+ "response_nonchunk_element": "choices[0].message.content",
1414
+ "special_request_schema": {},
1415
+ "image_support": {
1416
+ "max_image_size": 20971520,
1417
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
1418
+ "max_images_per_request": 10
1419
+ }
1420
+ },
1421
+ {
1422
+ // ========================================
1423
+ // == Moonshot AI Kimi Models ==
1424
+ // ========================================
1425
+ // == Kimi-K2 ==
1426
+ // =============
1427
+ "modelName": "Kimi-K2",
1428
+ "modelId": "moonshot.kimi-k2-thinking", // NOTE(review): identical to the Kimi-K2-Thinking entry's modelId below — confirm whether a non-thinking Kimi K2 model id exists on Bedrock, otherwise this entry is a duplicate
1429
+ "vision": false,
1430
+ "messages_api": true,
1431
+ "system_as_separate_field": false,
1432
+ "display_role_names": true,
1433
+ "max_tokens_param_name": "max_tokens",
1434
+ "max_supported_response_tokens": 32768,
1435
+ "stop_sequences_param_name": "stop",
1436
+ "response_chunk_element": "choices[0].delta.content",
1437
+ "response_nonchunk_element": "choices[0].message.content",
1438
+ "special_request_schema": {}
1439
+ },
1440
+ {
1441
+ // ======================
1442
+ // == Kimi-K2-Thinking ==
1443
+ // ======================
1444
+ "modelName": "Kimi-K2-Thinking",
1445
+ "modelId": "moonshot.kimi-k2-thinking",
1446
+ "vision": false,
1447
+ "messages_api": true,
1448
+ "system_as_separate_field": false,
1449
+ "display_role_names": true,
1450
+ "max_tokens_param_name": "max_tokens",
1451
+ "max_supported_response_tokens": 32768,
1452
+ "stop_sequences_param_name": "stop",
1453
+ "response_chunk_element": "choices[0].delta.content",
1454
+ "response_nonchunk_element": "choices[0].message.content",
1455
+ "preserve_reasoning": true,
1456
+ "special_request_schema": {}
1457
+ },
1458
+ {
1459
+ // ========================================
1460
+ // == MiniMax Models ==
1461
+ // ========================================
1462
+ // == MiniMax-M2 ==
1463
+ // ================
1464
+ "modelName": "MiniMax-M2",
1465
+ "modelId": "minimax.minimax-m2",
1466
+ "vision": false,
1467
+ "messages_api": true,
1468
+ "system_as_separate_field": false,
1469
+ "display_role_names": true,
1470
+ "max_tokens_param_name": "max_tokens",
1471
+ "max_supported_response_tokens": 32768,
1472
+ "stop_sequences_param_name": "stop",
1473
+ "response_chunk_element": "choices[0].delta.content",
1474
+ "response_nonchunk_element": "choices[0].message.content",
1475
+ "special_request_schema": {}
1476
+ },
1477
+ ];
@@ -222,7 +222,7 @@ async function processMessagesForInvoke(messages, awsModel) {
222
222
  message_cleaned.push(messages[i]);
223
223
  }
224
224
 
225
- if (i === (messages.length - 1) && messages[i].content !== "" && awsModel.display_role_names) {
225
+ if (i === (messages.length - 1) && messages[i].content !== "" && awsModel.display_role_names && !awsModel.skip_empty_assistant_message) {
226
226
  message_cleaned.push({role: "assistant", content: ""});
227
227
  }
228
228
  }
@@ -610,7 +610,8 @@ export async function* bedrockWrapper(awsCreds, openaiChatCompletionsCreateObjec
610
610
  const reasoningContent = event.contentBlockDelta.delta?.reasoningContent;
611
611
 
612
612
  // Handle Claude thinking data (streaming) - check both reasoningContent and thinking
613
- const thinkingText = reasoningContent?.reasoningText?.text || thinking;
613
+ // Note: streaming has delta.reasoningContent.text, non-streaming has reasoningContent.reasoningText.text
614
+ const thinkingText = reasoningContent?.reasoningText?.text || reasoningContent?.text || thinking;
614
615
  if (should_think && thinkingText) {
615
616
  if (!is_thinking) {
616
617
  is_thinking = true;
@@ -633,6 +634,11 @@ export async function* bedrockWrapper(awsCreds, openaiChatCompletionsCreateObjec
633
634
  }
634
635
  }
635
636
  }
637
+ // Fallback: some models (e.g. Magistral) stream their answer via reasoningContent
638
+ // even when thinking mode is disabled — surface that content as the response text.
639
+ else if (!should_think && thinkingText) {
640
+ yield thinkingText;
641
+ }
636
642
  }
637
643
  }
638
644
 
@@ -655,25 +661,35 @@ export async function* bedrockWrapper(awsCreds, openaiChatCompletionsCreateObjec
655
661
 
656
662
  for (const contentBlock of response.output.message.content) {
657
663
  // Extract thinking data for Claude models (from reasoningContent)
658
- if (include_thinking_data && contentBlock.reasoningContent &&
664
+ if (include_thinking_data && contentBlock.reasoningContent &&
659
665
  awsModel.special_request_schema?.thinking?.type === "enabled") {
660
666
  const reasoningText = contentBlock.reasoningContent.reasoningText?.text;
661
667
  if (reasoningText) {
662
668
  thinking_result += reasoningText;
663
669
  }
664
670
  }
665
-
671
+
666
672
  // Also check for legacy thinking field format
667
- if (include_thinking_data && contentBlock.thinking &&
673
+ if (include_thinking_data && contentBlock.thinking &&
668
674
  awsModel.special_request_schema?.thinking?.type === "enabled") {
669
675
  thinking_result += contentBlock.thinking;
670
676
  }
671
-
677
+
672
678
  // Extract regular text content
673
679
  if (contentBlock.text) {
674
680
  text_result += contentBlock.text;
675
681
  }
676
682
  }
683
+
684
+ // Fallback: if no regular text but reasoningContent exists (for models like Magistral),
685
+ // extract the reasoning text as the actual response
686
+ if (!text_result) {
687
+ for (const contentBlock of response.output.message.content) {
688
+ if (contentBlock.reasoningContent?.reasoningText?.text) {
689
+ text_result += contentBlock.reasoningContent.reasoningText.text;
690
+ }
691
+ }
692
+ }
677
693
 
678
694
  // Process reasoning tags for GPT-OSS models
679
695
  text_result = processReasoningTags(text_result, awsModel);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bedrock-wrapper",
3
- "version": "2.7.2",
3
+ "version": "2.9.0",
4
4
  "description": "🪨 Bedrock Wrapper is an npm package that simplifies the integration of existing OpenAI-compatible API objects with AWS Bedrock's serverless inference LLMs.",
5
5
  "homepage": "https://www.equilllabs.com/projects/bedrock-wrapper",
6
6
  "repository": {
@@ -41,7 +41,7 @@
41
41
  "author": "",
42
42
  "license": "ISC",
43
43
  "dependencies": {
44
- "@aws-sdk/client-bedrock-runtime": "^3.934.0",
44
+ "@aws-sdk/client-bedrock-runtime": "^3.943.0",
45
45
  "dotenv": "^17.2.3",
46
46
  "sharp": "^0.34.5"
47
47
  },