bedrock-wrapper 2.5.0 → 2.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,6 +1,12 @@
1
1
  # Changelog
2
2
  All notable changes to this project will be documented in this file.
3
3
 
4
+ ## [2.6.1] - 2025-09-30 (Claude Sonnet 4.5)
5
+ ### Added
6
+ - Support for Claude Sonnet 4.5 models
7
+   - Claude-4-5-Sonnet
8
+   - Claude-4-5-Sonnet-Thinking
9
+
4
10
  ## [2.5.0] - 2025-08-12 (Converse API)
5
11
  ### Added
6
12
  - Support for Converse API (streaming and non-streaming)
package/README.md CHANGED
@@ -39,7 +39,7 @@ Bedrock Wrapper is an npm package that simplifies the integration of existing Op
39
39
  ```javascript
40
40
  const openaiChatCompletionsCreateObject = {
41
41
  "messages": messages,
42
- "model": "Llama-3-1-8b",
42
+ "model": "Claude-4-5-Sonnet",
43
43
  "max_tokens": LLM_MAX_GEN_TOKENS,
44
44
  "stream": true,
45
45
  "temperature": LLM_TEMPERATURE,
@@ -126,6 +126,8 @@ Bedrock Wrapper is an npm package that simplifies the integration of existing Op
126
126
  | Claude-4-1-Opus-Thinking | us.anthropic.claude-opus-4-1-20250805-v1:0 | ✅ |
127
127
  | Claude-4-Opus | us.anthropic.claude-opus-4-20250514-v1:0 | ✅ |
128
128
  | Claude-4-Opus-Thinking | us.anthropic.claude-opus-4-20250514-v1:0 | ✅ |
129
+ | Claude-4-5-Sonnet | us.anthropic.claude-sonnet-4-5-20250929-v1:0 | ✅ |
130
+ | Claude-4-5-Sonnet-Thinking | us.anthropic.claude-sonnet-4-5-20250929-v1:0 | ✅ |
129
131
  | Claude-4-Sonnet | us.anthropic.claude-sonnet-4-20250514-v1:0 | ✅ |
130
132
  | Claude-4-Sonnet-Thinking | us.anthropic.claude-sonnet-4-20250514-v1:0 | ✅ |
131
133
  | Claude-3-7-Sonnet-Thinking | us.anthropic.claude-3-7-sonnet-20250219-v1:0 | ✅ |
@@ -168,7 +170,7 @@ Please modify the `bedrock_models.js` file and submit a PR 🏆 or create an Iss
168
170
 
169
171
  ### Image Support
170
172
 
171
- For models with image support (Claude 4 series, Claude 3.7 Sonnet, Claude 3.5 Sonnet, Claude 3 Haiku, Nova Pro, and Nova Lite), you can include images in your messages using the following format (not all models support system prompts):
173
+ For models with image support (Claude 4+ series including Claude 4.5 Sonnet, Claude 3.7 Sonnet, Claude 3.5 Sonnet, Claude 3 Haiku, Nova Pro, and Nova Lite), you can include images in your messages using the following format (not all models support system prompts):
172
174
 
173
175
  ```javascript
174
176
  messages = [
@@ -257,6 +259,41 @@ const result = await bedrockWrapper(awsCreds, {
257
259
 
258
260
  ---
259
261
 
262
+ ### Parameter Restrictions
263
+
264
+ Some AWS Bedrock models have specific parameter restrictions that are automatically handled by the wrapper:
265
+
266
+ #### Claude 4+ Models (Temperature/Top-P Mutual Exclusion)
267
+
268
+ **Affected Models:**
269
+ - Claude-4-5-Sonnet & Claude-4-5-Sonnet-Thinking
270
+ - Claude-4-Sonnet & Claude-4-Sonnet-Thinking
271
+ - Claude-4-Opus & Claude-4-Opus-Thinking
272
+ - Claude-4-1-Opus & Claude-4-1-Opus-Thinking
273
+
274
+ **Restriction:** These models cannot accept both `temperature` and `top_p` parameters simultaneously.
275
+
276
+ **Automatic Handling:** When both parameters are provided, the wrapper automatically:
277
+ 1. **Keeps `temperature`** (prioritized as more commonly used)
278
+ 2. **Removes `top_p`** to prevent validation errors
279
+ 3. **Works with both APIs** (Invoke API and Converse API)
280
+
281
+ ```javascript
282
+ const request = {
283
+ messages: [{ role: "user", content: "Hello" }],
284
+ model: "Claude-4-5-Sonnet",
285
+ temperature: 0.7, // ✅ Kept
286
+ top_p: 0.9 // ❌ Automatically removed
287
+ };
288
+
289
+ // No error thrown - wrapper handles the restriction automatically
290
+ const response = await bedrockWrapper(awsCreds, request);
291
+ ```
292
+
293
+ **Why This Happens:** AWS Bedrock enforces this restriction on newer Claude models to ensure optimal performance and prevent conflicting sampling parameters.
294
+
295
+ ---
296
+
260
297
  ### 🧪 Testing
261
298
 
262
299
  The package includes comprehensive test suites to verify functionality:
package/bedrock-models.js CHANGED
@@ -126,6 +126,72 @@ export const bedrock_models = [
126
126
  "max_images_per_request": 10
127
127
  }
128
128
  },
129
+ {
130
+ // =======================
131
+ // == Claude 4.5 Sonnet ==
132
+ // =======================
133
+ "modelName": "Claude-4-5-Sonnet",
134
+ // "modelId": "anthropic.claude-sonnet-4-5-20250929-v1:0",
135
+ "modelId": "us.anthropic.claude-sonnet-4-5-20250929-v1:0",
136
+ "vision": true,
137
+ "messages_api": true,
138
+ "system_as_separate_field": true,
139
+ "display_role_names": true,
140
+ "max_tokens_param_name": "max_tokens",
141
+ "max_supported_response_tokens": 131072,
142
+ "stop_sequences_param_name": "stop_sequences",
143
+ "response_chunk_element": "delta.text",
144
+ "response_nonchunk_element": "content[0].text",
145
+ "thinking_response_chunk_element": "delta.thinking",
146
+ "thinking_response_nonchunk_element": "content[0].thinking",
147
+ "parameter_restrictions": {
148
+ "mutually_exclusive": [["temperature", "top_p"]]
149
+ },
150
+ "special_request_schema": {
151
+ "anthropic_version": "bedrock-2023-05-31",
152
+ "anthropic_beta": ["output-128k-2025-02-19"],
153
+ },
154
+ "image_support": {
155
+ "max_image_size": 20971520, // 20MB
156
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
157
+ "max_images_per_request": 10
158
+ }
159
+ },
160
+ {
161
+ // ================================
162
+ // == Claude 4.5 Sonnet Thinking ==
163
+ // ================================
164
+ "modelName": "Claude-4-5-Sonnet-Thinking",
165
+ // "modelId": "anthropic.claude-sonnet-4-5-20250929-v1:0",
166
+ "modelId": "us.anthropic.claude-sonnet-4-5-20250929-v1:0",
167
+ "vision": true,
168
+ "messages_api": true,
169
+ "system_as_separate_field": true,
170
+ "display_role_names": true,
171
+ "max_tokens_param_name": "max_tokens",
172
+ "max_supported_response_tokens": 131072,
173
+ "stop_sequences_param_name": "stop_sequences",
174
+ "response_chunk_element": "delta.text",
175
+ "response_nonchunk_element": "content[0].text",
176
+ "thinking_response_chunk_element": "delta.thinking",
177
+ "thinking_response_nonchunk_element": "content[0].thinking",
178
+ "parameter_restrictions": {
179
+ "mutually_exclusive": [["temperature", "top_p"]]
180
+ },
181
+ "special_request_schema": {
182
+ "anthropic_version": "bedrock-2023-05-31",
183
+ "anthropic_beta": ["output-128k-2025-02-19"],
184
+ "thinking": {
185
+ "type": "enabled",
186
+ "budget_tokens": 16000
187
+ },
188
+ },
189
+ "image_support": {
190
+ "max_image_size": 20971520, // 20MB
191
+ "supported_formats": ["jpeg", "png", "gif", "webp"],
192
+ "max_images_per_request": 10
193
+ }
194
+ },
129
195
  {
130
196
  // =====================
131
197
  // == Claude 4 Sonnet ==
@@ -144,6 +210,9 @@ export const bedrock_models = [
144
210
  "response_nonchunk_element": "content[0].text",
145
211
  "thinking_response_chunk_element": "delta.thinking",
146
212
  "thinking_response_nonchunk_element": "content[0].thinking",
213
+ "parameter_restrictions": {
214
+ "mutually_exclusive": [["temperature", "top_p"]]
215
+ },
147
216
  "special_request_schema": {
148
217
  "anthropic_version": "bedrock-2023-05-31",
149
218
  "anthropic_beta": ["output-128k-2025-02-19"],
@@ -172,6 +241,9 @@ export const bedrock_models = [
172
241
  "response_nonchunk_element": "content[0].text",
173
242
  "thinking_response_chunk_element": "delta.thinking",
174
243
  "thinking_response_nonchunk_element": "content[0].thinking",
244
+ "parameter_restrictions": {
245
+ "mutually_exclusive": [["temperature", "top_p"]]
246
+ },
175
247
  "special_request_schema": {
176
248
  "anthropic_version": "bedrock-2023-05-31",
177
249
  "anthropic_beta": ["output-128k-2025-02-19"],
@@ -288,6 +288,57 @@ function buildInvokePrompt(message_cleaned, awsModel) {
288
288
  }
289
289
  }
290
290
 
291
+ // Apply parameter restrictions for models that have them
292
+ function applyParameterRestrictions(params, awsModel) {
293
+ if (!awsModel.parameter_restrictions) {
294
+ return params;
295
+ }
296
+
297
+ const restrictions = awsModel.parameter_restrictions;
298
+
299
+ // Handle mutually exclusive parameters
300
+ if (restrictions.mutually_exclusive) {
301
+ for (const exclusiveGroup of restrictions.mutually_exclusive) {
302
+ // Check for both top_p and topP variants
303
+ const presentParams = exclusiveGroup.filter(param => {
304
+ if (param === 'top_p') {
305
+ return (params['top_p'] !== undefined && params['top_p'] !== null) ||
306
+ (params['topP'] !== undefined && params['topP'] !== null);
307
+ }
308
+ return params[param] !== undefined && params[param] !== null;
309
+ });
310
+
311
+ if (presentParams.length > 1) {
312
+ // Keep the first parameter and remove others
313
+ // For temperature/top_p, prioritize temperature as it's more commonly used
314
+ const priorityOrder = ['temperature', 'top_p'];
315
+ const sortedParams = presentParams.sort((a, b) => {
316
+ const aIndex = priorityOrder.indexOf(a);
317
+ const bIndex = priorityOrder.indexOf(b);
318
+ if (aIndex !== -1 && bIndex !== -1) return aIndex - bIndex;
319
+ if (aIndex !== -1) return -1;
320
+ if (bIndex !== -1) return 1;
321
+ return 0;
322
+ });
323
+
324
+ // Keep the first (highest priority) parameter, remove others
325
+ for (let i = 1; i < sortedParams.length; i++) {
326
+ const paramToRemove = sortedParams[i];
327
+ if (paramToRemove === 'top_p') {
328
+ // Remove both variants
329
+ delete params['top_p'];
330
+ delete params['topP'];
331
+ } else {
332
+ delete params[paramToRemove];
333
+ }
334
+ }
335
+ }
336
+ }
337
+ }
338
+
339
+ return params;
340
+ }
341
+
291
342
  // Build request object for Invoke API (model-specific)
292
343
  function buildInvokeRequest(prompt, awsModel, max_gen_tokens, temperature, top_p, stop_sequences, stop, system_message) {
293
344
  if (awsModel.messages_api) {
@@ -328,17 +379,24 @@ function buildInvokeRequest(prompt, awsModel, max_gen_tokens, temperature, top_p
328
379
  });
329
380
 
330
381
  const stopSequencesValue = stop_sequences || stop;
382
+
383
+ // Build inference config with parameter restrictions
384
+ let inferenceConfig = {
385
+ [awsModel.max_tokens_param_name]: max_gen_tokens,
386
+ temperature: temperature,
387
+ topP: top_p,
388
+ ...(awsModel.stop_sequences_param_name && stopSequencesValue && {
389
+ [awsModel.stop_sequences_param_name]: Array.isArray(stopSequencesValue) ? stopSequencesValue : [stopSequencesValue]
390
+ })
391
+ };
392
+
393
+ // Apply parameter restrictions
394
+ inferenceConfig = applyParameterRestrictions(inferenceConfig, awsModel);
395
+
331
396
  const novaRequest = {
332
397
  ...awsModel.special_request_schema,
333
398
  messages: novaMessages,
334
- inferenceConfig: {
335
- [awsModel.max_tokens_param_name]: max_gen_tokens,
336
- temperature: temperature,
337
- topP: top_p,
338
- ...(awsModel.stop_sequences_param_name && stopSequencesValue && {
339
- [awsModel.stop_sequences_param_name]: Array.isArray(stopSequencesValue) ? stopSequencesValue : [stopSequencesValue]
340
- })
341
- }
399
+ inferenceConfig: inferenceConfig
342
400
  };
343
401
 
344
402
  // Add system message if present
@@ -350,7 +408,9 @@ function buildInvokeRequest(prompt, awsModel, max_gen_tokens, temperature, top_p
350
408
  } else {
351
409
  // Standard messages API format (Claude, etc.)
352
410
  const stopSequencesValue = stop_sequences || stop;
353
- return {
411
+
412
+ // Build request with parameter restrictions
413
+ let request = {
354
414
  messages: prompt,
355
415
  ...(awsModel.system_as_separate_field && system_message && { system: system_message }),
356
416
  [awsModel.max_tokens_param_name]: max_gen_tokens,
@@ -361,9 +421,15 @@ function buildInvokeRequest(prompt, awsModel, max_gen_tokens, temperature, top_p
361
421
  }),
362
422
  ...awsModel.special_request_schema
363
423
  };
424
+
425
+ // Apply parameter restrictions
426
+ request = applyParameterRestrictions(request, awsModel);
427
+
428
+ return request;
364
429
  }
365
430
  } else {
366
- return {
431
+ // Build request for non-messages API models (Llama, etc.)
432
+ let request = {
367
433
  prompt: typeof prompt === 'string' ? prompt : {
368
434
  messages: prompt.map(msg => ({
369
435
  role: msg.role,
@@ -386,6 +452,11 @@ function buildInvokeRequest(prompt, awsModel, max_gen_tokens, temperature, top_p
386
452
  })(),
387
453
  ...awsModel.special_request_schema
388
454
  };
455
+
456
+ // Apply parameter restrictions
457
+ request = applyParameterRestrictions(request, awsModel);
458
+
459
+ return request;
389
460
  }
390
461
  }
391
462
 
@@ -512,12 +583,15 @@ export async function* bedrockWrapper(awsCreds, openaiChatCompletionsCreateObjec
512
583
  const { messages: converseMessages, system: systemPrompts } = await convertToConverseFormat(messages);
513
584
 
514
585
  // Build inference configuration (handle thinking mode for Claude models)
515
- const inferenceConfig = {
586
+ let inferenceConfig = {
516
587
  maxTokens: max_gen_tokens,
517
588
  temperature: temperature,
518
589
  ...(top_p !== undefined && { topP: top_p })
519
590
  };
520
591
 
592
+ // Apply parameter restrictions for Converse API
593
+ inferenceConfig = applyParameterRestrictions(inferenceConfig, awsModel);
594
+
521
595
  // Handle thinking mode for Claude models
522
596
  let budget_tokens;
523
597
  if (awsModel.special_request_schema?.thinking?.type === "enabled") {
@@ -48,6 +48,20 @@
48
48
  "hook_event_name": "Notification",
49
49
  "message": "Claude is waiting for your input"
50
50
  },
51
+ {
52
+ "session_id": "e4cf59ef-9d22-45bf-9c6c-53e3cb9efda3",
53
+ "transcript_path": "C:\\Users\\Justin.Parker\\.claude\\projects\\C--git-bedrock-wrapper\\e4cf59ef-9d22-45bf-9c6c-53e3cb9efda3.jsonl",
54
+ "cwd": "C:\\git\\bedrock-wrapper",
55
+ "hook_event_name": "Notification",
56
+ "message": "Claude is waiting for your input"
57
+ },
58
+ {
59
+ "session_id": "e4cf59ef-9d22-45bf-9c6c-53e3cb9efda3",
60
+ "transcript_path": "C:\\Users\\Justin.Parker\\.claude\\projects\\C--git-bedrock-wrapper\\e4cf59ef-9d22-45bf-9c6c-53e3cb9efda3.jsonl",
61
+ "cwd": "C:\\git\\bedrock-wrapper",
62
+ "hook_event_name": "Notification",
63
+ "message": "Claude needs your permission to use "
64
+ },
51
65
  {
52
66
  "session_id": "e4cf59ef-9d22-45bf-9c6c-53e3cb9efda3",
53
67
  "transcript_path": "C:\\Users\\Justin.Parker\\.claude\\projects\\C--git-bedrock-wrapper\\e4cf59ef-9d22-45bf-9c6c-53e3cb9efda3.jsonl",