bedrock-wrapper 1.0.13 → 1.0.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,5 +1,5 @@
1
1
  # 🪨 Bedrock Wrapper
2
- Bedrock Wrapper is an npm package that simplifies the integration of existing OpenAI-compatible API objects with AWS Bedrock's serverless inference LLMs. Follow the steps below to integrate into your own application, or alternativly use the 🪨 [Bedrock Proxy Endpoint](https://github.com/jparkerweb/bedrock-proxy-endpoint) project to spin up your own custom OpenAI server endpoint for even easier inference (using the standard `baseUrl`, and `apiKey` params).
2
+ Bedrock Wrapper is an npm package that simplifies the integration of existing OpenAI-compatible API objects with AWS Bedrock's serverless inference LLMs. Follow the steps below to integrate into your own application, or alternatively use the 🔀 [Bedrock Proxy Endpoint](https://github.com/jparkerweb/bedrock-proxy-endpoint) project to spin up your own custom OpenAI server endpoint for even easier inference (using the standard `baseUrl`, and `apiKey` params).
3
3
 
4
4
  ---
5
5
 
@@ -98,6 +98,7 @@ Bedrock Wrapper is an npm package that simplifies the integration of existing Op
98
98
  |----------------|------------------------------------|
99
99
  | Llama-3-8b | meta.llama3-8b-instruct-v1:0 |
100
100
  | Llama-3-70b | meta.llama3-70b-instruct-v1:0 |
101
+ | Mistral-7b | mistral.mistral-7b-instruct-v0:2 |
101
102
  | Mixtral-8x7b | mistral.mixtral-8x7b-instruct-v0:1 |
102
103
  | Mistral-Large | mistral.mistral-large-2402-v1:0 |
103
104
 
package/bedrock-models.js CHANGED
@@ -48,6 +48,30 @@ export const bedrock_models = [
48
48
  "max_tokens_param_name": "max_gen_len",
49
49
  "response_chunk_element": "generation",
50
50
  },
51
+ {
52
+ // ================
53
+ // == Mistral-7b ==
54
+ // ================
55
+ "modelName": "Mistral-7b",
56
+ "modelId": "mistral.mistral-7b-instruct-v0:2",
57
+ "bos_text": "<s>",
58
+ "role_system_message_prefix": "",
59
+ "role_system_message_suffix": "",
60
+ "role_system_prefix": "",
61
+ "role_system_suffix": "",
62
+ "role_user_message_prefix": "[INST]",
63
+ "role_user_message_suffix": "[/INST]",
64
+ "role_user_prefix": "",
65
+ "role_user_suffix": "",
66
+ "role_assistant_message_prefix": "",
67
+ "role_assistant_message_suffix": "",
68
+ "role_assistant_prefix": "",
69
+ "role_assistant_suffix": "",
70
+ "eom_text": "</s>",
71
+ "display_role_names": false,
72
+ "max_tokens_param_name": "max_tokens",
73
+ "response_chunk_element": "outputs[0].text",
74
+ },
51
75
  {
52
76
  // ==================
53
77
  // == Mixtral-8x7b ==
Binary file
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bedrock-wrapper",
3
- "version": "1.0.13",
3
+ "version": "1.0.14",
4
4
  "description": "🪨 Bedrock Wrapper is an npm package that simplifies the integration of existing OpenAI-compatible API objects with AWS Bedrock's serverless inference LLMs.",
5
5
  "repository": {
6
6
  "type": "git",
Binary file