bedrock-wrapper 2.2.0 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +17 -0
- package/README.md +74 -21
- package/bedrock-models.js +507 -419
- package/bedrock-wrapper.js +223 -36
- package/docs/bedrock-wrapper.jpg +0 -0
- package/interactive-example.js +1 -0
- package/package.json +42 -38
- package/test-image.jpg +0 -0
- package/test-models-output.txt +22 -6
- package/test-models.js +145 -145
- package/test-vision.js +71 -0
- package/utils.js +27 -27
package/test-models.js
CHANGED
|
@@ -1,145 +1,145 @@
|
|
|
1
|
-
// ================================================================================
|
|
2
|
-
// == AWS Bedrock Example: Invoke a Model with a Streamed or Unstreamed Response ==
|
|
3
|
-
// ================================================================================
|
|
4
|
-
|
|
5
|
-
// ---------------------------------------------------------------------
|
|
6
|
-
// -- import environment variables from .env file or define them here --
|
|
7
|
-
// ---------------------------------------------------------------------
|
|
8
|
-
import dotenv from 'dotenv';
|
|
9
|
-
import fs from 'fs/promises';
|
|
10
|
-
import chalk from 'chalk';
|
|
11
|
-
|
|
12
|
-
dotenv.config();
|
|
13
|
-
|
|
14
|
-
const AWS_REGION = process.env.AWS_REGION;
|
|
15
|
-
const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID;
|
|
16
|
-
const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY;
|
|
17
|
-
const LLM_MAX_GEN_TOKENS = parseInt(process.env.LLM_MAX_GEN_TOKENS);
|
|
18
|
-
const LLM_TEMPERATURE = parseFloat(process.env.LLM_TEMPERATURE);
|
|
19
|
-
const LLM_TOP_P = parseFloat(process.env.LLM_TOP_P);
|
|
20
|
-
|
|
21
|
-
// --------------------------------------------
|
|
22
|
-
// -- import functions from bedrock-wrapper --
|
|
23
|
-
// -- - bedrockWrapper --
|
|
24
|
-
// -- - listBedrockWrapperSupportedModels --
|
|
25
|
-
// --------------------------------------------
|
|
26
|
-
import {
|
|
27
|
-
bedrockWrapper,
|
|
28
|
-
listBedrockWrapperSupportedModels
|
|
29
|
-
} from "./bedrock-wrapper.js";
|
|
30
|
-
|
|
31
|
-
async function logOutput(message, type = 'info', writeToFile = true ) {
|
|
32
|
-
if (writeToFile) {
|
|
33
|
-
// Log to file
|
|
34
|
-
await fs.appendFile('test-models-output.txt', message + '\n');
|
|
35
|
-
}
|
|
36
|
-
|
|
37
|
-
// Log to console with colors
|
|
38
|
-
switch(type) {
|
|
39
|
-
case 'success':
|
|
40
|
-
console.log(chalk.green('✓ ' + message));
|
|
41
|
-
break;
|
|
42
|
-
case 'error':
|
|
43
|
-
console.log(chalk.red('✗ ' + message));
|
|
44
|
-
break;
|
|
45
|
-
case 'info':
|
|
46
|
-
console.log(chalk.blue('ℹ ' + message));
|
|
47
|
-
break;
|
|
48
|
-
case 'running':
|
|
49
|
-
console.log(chalk.yellow(message));
|
|
50
|
-
break;
|
|
51
|
-
}
|
|
52
|
-
}
|
|
53
|
-
|
|
54
|
-
async function testModel(model, awsCreds, testMessage, isStreaming) {
|
|
55
|
-
const messages = [{ role: "user", content: testMessage }];
|
|
56
|
-
const openaiChatCompletionsCreateObject = {
|
|
57
|
-
messages,
|
|
58
|
-
model,
|
|
59
|
-
max_tokens: LLM_MAX_GEN_TOKENS,
|
|
60
|
-
stream: isStreaming,
|
|
61
|
-
temperature: LLM_TEMPERATURE,
|
|
62
|
-
top_p: LLM_TOP_P,
|
|
63
|
-
};
|
|
64
|
-
|
|
65
|
-
let completeResponse = "";
|
|
66
|
-
|
|
67
|
-
try {
|
|
68
|
-
if (isStreaming) {
|
|
69
|
-
for await (const chunk of bedrockWrapper(awsCreds, openaiChatCompletionsCreateObject, { logging:
|
|
70
|
-
completeResponse += chunk;
|
|
71
|
-
}
|
|
72
|
-
} else {
|
|
73
|
-
const response = await bedrockWrapper(awsCreds, openaiChatCompletionsCreateObject, { logging:
|
|
74
|
-
for await (const data of response) {
|
|
75
|
-
completeResponse += data;
|
|
76
|
-
}
|
|
77
|
-
}
|
|
78
|
-
|
|
79
|
-
// Check if response is empty or undefined
|
|
80
|
-
if (!completeResponse || completeResponse.trim() === '' || completeResponse.trim() === 'undefined') {
|
|
81
|
-
throw new Error('Empty or invalid response received');
|
|
82
|
-
}
|
|
83
|
-
|
|
84
|
-
return { success: true, response: completeResponse.trim() };
|
|
85
|
-
} catch (error) {
|
|
86
|
-
return { success: false, error: error.message };
|
|
87
|
-
}
|
|
88
|
-
}
|
|
89
|
-
|
|
90
|
-
async function main() {
|
|
91
|
-
const testMessage = "Respond with exactly one word: What is 1+1?";
|
|
92
|
-
|
|
93
|
-
// Clear output file and add header
|
|
94
|
-
await fs.writeFile('test-models-output.txt',
|
|
95
|
-
`Test Question: "${testMessage}"\n` +
|
|
96
|
-
`=`.repeat(50) + '\n\n'
|
|
97
|
-
);
|
|
98
|
-
|
|
99
|
-
const supportedModels = await listBedrockWrapperSupportedModels();
|
|
100
|
-
const availableModels = supportedModels.map(model => {
|
|
101
|
-
const fixedJson = model
|
|
102
|
-
.replace(/modelName": ([^,]+),/, 'modelName": "$1",')
|
|
103
|
-
.replace(/modelId": ([^}]+)}/, 'modelId": "$1"}');
|
|
104
|
-
return JSON.parse(fixedJson).modelName;
|
|
105
|
-
});
|
|
106
|
-
|
|
107
|
-
console.clear();
|
|
108
|
-
await logOutput(`Starting tests with ${availableModels.length} models...`, 'info');
|
|
109
|
-
await logOutput(`Each model will be tested with streaming and non-streaming calls\n`, 'info');
|
|
110
|
-
|
|
111
|
-
const awsCreds = {
|
|
112
|
-
region: AWS_REGION,
|
|
113
|
-
accessKeyId: AWS_ACCESS_KEY_ID,
|
|
114
|
-
secretAccessKey: AWS_SECRET_ACCESS_KEY,
|
|
115
|
-
};
|
|
116
|
-
|
|
117
|
-
for (const model of availableModels) {
|
|
118
|
-
await logOutput(`\n${'-'.repeat(50)}\nTesting ${model} ⇢`, 'running');
|
|
119
|
-
|
|
120
|
-
// Test streaming
|
|
121
|
-
const streamResult = await testModel(model, awsCreds, testMessage, true);
|
|
122
|
-
if (streamResult.success) {
|
|
123
|
-
await logOutput(`Streaming test passed for ${model}: "${streamResult.response}"`, 'success');
|
|
124
|
-
} else {
|
|
125
|
-
await logOutput(`Streaming test failed for ${model}: ${streamResult.error}`, 'error');
|
|
126
|
-
}
|
|
127
|
-
|
|
128
|
-
// Test non-streaming
|
|
129
|
-
const nonStreamResult = await testModel(model, awsCreds, testMessage, false);
|
|
130
|
-
if (nonStreamResult.success) {
|
|
131
|
-
await logOutput(`Non-streaming test passed for ${model}: "${nonStreamResult.response}"`, 'success');
|
|
132
|
-
} else {
|
|
133
|
-
await logOutput(`Non-streaming test failed for ${model}: ${nonStreamResult.error}`, 'error');
|
|
134
|
-
}
|
|
135
|
-
|
|
136
|
-
console.log(''); // Add blank line between models
|
|
137
|
-
}
|
|
138
|
-
|
|
139
|
-
await logOutput('Testing complete! Check test-models-output.txt for full test results.', 'info', false);
|
|
140
|
-
}
|
|
141
|
-
|
|
142
|
-
main().catch(async (error) => {
|
|
143
|
-
await logOutput(`Fatal Error: ${error.message}`, 'error');
|
|
144
|
-
console.error(error);
|
|
145
|
-
});
|
|
1
|
+
// ================================================================================
|
|
2
|
+
// == AWS Bedrock Example: Invoke a Model with a Streamed or Unstreamed Response ==
|
|
3
|
+
// ================================================================================
|
|
4
|
+
|
|
5
|
+
// ---------------------------------------------------------------------
|
|
6
|
+
// -- import environment variables from .env file or define them here --
|
|
7
|
+
// ---------------------------------------------------------------------
|
|
8
|
+
import dotenv from 'dotenv';
|
|
9
|
+
import fs from 'fs/promises';
|
|
10
|
+
import chalk from 'chalk';
|
|
11
|
+
|
|
12
|
+
dotenv.config();
|
|
13
|
+
|
|
14
|
+
// -- environment-derived configuration (loaded above via dotenv) --
// AWS credentials/region used for every Bedrock call.
const AWS_REGION = process.env.AWS_REGION;
const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID;
const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY;
// Generation settings; these evaluate to NaN when the variables are unset —
// NOTE(review): confirm downstream callers tolerate NaN for these fields.
const LLM_MAX_GEN_TOKENS = Number.parseInt(process.env.LLM_MAX_GEN_TOKENS, 10); // explicit radix
const LLM_TEMPERATURE = Number.parseFloat(process.env.LLM_TEMPERATURE);
const LLM_TOP_P = Number.parseFloat(process.env.LLM_TOP_P);
|
|
20
|
+
|
|
21
|
+
// --------------------------------------------
|
|
22
|
+
// -- import functions from bedrock-wrapper --
|
|
23
|
+
// -- - bedrockWrapper --
|
|
24
|
+
// -- - listBedrockWrapperSupportedModels --
|
|
25
|
+
// --------------------------------------------
|
|
26
|
+
import {
|
|
27
|
+
bedrockWrapper,
|
|
28
|
+
listBedrockWrapperSupportedModels
|
|
29
|
+
} from "./bedrock-wrapper.js";
|
|
30
|
+
|
|
31
|
+
// Log a message to the console (color-coded by type) and optionally append
// the plain text to test-models-output.txt.
// @param {string} message - text to log
// @param {string} [type='info'] - 'success' | 'error' | 'info' | 'running'
// @param {boolean} [writeToFile=true] - also persist to the results file
async function logOutput(message, type = 'info', writeToFile = true) {
    if (writeToFile) {
        // Persist the uncolored message to the results file.
        await fs.appendFile('test-models-output.txt', message + '\n');
    }

    // Console rendering per message type; unrecognized types print nothing
    // (matching the original switch, which had no default case).
    const renderers = {
        success: () => console.log(chalk.green('✓ ' + message)),
        error: () => console.log(chalk.red('✗ ' + message)),
        info: () => console.log(chalk.blue('ℹ ' + message)),
        running: () => console.log(chalk.yellow(message)),
    };
    if (Object.hasOwn(renderers, type)) {
        renderers[type]();
    }
}
|
|
53
|
+
|
|
54
|
+
// Exercise one model with one prompt and report the outcome without throwing.
// @param {string} model - model name understood by bedrockWrapper
// @param {object} awsCreds - { region, accessKeyId, secretAccessKey }
// @param {string} testMessage - user prompt to send
// @param {boolean} isStreaming - request a streamed response when true
// @returns {Promise<{success: boolean, response?: string, error?: string}>}
async function testModel(model, awsCreds, testMessage, isStreaming) {
    const request = {
        messages: [{ role: "user", content: testMessage }],
        model,
        max_tokens: LLM_MAX_GEN_TOKENS,
        stream: isStreaming,
        temperature: LLM_TEMPERATURE,
        top_p: LLM_TOP_P,
    };

    try {
        // Both call styles produce an async iterable of text fragments; the
        // non-streaming variant must be awaited before iteration.
        const source = isStreaming
            ? bedrockWrapper(awsCreds, request, { logging: true })
            : await bedrockWrapper(awsCreds, request, { logging: true });

        let accumulated = "";
        for await (const fragment of source) {
            accumulated += fragment;
        }

        const trimmed = accumulated.trim();
        // Treat blank output, or the literal string "undefined", as a failure.
        if (trimmed === '' || trimmed === 'undefined') {
            throw new Error('Empty or invalid response received');
        }

        return { success: true, response: trimmed };
    } catch (error) {
        return { success: false, error: error.message };
    }
}
|
|
89
|
+
|
|
90
|
+
// Run the one-word math prompt against every supported model, in both
// streaming and non-streaming mode, logging results to console and file.
async function main() {
    const testMessage = "Respond with exactly one word: What is 1+1?";

    // Start a fresh results file headed by the test question.
    const header = `Test Question: "${testMessage}"\n` + '='.repeat(50) + '\n\n';
    await fs.writeFile('test-models-output.txt', header);

    // listBedrockWrapperSupportedModels() appears to return almost-JSON
    // strings with unquoted values — quote them before parsing so the
    // model name can be extracted. TODO confirm against the wrapper's output.
    const supportedModels = await listBedrockWrapperSupportedModels();
    const availableModels = supportedModels.map((entry) => {
        const fixedJson = entry
            .replace(/modelName": ([^,]+),/, 'modelName": "$1",')
            .replace(/modelId": ([^}]+)}/, 'modelId": "$1"}');
        return JSON.parse(fixedJson).modelName;
    });

    console.clear();
    await logOutput(`Starting tests with ${availableModels.length} models...`, 'info');
    await logOutput(`Each model will be tested with streaming and non-streaming calls\n`, 'info');

    const awsCreds = {
        region: AWS_REGION,
        accessKeyId: AWS_ACCESS_KEY_ID,
        secretAccessKey: AWS_SECRET_ACCESS_KEY,
    };

    for (const model of availableModels) {
        await logOutput(`\n${'-'.repeat(50)}\nTesting ${model} ⇢`, 'running');

        // Each model gets one streamed call, then one unstreamed call.
        for (const [isStreaming, label] of [[true, 'Streaming'], [false, 'Non-streaming']]) {
            const result = await testModel(model, awsCreds, testMessage, isStreaming);
            if (result.success) {
                await logOutput(`${label} test passed for ${model}: "${result.response}"`, 'success');
            } else {
                await logOutput(`${label} test failed for ${model}: ${result.error}`, 'error');
            }
        }

        console.log(''); // blank line between models
    }

    await logOutput('Testing complete! Check test-models-output.txt for full test results.', 'info', false);
}
|
|
141
|
+
|
|
142
|
+
// Kick off the run; any unhandled failure is logged through logOutput and
// the full error (with stack) is dumped to stderr.
main().catch(async (err) => {
    await logOutput(`Fatal Error: ${err.message}`, 'error');
    console.error(err);
});
|
package/test-vision.js
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import { bedrockWrapper } from "./bedrock-wrapper.js";
|
|
2
|
+
import dotenv from 'dotenv';
|
|
3
|
+
import fs from 'fs/promises';
|
|
4
|
+
|
|
5
|
+
dotenv.config();
|
|
6
|
+
|
|
7
|
+
// AWS credentials for bedrockWrapper, taken from the environment (.env).
const { AWS_REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY } = process.env;
const awsCreds = {
    region: AWS_REGION,
    accessKeyId: AWS_ACCESS_KEY_ID,
    secretAccessKey: AWS_SECRET_ACCESS_KEY,
};
|
|
12
|
+
|
|
13
|
+
// Send a local JPEG to each vision-capable model and stream the model's
// description to stdout. Errors are reported per model so one failure does
// not stop the remaining models from being tested.
async function testVisionCapabilities() {
    // Read the sample image and embed it as a base64 data URL.
    const imageBuffer = await fs.readFile('./test-image.jpg');
    const base64Image = imageBuffer.toString('base64');

    const messages = [
        {
            role: "user",
            content: [
                {
                    type: "text",
                    text: "What's in this image? Please describe it in detail."
                },
                {
                    type: "image_url",
                    image_url: {
                        url: `data:image/jpeg;base64,${base64Image}`
                        // url: "https://github.com/jparkerweb/ref/blob/main/equill-labs/bedrock-proxy-endpoint/bedrock-proxy-endpoint.png?raw=true"
                    }
                }
            ]
        }
    ];

    // Models exercised by this script; presumably vision-capable per the
    // wrapper's model table — verify against bedrock-models.js.
    const visionModels = ["Claude-3-5-Sonnet-v2", "Claude-3-7-Sonnet"];

    for (const model of visionModels) {
        console.log(`\nTesting vision capabilities with ${model}...`);

        const openaiChatCompletionsCreateObject = {
            messages,
            model,
            max_tokens: 1000,
            stream: true,
            temperature: 0.7
        };

        try {
            console.log(`\nSending request to ${model} with format:`,
                JSON.stringify(openaiChatCompletionsCreateObject, null, 2));

            // Stream chunks straight to stdout. The original also accumulated
            // the chunks into a `response` variable that was never read; that
            // dead accumulator has been removed.
            for await (const chunk of bedrockWrapper(awsCreds, openaiChatCompletionsCreateObject, { logging: true })) {
                process.stdout.write(chunk);
            }
            console.log("\n-------------------");
        } catch (error) {
            console.error(`Error with ${model}:`, error);
            // Surface the response payload when the error carries one.
            if (error.response) {
                console.error('Response error:', error.response);
            }
        }
    }
}

testVisionCapabilities().catch(console.error);
|
package/utils.js
CHANGED
|
@@ -1,28 +1,28 @@
|
|
|
1
|
-
// **********************
|
|
2
|
-
// ** HELPER FUNCTIONS **
|
|
3
|
-
// **********************
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
// ----------------------------------------------------
|
|
7
|
-
// -- get a value from an object using a path string --
|
|
8
|
-
// ----------------------------------------------------
|
|
9
|
-
export function getValueByPath(obj, path) {
|
|
10
|
-
// Split the path into an array of keys
|
|
11
|
-
let keys = path.replace(/\[(\w+)\]/g, '.$1').split('.'); // Convert indexes into properties
|
|
12
|
-
// Reduce the keys array to the final value
|
|
13
|
-
return keys.reduce((acc, key) => acc && acc[key], obj);
|
|
14
|
-
}
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
// --------------------------
|
|
18
|
-
// -- write ascii art logo --
|
|
19
|
-
// --------------------------
|
|
20
|
-
export async function writeAsciiArt() {
|
|
21
|
-
console.log(`
|
|
22
|
-
___ _ _ _ _ _
|
|
23
|
-
| . > ___ _| | _ _ ___ ___ | |__ | | | | _ _ ___ ___ ___ ___ _ _
|
|
24
|
-
| . \\/ ._>/ . || '_>/ . \\/ | '| / / | | | || '_><_> || . \\| . \\/ ._>| '_>
|
|
25
|
-
|___/\\___.\\___||_| \\___/\\_|_.|_\\_\\ |__/_/ |_| <___|| _/| _/\\___.|_|
|
|
26
|
-
|_| |_|
|
|
27
|
-
`);
|
|
1
|
+
// **********************
|
|
2
|
+
// ** HELPER FUNCTIONS **
|
|
3
|
+
// **********************
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
// ----------------------------------------------------
// -- get a value from an object using a path string --
// ----------------------------------------------------
// Resolves a dotted/bracketed path (e.g. "a.b[0].c") against obj.
// Fix: the previous reduction used `acc && acc[key]`, which short-circuits
// on ANY falsy intermediate — so a stored 0, '' or false was returned
// instead of continuing the walk (e.g. {a: 0} with path "a.b" yielded 0).
// Optional chaining stops only on null/undefined, and consistently yields
// undefined for missing paths.
// @param {object} obj - object to traverse
// @param {string} path - property path, dots and/or bracket indexes
// @returns {*} the resolved value, or undefined if any step is missing
export function getValueByPath(obj, path) {
    // Convert bracket indexes into dot properties, then split into keys.
    const keys = path.replace(/\[(\w+)\]/g, '.$1').split('.');
    // Walk the keys; `?.` stops only on null/undefined intermediates.
    return keys.reduce((acc, key) => acc?.[key], obj);
}
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
// --------------------------
// -- write ascii art logo --
// --------------------------
// Prints the "bedrock wrapper" ASCII-art banner to stdout.
// NOTE(review): declared async although nothing is awaited — callers may
// rely on receiving a promise, so the signature is left unchanged.
// NOTE(review): the banner's column alignment may have been altered by the
// diff/extraction rendering — confirm spacing against the published package.
export async function writeAsciiArt() {
    console.log(`
 ___ _ _ _ _ _
| . > ___ _| | _ _ ___ ___ | |__ | | | | _ _ ___ ___ ___ ___ _ _
| . \\/ ._>/ . || '_>/ . \\/ | '| / / | | | || '_><_> || . \\| . \\/ ._>| '_>
|___/\\___.\\___||_| \\___/\\_|_.|_\\_\\ |__/_/ |_| <___|| _/| _/\\___.|_|
|_| |_|
`);
}
|