llmjs2 1.1.0 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CONFIG_README.md +98 -0
- package/README.md +382 -357
- package/cli.js +195 -0
- package/config.yaml +149 -0
- package/docs/BASIC_USAGE.md +296 -0
- package/docs/CLI.md +455 -0
- package/docs/GET_STARTED.md +129 -0
- package/docs/GUARDRAILS_GUIDE.md +734 -0
- package/docs/README.md +47 -0
- package/docs/ROUTER_GUIDE.md +397 -0
- package/docs/SERVER_MODE.md +350 -0
- package/index.js +199 -228
- package/package.json +43 -28
- package/providers/ollama.js +120 -88
- package/providers/openai.js +104 -0
- package/providers/openrouter.js +113 -79
- package/router.js +248 -0
- package/server.js +186 -0
- package/test.js +246 -296
- package/validate-config.js +87 -0
- package/example.js +0 -298
package/providers/ollama.js
CHANGED
|
@@ -1,88 +1,120 @@
|
|
|
1
|
-
const
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
1
|
+
const https = require('https');
|
|
2
|
+
|
|
3
|
+
/**
 * Provider for the Ollama chat API (cloud endpoint by default).
 *
 * Config (all optional; environment variables are the fallback):
 *   baseURL      — full chat endpoint URL (OLLAMA_BASE_URL)
 *   apiKey       — bearer token (OLLAMA_API_KEY); required before any request
 *   defaultModel — model used when options.model is absent (OLLAMA_DEFAULT_MODEL)
 *   timeout      — request timeout in ms (default 120000; LLMs can be slow)
 */
class OllamaProvider {
  constructor(config = {}) {
    this.baseURL = config.baseURL || process.env.OLLAMA_BASE_URL || 'https://ollama.com/api/chat';
    this.apiKey = config.apiKey || process.env.OLLAMA_API_KEY;
    this.defaultModel = config.defaultModel || process.env.OLLAMA_DEFAULT_MODEL || 'minimax-m2.5:cloud';
    this.timeout = config.timeout || 120000; // 2 minutes (LLMs can be slow)
  }

  /**
   * POST `data` as JSON to this.baseURL and resolve with the parsed body.
   *
   * @param {object} data request payload, serialized with JSON.stringify
   * @returns {Promise<object>} parsed JSON response
   * @throws {Error} if no API key is configured; rejects on non-2xx status,
   *   network failure, timeout, or a 2xx body that is not valid JSON
   */
  async makeRequest(data) {
    if (!this.apiKey) {
      throw new Error('Ollama API key is required. Set OLLAMA_API_KEY environment variable or pass apiKey in config.');
    }

    const postData = JSON.stringify(data);
    const parsedUrl = new URL(this.baseURL);

    const options = {
      hostname: parsedUrl.hostname,
      port: parsedUrl.port || 443,
      path: parsedUrl.pathname + parsedUrl.search,
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${this.apiKey}`,
        'Content-Length': Buffer.byteLength(postData)
      }
    };

    return new Promise((resolve, reject) => {
      const req = https.request(options, (res) => {
        let body = '';

        res.on('data', (chunk) => {
          body += chunk;
        });

        res.on('end', () => {
          if (res.statusCode >= 200 && res.statusCode < 300) {
            try {
              resolve(JSON.parse(body));
            } catch (parseError) {
              reject(new Error(`Failed to parse Ollama response: ${parseError.message}`));
            }
          } else {
            // Error bodies are usually JSON, but proxies/gateways can return
            // HTML or plain text; fall back to the raw body so the HTTP status
            // code is never lost behind a misleading parse error.
            let message = body;
            try {
              const parsed = JSON.parse(body);
              message = parsed.error?.message || parsed.message || body;
            } catch {
              // non-JSON error body: report it verbatim
            }
            reject(new Error(`Ollama API error (${res.statusCode}): ${message}`));
          }
        });
      });

      req.on('error', (error) => {
        reject(new Error(`Ollama request failed: ${error.message}`));
      });

      req.setTimeout(this.timeout, () => {
        req.destroy();
        reject(new Error('Ollama request timed out'));
      });

      req.write(postData);
      req.end();
    });
  }

  /**
   * Run a non-streaming chat completion.
   *
   * @param {Array<object>} messages chat messages ({role, content, ...})
   * @param {object} [options] model, temperature, topP, maxTokens,
   *   frequencyPenalty, presencePenalty, stop, numCtx, format, tools, keepAlive
   * @returns {Promise<{content: string, role: string, toolCalls: *, usage: object, model: string, finishReason: (string|null)}>}
   */
  async createCompletion(messages, options = {}) {
    const data = {
      model: options.model || this.defaultModel,
      messages: messages,
      stream: false, // We handle non-streaming for simplicity
      format: options.format,
      options: {
        // `??` (not `||`) so an explicit 0 temperature is honored
        temperature: options.temperature ?? 0.7,
        top_p: options.topP,
        num_predict: options.maxTokens,
        repeat_penalty: options.frequencyPenalty,
        presence_penalty: options.presencePenalty,
        stop: options.stop,
        num_ctx: options.numCtx ?? 4096
      },
      tools: options.tools,
      // `??` so keepAlive: 0 ("unload immediately") is not replaced by '5m'
      keep_alive: options.keepAlive ?? '5m'
    };

    // Drop keys the caller did not set so they are absent from the payload.
    Object.keys(data).forEach(key => {
      if (data[key] === undefined) {
        delete data[key];
      }
    });

    if (data.options) {
      Object.keys(data.options).forEach(key => {
        if (data.options[key] === undefined) {
          delete data.options[key];
        }
      });
    }

    const response = await this.makeRequest(data);

    // Normalize the Ollama response shape to the library-wide result shape.
    return {
      content: response.message?.content || '',
      role: response.message?.role || 'assistant',
      toolCalls: response.message?.tool_calls,
      usage: {
        prompt_eval_count: response.prompt_eval_count,
        eval_count: response.eval_count,
        total_duration: response.total_duration
      },
      model: response.model,
      finishReason: response.done_reason || (response.done ? 'stop' : null)
    };
  }
}
|
|
119
|
+
|
|
120
|
+
module.exports = OllamaProvider;
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
const https = require('https');
|
|
2
|
+
|
|
3
|
+
/**
 * Provider for the OpenAI Chat Completions API.
 *
 * Config (all optional; environment variables are the fallback):
 *   baseURL      — API base, endpoint paths are appended (OPENAI_BASE_URL)
 *   apiKey       — bearer token (OPENAI_API_KEY); required before any request
 *   defaultModel — model used when options.model is absent (OPENAI_DEFAULT_MODEL)
 *   timeout      — request timeout in ms (default 60000)
 */
class OpenAIProvider {
  constructor(config = {}) {
    this.baseURL = config.baseURL || process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1';
    this.apiKey = config.apiKey || process.env.OPENAI_API_KEY;
    this.defaultModel = config.defaultModel || process.env.OPENAI_DEFAULT_MODEL || 'gpt-3.5-turbo';
    this.timeout = config.timeout || 60000; // 60 seconds
  }

  /**
   * POST `data` as JSON to `${this.baseURL}${endpoint}` and resolve with the
   * parsed body.
   *
   * @param {string} endpoint path appended to baseURL (e.g. '/chat/completions')
   * @param {object} data request payload, serialized with JSON.stringify
   * @returns {Promise<object>} parsed JSON response
   * @throws {Error} if no API key is configured; rejects on non-2xx status,
   *   network failure, timeout, or a 2xx body that is not valid JSON
   */
  async makeRequest(endpoint, data) {
    if (!this.apiKey) {
      throw new Error('OpenAI API key is required. Set OPENAI_API_KEY environment variable or pass apiKey in config.');
    }

    const requestUrl = `${this.baseURL}${endpoint}`;
    const postData = JSON.stringify(data);
    const parsedUrl = new URL(requestUrl);

    const options = {
      hostname: parsedUrl.hostname,
      port: parsedUrl.port || 443,
      path: parsedUrl.pathname + parsedUrl.search,
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${this.apiKey}`,
        'Content-Length': Buffer.byteLength(postData)
      }
    };

    return new Promise((resolve, reject) => {
      const req = https.request(options, (res) => {
        let body = '';

        res.on('data', (chunk) => {
          body += chunk;
        });

        res.on('end', () => {
          if (res.statusCode >= 200 && res.statusCode < 300) {
            try {
              resolve(JSON.parse(body));
            } catch (parseError) {
              reject(new Error(`Failed to parse OpenAI response: ${parseError.message}`));
            }
          } else {
            // Error bodies are usually JSON, but proxies/gateways can return
            // HTML or plain text; fall back so the status code is never lost
            // behind a misleading parse error.
            let message = 'Unknown error';
            try {
              const parsed = JSON.parse(body);
              message = parsed.error?.message || message;
            } catch {
              if (body) message = body;
            }
            reject(new Error(`OpenAI API error (${res.statusCode}): ${message}`));
          }
        });
      });

      req.on('error', (error) => {
        reject(new Error(`OpenAI request failed: ${error.message}`));
      });

      req.setTimeout(this.timeout, () => {
        req.destroy();
        reject(new Error('OpenAI request timed out'));
      });

      req.write(postData);
      req.end();
    });
  }

  /**
   * Run a chat completion against /chat/completions.
   *
   * @param {Array<object>} messages chat messages ({role, content, ...})
   * @param {object} [options] model, temperature, maxTokens, topP,
   *   frequencyPenalty, presencePenalty, stop, tools, toolChoice
   * @returns {Promise<{content: string, role: string, toolCalls: *, usage: object, model: string, finishReason: string}>}
   */
  async createCompletion(messages, options = {}) {
    const data = {
      model: options.model || this.defaultModel,
      messages: messages,
      // `??` (not `||`) so an explicit 0 temperature is honored
      temperature: options.temperature ?? 0.7,
      max_tokens: options.maxTokens,
      top_p: options.topP,
      frequency_penalty: options.frequencyPenalty,
      presence_penalty: options.presencePenalty,
      stop: options.stop,
      tools: options.tools,
      tool_choice: options.toolChoice
    };

    // Drop keys the caller did not set so they are absent from the payload.
    Object.keys(data).forEach(key => {
      if (data[key] === undefined) {
        delete data[key];
      }
    });

    const response = await this.makeRequest('/chat/completions', data);

    // Normalize the OpenAI response shape to the library-wide result shape.
    return {
      content: response.choices[0]?.message?.content || '',
      role: response.choices[0]?.message?.role || 'assistant',
      toolCalls: response.choices[0]?.message?.tool_calls,
      usage: response.usage,
      model: response.model,
      finishReason: response.choices[0]?.finish_reason
    };
  }
}
|
|
103
|
+
|
|
104
|
+
module.exports = OpenAIProvider;
|
package/providers/openrouter.js
CHANGED
|
@@ -1,79 +1,113 @@
|
|
|
1
|
-
const
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
1
|
+
const https = require('https');
|
|
2
|
+
|
|
3
|
+
/**
 * Provider for the OpenRouter chat completions API.
 *
 * Config (all optional; environment variables are the fallback):
 *   baseURL      — full chat endpoint URL (OPEN_ROUTER_BASE_URL)
 *   apiKey       — bearer token (OPEN_ROUTER_API_KEY); required before any request
 *   defaultModel — model used when options.model is absent (OPEN_ROUTER_DEFAULT_MODEL)
 *   timeout      — request timeout in ms (default 60000)
 *   referer      — optional HTTP-Referer attribution header (OPEN_ROUTER_REFERER)
 *   title        — optional X-Title attribution header (OPEN_ROUTER_TITLE)
 */
class OpenRouterProvider {
  constructor(config = {}) {
    this.baseURL = config.baseURL || process.env.OPEN_ROUTER_BASE_URL || 'https://openrouter.ai/api/v1/chat/completions';
    this.apiKey = config.apiKey || process.env.OPEN_ROUTER_API_KEY;
    this.defaultModel = config.defaultModel || process.env.OPEN_ROUTER_DEFAULT_MODEL || 'openrouter/free';
    this.timeout = config.timeout || 60000; // 60 seconds
    this.config = config; // Store entire config for additional properties like referer, title
  }

  /**
   * POST `data` as JSON to this.baseURL and resolve with the parsed body.
   * Attribution headers (HTTP-Referer, X-Title) are sent only when configured.
   *
   * @param {object} data request payload, serialized with JSON.stringify
   * @returns {Promise<object>} parsed JSON response
   * @throws {Error} if no API key is configured; rejects on non-2xx status,
   *   network failure, timeout, or a 2xx body that is not valid JSON
   */
  async makeRequest(data) {
    if (!this.apiKey) {
      throw new Error('OpenRouter API key is required. Set OPEN_ROUTER_API_KEY environment variable or pass apiKey in config.');
    }

    const postData = JSON.stringify(data);
    const parsedUrl = new URL(this.baseURL);

    const options = {
      hostname: parsedUrl.hostname,
      port: parsedUrl.port || 443,
      path: parsedUrl.pathname + parsedUrl.search,
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${this.apiKey}`,
        'Content-Length': Buffer.byteLength(postData),
        'HTTP-Referer': this.config.referer || process.env.OPEN_ROUTER_REFERER || '',
        'X-Title': this.config.title || process.env.OPEN_ROUTER_TITLE || ''
      }
    };

    // Remove empty headers rather than sending blank values.
    if (!options.headers['HTTP-Referer']) delete options.headers['HTTP-Referer'];
    if (!options.headers['X-Title']) delete options.headers['X-Title'];

    return new Promise((resolve, reject) => {
      const req = https.request(options, (res) => {
        let body = '';

        res.on('data', (chunk) => {
          body += chunk;
        });

        res.on('end', () => {
          if (res.statusCode >= 200 && res.statusCode < 300) {
            try {
              resolve(JSON.parse(body));
            } catch (parseError) {
              reject(new Error(`Failed to parse OpenRouter response: ${parseError.message}`));
            }
          } else {
            // Error bodies are usually JSON, but proxies/gateways can return
            // HTML or plain text; fall back so the status code is never lost
            // behind a misleading parse error.
            let message = 'Unknown error';
            try {
              const parsed = JSON.parse(body);
              message = parsed.error?.message || message;
            } catch {
              if (body) message = body;
            }
            reject(new Error(`OpenRouter API error (${res.statusCode}): ${message}`));
          }
        });
      });

      req.on('error', (error) => {
        reject(new Error(`OpenRouter request failed: ${error.message}`));
      });

      req.setTimeout(this.timeout, () => {
        req.destroy();
        reject(new Error('OpenRouter request timed out'));
      });

      req.write(postData);
      req.end();
    });
  }

  /**
   * Run a chat completion.
   *
   * @param {Array<object>} messages chat messages ({role, content, ...})
   * @param {object} [options] model, temperature, maxTokens, topP,
   *   frequencyPenalty, presencePenalty, stop, tools, toolChoice, plus
   *   OpenRouter extras: transforms, models, route
   * @returns {Promise<{content: string, role: string, toolCalls: *, usage: object, model: string, finishReason: string}>}
   */
  async createCompletion(messages, options = {}) {
    const data = {
      model: options.model || this.defaultModel,
      messages: messages,
      // `??` (not `||`) so an explicit 0 temperature is honored
      temperature: options.temperature ?? 0.7,
      max_tokens: options.maxTokens,
      top_p: options.topP,
      frequency_penalty: options.frequencyPenalty,
      presence_penalty: options.presencePenalty,
      stop: options.stop,
      tools: options.tools,
      tool_choice: options.toolChoice,
      transforms: options.transforms,
      models: options.models,
      route: options.route
    };

    // Drop keys the caller did not set so they are absent from the payload.
    Object.keys(data).forEach(key => {
      if (data[key] === undefined) {
        delete data[key];
      }
    });

    const response = await this.makeRequest(data);

    // Normalize the OpenRouter response shape to the library-wide result shape.
    return {
      content: response.choices[0]?.message?.content || '',
      role: response.choices[0]?.message?.role || 'assistant',
      toolCalls: response.choices[0]?.message?.tool_calls,
      usage: response.usage,
      model: response.model,
      finishReason: response.choices[0]?.finish_reason
    };
  }
}
|
|
112
|
+
|
|
113
|
+
module.exports = OpenRouterProvider;
|