llmjs2 1.3.9 → 1.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/README.md +31 -476
  2. package/chain/AGENT_STEP_README.md +102 -0
  3. package/chain/README.md +257 -0
  4. package/chain/WORKFLOW_README.md +85 -0
  5. package/chain/agent-step-example.js +232 -0
  6. package/chain/docs/AGENT.md +126 -0
  7. package/chain/docs/GRAPH.md +490 -0
  8. package/chain/examples.js +314 -0
  9. package/chain/index.js +31 -0
  10. package/chain/lib/agent.js +338 -0
  11. package/chain/lib/flow/agent-step.js +119 -0
  12. package/chain/lib/flow/edge.js +24 -0
  13. package/chain/lib/flow/flow.js +76 -0
  14. package/chain/lib/flow/graph.js +331 -0
  15. package/chain/lib/flow/index.js +7 -0
  16. package/chain/lib/flow/step.js +63 -0
  17. package/chain/lib/memory/in-memory.js +117 -0
  18. package/chain/lib/memory/index.js +36 -0
  19. package/chain/lib/memory/lance-memory.js +225 -0
  20. package/chain/lib/memory/sqlite-memory.js +309 -0
  21. package/chain/simple-agent-step-example.js +168 -0
  22. package/chain/workflow-example-usage.js +70 -0
  23. package/chain/workflow-example.json +59 -0
  24. package/core/README.md +485 -0
  25. package/core/cli.js +275 -0
  26. package/core/docs/BASIC_USAGE.md +62 -0
  27. package/core/docs/CLI.md +104 -0
  28. package/{docs → core/docs}/GET_STARTED.md +129 -129
  29. package/{docs → core/docs}/GUARDRAILS_GUIDE.md +734 -734
  30. package/{docs → core/docs}/README.md +47 -47
  31. package/core/docs/ROUTER_GUIDE.md +199 -0
  32. package/{docs → core/docs}/SERVER_MODE.md +358 -350
  33. package/core/index.js +115 -0
  34. package/{providers → core/providers}/ollama.js +14 -6
  35. package/{providers → core/providers}/openai.js +14 -6
  36. package/core/providers/openrouter.js +206 -0
  37. package/core/router.js +252 -0
  38. package/{server.js → core/server.js} +15 -5
  39. package/package.json +46 -27
  40. package/cli.js +0 -195
  41. package/docs/BASIC_USAGE.md +0 -296
  42. package/docs/CLI.md +0 -455
  43. package/docs/ROUTER_GUIDE.md +0 -402
  44. package/index.js +0 -267
  45. package/providers/openrouter.js +0 -113
  46. package/router.js +0 -273
  47. package/test-completion.js +0 -99
  48. package/test.js +0 -246
  49. /package/{config.yaml → core/config.yaml} +0 -0
  50. /package/{logger.js → core/logger.js} +0 -0
package/core/index.js ADDED
@@ -0,0 +1,115 @@
1
+ const OpenAIProvider = require('./providers/openai');
2
+ const OllamaProvider = require('./providers/ollama');
3
+ const OpenRouterProvider = require('./providers/openrouter');
4
+ const { completion } = require('./completion');
5
+ const { router } = require('./router');
6
+ const { app } = require('./server');
7
+
8
/**
 * LLMJS2 — facade that wires the OpenAI, Ollama, and OpenRouter providers
 * behind a single object and routes "provider/model" strings to the
 * matching backend.
 */
class LLMJS2 {
  /**
   * @param {object} [config] - Optional per-provider config objects
   *   (`openai`, `ollama`, `openrouter`) plus `defaultProvider`, the
   *   provider name preferred when no provider prefix is given.
   */
  constructor(config = {}) {
    this.defaultProvider = config.defaultProvider;
    this.providers = {
      openai: new OpenAIProvider(config.openai || {}),
      ollama: new OllamaProvider(config.ollama || {}),
      openrouter: new OpenRouterProvider(config.openrouter || {})
    };
  }

  /**
   * Splits a "provider/model" string at the FIRST slash so model ids that
   * themselves contain slashes (e.g. "openrouter/meta-llama/llama-3") keep
   * the remainder intact.
   * @param {string} modelString
   * @returns {{provider: (string|null), model: (string|null)}}
   */
  parseModel(modelString) {
    if (!modelString || typeof modelString !== 'string') {
      return { provider: null, model: null };
    }

    const firstSlashIndex = modelString.indexOf('/');
    if (firstSlashIndex === -1) {
      return { provider: null, model: modelString };
    }

    return {
      provider: modelString.substring(0, firstSlashIndex),
      model: modelString.substring(firstSlashIndex + 1)
    };
  }

  /**
   * True when `key` is a usable API key: a non-blank string that does not
   * start with ':' (placeholder convention). Extracted so the three
   * per-provider checks in getAvailableProviders() cannot drift apart —
   * the original repeated this exact condition verbatim three times.
   * @param {*} key
   * @returns {boolean}
   */
  isUsableApiKey(key) {
    return Boolean(key && typeof key === 'string' && key.trim() && !key.startsWith(':'));
  }

  /**
   * Lists providers that currently have a usable API key. Environment
   * variables take precedence over keys passed via config.
   * @returns {string[]} subset of ['openai', 'ollama', 'openrouter'], in that order
   */
  getAvailableProviders() {
    const candidateKeys = {
      openai: process.env.OPENAI_API_KEY || this.providers.openai.apiKey,
      ollama: process.env.OLLAMA_API_KEY || this.providers.ollama.apiKey,
      openrouter: process.env.OPEN_ROUTER_API_KEY || this.providers.openrouter.apiKey
    };

    return Object.keys(candidateKeys).filter((name) => this.isUsableApiKey(candidateKeys[name]));
  }

  /**
   * Picks a random provider among those with usable keys and pairs it with
   * that provider's default model.
   * @returns {{provider: object, model: string}}
   * @throws {Error} when no provider has a usable API key
   */
  getAutoProvider() {
    const availableProviders = this.getAvailableProviders();
    if (availableProviders.length === 0) {
      throw new Error('No API keys found. Set OLLAMA_API_KEY, OPEN_ROUTER_API_KEY, or OPENAI_API_KEY environment variables.');
    }

    const randomIndex = Math.floor(Math.random() * availableProviders.length);
    const providerName = availableProviders[randomIndex];
    const provider = this.providers[providerName];

    return {
      provider,
      model: provider.defaultModel
    };
  }

  /**
   * Resolves a model string to a provider instance and model id. An explicit
   * "provider/..." prefix always wins; otherwise the configured
   * defaultProvider is used when it has a usable key, falling back to the
   * first available provider.
   * @param {string} modelString
   * @returns {{provider: object, model: string}}
   * @throws {Error} on an unknown provider prefix or when no keys are configured
   */
  getProvider(modelString) {
    const { provider: specifiedProvider, model } = this.parseModel(modelString);

    if (specifiedProvider) {
      const provider = this.providers[specifiedProvider];
      if (!provider) {
        throw new Error(`Unknown provider: ${specifiedProvider}`);
      }
      return { provider, model };
    }

    const availableProviders = this.getAvailableProviders();
    if (availableProviders.length === 0) {
      throw new Error('No API keys configured. Set OPENAI_API_KEY, OLLAMA_API_KEY, or OPEN_ROUTER_API_KEY environment variables.');
    }

    const providerName = this.defaultProvider && availableProviders.includes(this.defaultProvider)
      ? this.defaultProvider
      : availableProviders[0];
    const provider = this.providers[providerName];

    return {
      provider,
      model: model || provider.defaultModel
    };
  }

  /**
   * Delegates to the module-level completion() helper, forwarding this
   * instance's per-provider settings (apiKey/baseURL/timeout) and
   * defaultProvider.
   * @param {object} input - completion request payload
   * @returns {Promise<object>}
   */
  completion(input) {
    return completion(input, {
      defaultProvider: this.defaultProvider,
      openai: { apiKey: this.providers.openai.apiKey, baseURL: this.providers.openai.baseURL, timeout: this.providers.openai.timeout },
      ollama: { apiKey: this.providers.ollama.apiKey, baseURL: this.providers.ollama.baseURL, timeout: this.providers.ollama.timeout },
      openrouter: { apiKey: this.providers.openrouter.apiKey, baseURL: this.providers.openrouter.baseURL, timeout: this.providers.openrouter.timeout }
    });
  }
}
106
+
107
+ module.exports = {
108
+ completion,
109
+ LLMJS2,
110
+ router,
111
+ app,
112
+ OpenAIProvider,
113
+ OllamaProvider,
114
+ OpenRouterProvider
115
+ };
@@ -8,14 +8,18 @@ class OllamaProvider {
8
8
  this.timeout = config.timeout || 120000; // 2 minutes (LLMs can be slow)
9
9
  }
10
10
 
11
- async makeRequest(data) {
12
- if (!this.apiKey) {
11
+ async makeRequest(data, requestOptions = {}) {
12
+ const apiKey = requestOptions.apiKey || this.apiKey;
13
+ const baseURL = requestOptions.baseURL || this.baseURL;
14
+ const timeout = requestOptions.timeout || this.timeout;
15
+
16
+ if (!apiKey) {
13
17
  throw new Error('Ollama API key is required. Set OLLAMA_API_KEY environment variable or pass apiKey in config.');
14
18
  }
15
19
 
16
20
  const postData = JSON.stringify(data);
17
21
 
18
- const parsedUrl = new URL(this.baseURL);
22
+ const parsedUrl = new URL(baseURL);
19
23
 
20
24
  const options = {
21
25
  hostname: parsedUrl.hostname,
@@ -24,7 +28,7 @@ class OllamaProvider {
24
28
  method: 'POST',
25
29
  headers: {
26
30
  'Content-Type': 'application/json',
27
- 'Authorization': `Bearer ${this.apiKey}`,
31
+ 'Authorization': `Bearer ${apiKey}`,
28
32
  'Content-Length': Buffer.byteLength(postData)
29
33
  }
30
34
  };
@@ -56,7 +60,7 @@ class OllamaProvider {
56
60
  reject(new Error(`Ollama request failed: ${error.message}`));
57
61
  });
58
62
 
59
- req.setTimeout(this.timeout, () => {
63
+ req.setTimeout(timeout, () => {
60
64
  req.destroy();
61
65
  reject(new Error('Ollama request timed out'));
62
66
  });
@@ -100,7 +104,11 @@ class OllamaProvider {
100
104
  });
101
105
  }
102
106
 
103
- const response = await this.makeRequest(data);
107
+ const response = await this.makeRequest(data, {
108
+ apiKey: options.apiKey,
109
+ baseURL: options.baseURL,
110
+ timeout: options.timeout
111
+ });
104
112
 
105
113
  return {
106
114
  content: response.message?.content || '',
@@ -8,12 +8,16 @@ class OpenAIProvider {
8
8
  this.timeout = config.timeout || 60000; // 60 seconds
9
9
  }
10
10
 
11
- async makeRequest(endpoint, data) {
12
- if (!this.apiKey) {
11
+ async makeRequest(endpoint, data, requestOptions = {}) {
12
+ const apiKey = requestOptions.apiKey || this.apiKey;
13
+ const baseURL = requestOptions.baseURL || this.baseURL;
14
+ const timeout = requestOptions.timeout || this.timeout;
15
+
16
+ if (!apiKey) {
13
17
  throw new Error('OpenAI API key is required. Set OPENAI_API_KEY environment variable or pass apiKey in config.');
14
18
  }
15
19
 
16
- const requestUrl = `${this.baseURL}${endpoint}`;
20
+ const requestUrl = `${baseURL}${endpoint}`;
17
21
  const postData = JSON.stringify(data);
18
22
 
19
23
  const parsedUrl = new URL(requestUrl);
@@ -25,7 +29,7 @@ class OpenAIProvider {
25
29
  method: 'POST',
26
30
  headers: {
27
31
  'Content-Type': 'application/json',
28
- 'Authorization': `Bearer ${this.apiKey}`,
32
+ 'Authorization': `Bearer ${apiKey}`,
29
33
  'Content-Length': Buffer.byteLength(postData)
30
34
  }
31
35
  };
@@ -57,7 +61,7 @@ class OpenAIProvider {
57
61
  reject(new Error(`OpenAI request failed: ${error.message}`));
58
62
  });
59
63
 
60
- req.setTimeout(this.timeout, () => {
64
+ req.setTimeout(timeout, () => {
61
65
  req.destroy();
62
66
  reject(new Error('OpenAI request timed out'));
63
67
  });
@@ -88,7 +92,11 @@ class OpenAIProvider {
88
92
  }
89
93
  });
90
94
 
91
- const response = await this.makeRequest('/chat/completions', data);
95
+ const response = await this.makeRequest('/chat/completions', data, {
96
+ apiKey: options.apiKey,
97
+ baseURL: options.baseURL,
98
+ timeout: options.timeout
99
+ });
92
100
 
93
101
  return {
94
102
  content: response.choices[0]?.message?.content || '',
@@ -0,0 +1,206 @@
1
const http = require('http');
const https = require('https');
2
+
3
// Returns the trimmed string, or '' for any non-string value.
function asNonEmptyString(value) {
  return typeof value === 'string' ? value.trim() : '';
}
9
+
10
// Returns the first argument that normalizes to a non-empty trimmed
// string, or '' when none does.
function firstNonEmptyString(...values) {
  const match = values.map(asNonEmptyString).find((value) => value !== '');
  return match ?? '';
}
19
+
20
// True when `value` parses as a URL with an http: or https: scheme.
function isValidHttpUrl(value) {
  const candidate = asNonEmptyString(value);
  if (!candidate) {
    return false;
  }

  try {
    return ['https:', 'http:'].includes(new URL(candidate).protocol);
  } catch {
    // new URL() throws on anything that is not a parseable absolute URL.
    return false;
  }
}
33
+
34
// Normalizes a model id for the OpenRouter API:
//   - bare names get the provider prefix ("free" -> "openrouter/free");
//   - "openrouter/openrouter/free" collapses to "openrouter/free";
//   - other providers' paths pass through untouched;
//   - blank input (or "openrouter/" with nothing after it) yields ''.
function normalizeOpenRouterModel(model) {
  const trimmed = asNonEmptyString(model);
  if (!trimmed) {
    return '';
  }

  // Only the first separator identifies the provider; everything after it
  // is part of the model path and must be preserved.
  const separatorIndex = trimmed.indexOf('/');
  if (separatorIndex === -1) {
    return `openrouter/${trimmed}`;
  }

  const providerPart = trimmed.substring(0, separatorIndex).trim().toLowerCase();
  if (providerPart !== 'openrouter') {
    return trimmed;
  }

  const remainder = trimmed.substring(separatorIndex + 1).trim();
  if (!remainder) {
    return '';
  }

  // A remainder with its own slash is already fully qualified; otherwise
  // re-attach the canonical lowercase prefix.
  return remainder.includes('/') ? remainder : `openrouter/${remainder}`;
}
65
+
66
/**
 * OpenRouterProvider — minimal client for the OpenRouter chat-completions
 * API (OpenAI-compatible payload plus OpenRouter extensions such as
 * transforms/models/route and the HTTP-Referer / X-Title headers).
 */
class OpenRouterProvider {
  /**
   * @param {object} [config] - apiKey, baseURL, defaultModel, timeout, and
   *   optional referer/title forwarded as attribution headers.
   */
  constructor(config = {}) {
    // Prefer an explicit base URL, then the environment; fall back to the
    // public endpoint whenever the configured value is not a valid URL.
    const configuredBaseURL = firstNonEmptyString(config.baseURL, process.env.OPEN_ROUTER_BASE_URL);
    this.baseURL = isValidHttpUrl(configuredBaseURL)
      ? configuredBaseURL
      : 'https://openrouter.ai/api/v1/chat/completions';

    this.apiKey = firstNonEmptyString(config.apiKey, process.env.OPEN_ROUTER_API_KEY);
    const configuredDefaultModel = firstNonEmptyString(config.defaultModel, process.env.OPEN_ROUTER_DEFAULT_MODEL) || 'openrouter/openrouter/free';
    this.defaultModel = configuredDefaultModel;
    this.timeout = config.timeout || 60000; // 60 seconds
    this.config = config; // Store entire config for additional properties like referer, title
  }

  /**
   * POSTs `data` to the configured endpoint and resolves with the parsed
   * JSON body. Per-request overrides (apiKey/baseURL/timeout/referer/title)
   * beat instance config, which beats the environment.
   * @param {object} data - JSON-serializable request payload
   * @param {object} [requestOptions] - per-request overrides
   * @returns {Promise<object>}
   * @throws {Error} on missing key, invalid URL, non-2xx status, parse failure, or timeout
   */
  async makeRequest(data, requestOptions = {}) {
    const apiKey = firstNonEmptyString(requestOptions.apiKey, this.apiKey);
    const baseURL = firstNonEmptyString(requestOptions.baseURL, this.baseURL);
    const timeout = requestOptions.timeout || this.timeout;
    const referer = firstNonEmptyString(requestOptions.referer, this.config.referer, process.env.OPEN_ROUTER_REFERER);
    const title = firstNonEmptyString(requestOptions.title, this.config.title, process.env.OPEN_ROUTER_TITLE);

    if (!apiKey) {
      throw new Error('OpenRouter API key is required. Set OPEN_ROUTER_API_KEY environment variable or pass apiKey in config.');
    }

    if (!isValidHttpUrl(baseURL)) {
      throw new Error('OpenRouter base URL is invalid. Set OPEN_ROUTER_BASE_URL to a valid http(s) URL.');
    }

    const postData = JSON.stringify(data);
    const parsedUrl = new URL(baseURL);

    const headers = {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${apiKey}`,
      'Content-Length': Buffer.byteLength(postData)
    };

    // Optional OpenRouter attribution headers.
    if (isValidHttpUrl(referer)) {
      headers['HTTP-Referer'] = referer;
    }

    if (title) {
      headers['X-Title'] = title;
    }

    // BUG FIX: isValidHttpUrl() accepts plain http URLs, but the original
    // always used https.request with a default port of 443, so an http
    // endpoint (e.g. a local proxy) could never work. Pick the transport
    // and default port that match the URL's scheme.
    const isPlainHttp = parsedUrl.protocol === 'http:';
    const transport = isPlainHttp ? http : https;

    const options = {
      hostname: parsedUrl.hostname,
      port: parsedUrl.port || (isPlainHttp ? 80 : 443),
      path: parsedUrl.pathname + parsedUrl.search,
      method: 'POST',
      headers
    };

    return new Promise((resolve, reject) => {
      const req = transport.request(options, (res) => {
        let body = '';

        res.on('data', (chunk) => {
          body += chunk;
        });

        res.on('end', () => {
          try {
            if (res.statusCode >= 200 && res.statusCode < 300) {
              resolve(JSON.parse(body));
            } else {
              const error = JSON.parse(body);
              reject(new Error(`OpenRouter API error (${res.statusCode}): ${error.error?.message || 'Unknown error'}`));
            }
          } catch (parseError) {
            reject(new Error(`Failed to parse OpenRouter response: ${parseError.message}`));
          }
        });
      });

      req.on('error', (error) => {
        reject(new Error(`OpenRouter request failed: ${error.message}`));
      });

      req.setTimeout(timeout, () => {
        req.destroy();
        reject(new Error('OpenRouter request timed out'));
      });

      req.write(postData);
      req.end();
    });
  }

  /**
   * Sends a chat completion request and maps the response into the shared
   * { content, role, tool_calls, usage, model, finishReason } shape.
   * @param {Array<object>} messages - chat messages in OpenAI format
   * @param {object} [options] - sampling/tool options plus per-request apiKey/baseURL/timeout
   * @returns {Promise<object>}
   * @throws {Error} when the resolved model name normalizes to an empty string
   */
  async createCompletion(messages, options = {}) {
    const normalizedModel = normalizeOpenRouterModel(options.model || this.defaultModel);

    if (!normalizedModel) {
      throw new Error('OpenRouter model is invalid. Provide a non-empty model, for example "openrouter/free" or "meta-llama/llama-3.1-8b-instruct:free".');
    }

    const data = {
      model: normalizedModel,
      messages: messages,
      temperature: options.temperature || 0.7,
      max_tokens: options.maxTokens,
      top_p: options.topP,
      frequency_penalty: options.frequencyPenalty,
      presence_penalty: options.presencePenalty,
      stop: options.stop,
      tools: options.tools,
      tool_choice: options.toolChoice,
      transforms: options.transforms,
      models: options.models,
      route: options.route
    };

    // Remove undefined values so they are not serialized into the payload.
    Object.keys(data).forEach((key) => {
      if (data[key] === undefined) {
        delete data[key];
      }
    });

    const response = await this.makeRequest(data, {
      apiKey: options.apiKey,
      baseURL: options.baseURL,
      timeout: options.timeout
    });

    return {
      content: response.choices[0]?.message?.content || '',
      role: response.choices[0]?.message?.role || 'assistant',
      tool_calls: response.choices[0]?.message?.tool_calls,
      usage: response.usage,
      model: response.model,
      finishReason: response.choices[0]?.finish_reason
    };
  }
}
205
+
206
// Sole export: the OpenRouter provider class.
module.exports = OpenRouterProvider;
package/core/router.js ADDED
@@ -0,0 +1,252 @@
1
+ const logger = require('./logger');
2
+ const { completion } = require('./completion');
3
+
4
// Maps any unrecognized strategy name to 'default'.
function normalizeStrategy(strategy) {
  switch (strategy) {
    case 'default':
    case 'random':
    case 'sequential':
      return strategy;
    default:
      return 'default';
  }
}
8
+
9
// Expands "os.environ/VAR" references to process.env.VAR, leaving the
// original string untouched when the variable is unset; every other value
// (including non-strings) passes through unchanged.
function resolveApiKey(apiKey) {
  const ENV_PREFIX = 'os.environ/';
  if (typeof apiKey !== 'string' || !apiKey.startsWith(ENV_PREFIX)) {
    return apiKey;
  }

  const envVar = apiKey.slice(ENV_PREFIX.length);
  return process.env[envVar] || apiKey;
}
16
+
17
// Validates the router model list and resolves env-referenced API keys,
// throwing a descriptive error on the first malformed entry. Only the
// known llm_params fields are copied through.
function normalizeModelList(modelList) {
  if (!Array.isArray(modelList) || modelList.length === 0) {
    throw new Error('Router requires a non-empty model list');
  }

  const normalized = [];
  modelList.forEach((model, index) => {
    if (!model || typeof model !== 'object') {
      throw new Error(`Model at index ${index} must be an object`);
    }
    if (!model.model_name) {
      throw new Error(`Model at index ${index} is missing model_name`);
    }
    if (!model.llm_params || typeof model.llm_params !== 'object') {
      throw new Error(`Model '${model.model_name}' is missing llm_params`);
    }
    if (!model.llm_params.model) {
      throw new Error(`Model '${model.model_name}' is missing llm_params.model`);
    }

    normalized.push({
      model_name: model.model_name,
      llm_params: {
        model: model.llm_params.model,
        api_key: resolveApiKey(model.llm_params.api_key),
        api_base: model.llm_params.api_base
      }
    });
  });

  return normalized;
}
46
+
47
// Groups normalized model entries by their logical model_name, preserving
// list order within each group.
function buildModelsByName(modelList) {
  const grouped = {};
  for (const model of modelList) {
    if (!grouped[model.model_name]) {
      grouped[model.model_name] = [];
    }
    grouped[model.model_name].push(model);
  }
  return grouped;
}
56
+
57
/**
 * Builds a router that load-balances completion calls across a list of
 * model deployments and applies optional pre/post-call guardrails.
 *
 * @param {Array<object>} modelList - deployments of shape
 *   { model_name, llm_params: { model, api_key, api_base } }. Several
 *   entries may share a model_name; requests for that name pick one at random.
 * @param {string} [strategy='default'] - auto-selection strategy:
 *   'random' picks uniformly; 'sequential' and 'default' round-robin.
 * @param {object} [options] - { allowUnsafeGuardrails: boolean } opt-in for
 *   guardrails whose code is a string (executed via new Function).
 * @returns {object} router API: completion(), selectModel(), autoSelectModel(),
 *   guardrail management, and model stats.
 */
function router(modelList, strategy = 'default', options = {}) {
  const normalizedModelList = normalizeModelList(modelList);
  const normalizedStrategy = normalizeStrategy(strategy);
  // String-code guardrails run arbitrary code, so they stay disabled unless
  // explicitly enabled via option or environment flag.
  const allowUnsafeGuardrails = Boolean(options.allowUnsafeGuardrails) || process.env.LLMJS2_ALLOW_UNSAFE_GUARDRAILS === 'true';
  const guardrails = [];
  const modelsByName = buildModelsByName(normalizedModelList);
  let sequentialIndex = 0; // round-robin cursor for 'sequential'/'default'

  // Uniformly random element of a non-empty array.
  function getRandomItem(items) {
    const randomIndex = Math.floor(Math.random() * items.length);
    return items[randomIndex];
  }

  // Round-robin over the full model list, wrapping at the end.
  function getNextSequentialModel() {
    const selectedModel = normalizedModelList[sequentialIndex];
    sequentialIndex = (sequentialIndex + 1) % normalizedModelList.length;
    return selectedModel;
  }

  function ensureModelsAvailable() {
    if (normalizedModelList.length === 0) {
      throw new Error('Router has no configured models');
    }
  }

  /**
   * Resolves a logical model name to a deployment. Without a name the
   * strategy decides; with a name, one of its deployments is used (picked
   * at random when there are several).
   * @throws {Error} when the name is not configured
   */
  function selectModel(modelName) {
    ensureModelsAvailable();

    if (!modelName) {
      return autoSelectModel();
    }

    const availableModels = modelsByName[modelName];
    if (!availableModels || availableModels.length === 0) {
      throw new Error(`Model '${modelName}' not found in router configuration`);
    }

    if (availableModels.length === 1) {
      return availableModels[0];
    }

    return getRandomItem(availableModels);
  }

  // Strategy-driven selection when the caller did not name a model.
  function autoSelectModel() {
    ensureModelsAvailable();

    switch (normalizedStrategy) {
      case 'random':
        return getRandomItem(normalizedModelList);
      case 'sequential':
      case 'default':
      default:
        return getNextSequentialModel();
    }
  }

  // Maps the public completion() input (snake_case / aliases) onto the
  // internal shape consumed by completionWithModel().
  function normalizeCompletionInput(input = {}) {
    return {
      modelName: input.model,
      messages: input.messages || [],
      stop: input.stop,
      tools: input.tools,
      toolChoice: input.tool_choice ?? input.toolChoice,
      timeout: input.timeout,
      apiKey: input.apiKey,
      host: input.host || input.baseURL || input.baseUrl,
      final: input.final ?? true
    };
  }

  /**
   * Runs a single guardrail. Function-valued code is called directly;
   * string-valued code is compiled with new Function, gated behind
   * allowUnsafeGuardrails. SECURITY: string code runs with full process
   * privileges — only enable it for trusted configuration.
   */
  async function executeGuardrail(guardrail, processId, data) {
    if (typeof guardrail.code === 'string') {
      if (!allowUnsafeGuardrails) {
        throw new Error(
          `Guardrail '${guardrail.name}' uses string code, but unsafe execution is disabled. Set router option allowUnsafeGuardrails=true or LLMJS2_ALLOW_UNSAFE_GUARDRAILS=true to enable.`
        );
      }

      const func = new Function('processId', 'data', `return (${guardrail.code})(processId, data)`);
      return await func(processId, data);
    }

    if (typeof guardrail.code === 'function') {
      return await guardrail.code(processId, data);
    }

    throw new Error(`Invalid guardrail code for '${guardrail.name}'`);
  }

  /**
   * Chains all guardrails registered for `mode` ('pre_call'/'post_call'),
   * threading each guardrail's return value into the next. A null/undefined
   * return or a thrown error aborts with a descriptive message.
   */
  async function applyGuardrails(mode, processId, data) {
    const modeLabel = mode === 'pre_call' ? 'Pre-call' : 'Post-call';
    let currentValue = data;

    for (const guardrail of guardrails) {
      if (guardrail.mode !== mode) {
        continue;
      }

      try {
        const processed = await executeGuardrail(guardrail, processId, currentValue);
        if (processed === null || processed === undefined) {
          throw new Error(`Guardrail '${guardrail.name}' returned null/undefined`);
        }
        currentValue = processed;
      } catch (error) {
        throw new Error(`${modeLabel} guardrail '${guardrail.name}' failed: ${error.message}`);
      }
    }

    return currentValue;
  }

  // Calls the shared completion() helper with the deployment's credentials
  // filled in; explicit per-request apiKey/host still win.
  async function completionWithModel(modelConfig, input) {
    return completion({
      ...input,
      model: modelConfig.llm_params.model,
      apiKey: input.apiKey || modelConfig.llm_params.api_key,
      host: input.host || modelConfig.llm_params.api_base,
      final: false
    });
  }

  // Correlation id for log lines; Math.random() is fine here — the id is
  // for tracing, not security. slice(2, 11) replaces the deprecated
  // substr(2, 9) and takes the same nine characters.
  function generateProcessId() {
    return `req_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
  }

  return {
    /** Replaces the guardrail list; a non-array argument clears it. */
    setGuardrails(nextGuardrails) {
      guardrails.length = 0;
      if (Array.isArray(nextGuardrails)) {
        guardrails.push(...nextGuardrails);
      }
    },

    /** Appends a single guardrail. */
    addGuardrail(guardrail) {
      guardrails.push(guardrail);
    },

    /** Logical model names known to this router. */
    getAvailableModels() {
      return Object.keys(modelsByName);
    },

    /** Deployment count per logical model name. */
    getModelStats() {
      const stats = {};
      Object.keys(modelsByName).forEach((modelName) => {
        stats[modelName] = modelsByName[modelName].length;
      });
      return stats;
    },

    selectModel,

    autoSelectModel,

    executeGuardrail,

    /**
     * Routes one completion request: select a deployment, run pre-call
     * guardrails, call the model, run post-call guardrails.
     * @returns {Promise<{result: object, selectedModel: string, selectedModelName: string}>}
     */
    async completion(input = {}) {
      const processId = generateProcessId();

      try {
        const normalizedInput = normalizeCompletionInput(input);
        const selectedModel = selectModel(normalizedInput.modelName);

        logger.debug('Selected model', {
          processId,
          model: selectedModel.llm_params.model,
          modelName: selectedModel.model_name
        });

        // Drop the routing-only modelName before handing off to completion().
        const { modelName, ...completionInput } = normalizedInput;
        const preparedInput = {
          model: selectedModel.llm_params.model,
          ...completionInput
        };

        const processedInput = await applyGuardrails('pre_call', processId, preparedInput);
        const result = await completionWithModel(selectedModel, processedInput);
        const finalResult = await applyGuardrails('post_call', processId, result);

        return {
          result: finalResult,
          selectedModel: selectedModel.llm_params.model,
          selectedModelName: selectedModel.model_name
        };
      } catch (error) {
        logger.error('Router error', { processId, error: error.message });
        throw error;
      }
    }
  };
}
249
+
250
// Public API: the router factory.
module.exports = { router };