llmjs2 1.3.7 → 1.3.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +10 -6
  2. package/package.json +1 -1
package/index.js CHANGED
@@ -138,7 +138,7 @@ class LLMJS2 {
138
138
  return {
139
139
  model: null,
140
140
  messages: [{ role: 'user', content: input }],
141
- options: {}
141
+ options: { final: true }
142
142
  };
143
143
  }
144
144
 
@@ -152,13 +152,15 @@ class LLMJS2 {
152
152
  throw new Error('messages array cannot be empty');
153
153
  }
154
154
 
155
+
156
+
155
157
  // Validate message format
156
158
  for (const msg of input.messages) {
157
159
  if (!msg.role || !msg.content) {
158
160
  throw new Error('Each message must have role and content properties');
159
161
  }
160
- if (!['system', 'user', 'assistant'].includes(msg.role)) {
161
- throw new Error('Message role must be system, user, or assistant');
162
+ if (!['system', 'user', 'assistant', 'tool'].includes(msg.role)) {
163
+ throw new Error('Message role must be system, user, tool or assistant');
162
164
  }
163
165
  }
164
166
 
@@ -175,7 +177,8 @@ class LLMJS2 {
175
177
  tools: input.tools,
176
178
  toolChoice: input.tool_choice || input.toolChoice,
177
179
  apiKey: input.apiKey,
178
- timeout: input.timeout
180
+ timeout: input.timeout,
181
+ final: input.final ?? true
179
182
  }
180
183
  };
181
184
  }
@@ -189,6 +192,7 @@ class LLMJS2 {
189
192
  async completion(input) {
190
193
  try {
191
194
  const { model, messages, options } = this.validateInput(input);
195
+ const { final, ...providerOptions } = options;
192
196
 
193
197
  let provider, finalModel;
194
198
 
@@ -224,7 +228,7 @@ class LLMJS2 {
224
228
  messages: messages
225
229
  });
226
230
 
227
- const result = await provider.createCompletion(messages, { ...options, model: finalModel });
231
+ const result = await provider.createCompletion(messages, { ...providerOptions, model: finalModel });
228
232
 
229
233
  // Log response information
230
234
  logger.info('LLMJS2 📥 Received from LLM provider', {
@@ -232,7 +236,7 @@ class LLMJS2 {
232
236
  ...result
233
237
  });
234
238
 
235
- return result.content;
239
+ return final ? result.content : result;
236
240
 
237
241
  } catch (error) {
238
242
  // Sanitize error message to avoid leaking sensitive information
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "llmjs2",
3
- "version": "1.3.7",
3
+ "version": "1.3.9",
4
4
  "description": "A unified Node.js library for connecting to multiple LLM providers: OpenAI, Ollama, and OpenRouter",
5
5
  "main": "index.js",
6
6
  "type": "commonjs",