modelmix 2.0.0 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +12 -1
- package/demo/demo.mjs +0 -1
- package/demo/lmstudio.mjs +30 -0
- package/demo/stream.mjs +0 -10
- package/index.js +47 -22
- package/package.json +3 -2
package/README.md
CHANGED
|
@@ -6,7 +6,7 @@
|
|
|
6
6
|
|
|
7
7
|
- **Unified Interface**: Interact with multiple AI models through a single, coherent API.
|
|
8
8
|
- **Request Control**: Manage the number of parallel requests to adhere to provider limitations (`max_request`).
|
|
9
|
-
- **Flexible Integration**: Easily integrate popular models like OpenAI, Anthropic, Perplexity, Ollama, or custom models.
|
|
9
|
+
- **Flexible Integration**: Easily integrate popular models like OpenAI, Anthropic, Perplexity, Ollama, LM Studio or custom models.
|
|
10
10
|
- **History Tracking**: Automatically logs the conversation history with model responses, allowing you to limit the number of historical messages with `max_history`.
|
|
11
11
|
|
|
12
12
|
## 📦 Installation
|
|
@@ -192,6 +192,17 @@ new MixPerplexity(args = { config: {}, options: {} })
|
|
|
192
192
|
new MixOllama(args = { config: {}, options: {} })
|
|
193
193
|
```
|
|
194
194
|
|
|
195
|
+
- **args**: Configuration object with `config` and `options` properties.
|
|
196
|
+
- **config**: Specific configuration settings for Ollama.
|
|
197
|
+
- `url`: The endpoint URL to which the model sends requests.
|
|
198
|
+
- **options**: Default options for Ollama model instances.
|
|
199
|
+
|
|
200
|
+
### MixLMStudio Class Overview
|
|
201
|
+
|
|
202
|
+
```javascript
|
|
203
|
+
new MixLMStudio(args = { config: {}, options: {} })
|
|
204
|
+
```
|
|
205
|
+
|
|
195
206
|
- **args**: Configuration object with `config` and `options` properties.
|
|
196
207
|
- **config**: Specific configuration settings for Ollama.
|
|
197
208
|
- `url`: The endpoint URL to which the model sends requests.
|
package/demo/lmstudio.mjs
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { ModelMix, MixLMStudio } from '../index.js';
|
|
2
|
+
|
|
3
|
+
const mmix = new ModelMix({
|
|
4
|
+
options: {
|
|
5
|
+
max_tokens: -1,
|
|
6
|
+
},
|
|
7
|
+
config: {
|
|
8
|
+
max_history: 2,
|
|
9
|
+
max_request: 1,
|
|
10
|
+
}
|
|
11
|
+
});
|
|
12
|
+
|
|
13
|
+
const model = new MixLMStudio({
|
|
14
|
+
config: {
|
|
15
|
+
prefix: ['Orenguteng'],
|
|
16
|
+
},
|
|
17
|
+
options: {
|
|
18
|
+
repeat_penalty: 1,
|
|
19
|
+
}
|
|
20
|
+
});
|
|
21
|
+
console.log(model.config)
|
|
22
|
+
|
|
23
|
+
mmix.attach(model);
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
const LMS = mmix.create('Orenguteng/Llama-3-8B-Lexi-Uncensored-GGUF');
|
|
27
|
+
console.log(await LMS
|
|
28
|
+
.addImage('./watson.png')
|
|
29
|
+
.addText('describir')
|
|
30
|
+
.message());
|
package/demo/stream.mjs
CHANGED
|
@@ -75,13 +75,3 @@ await mmix.create('openhermes2-mistral:latest')
|
|
|
75
75
|
.stream((data) => { console.log(data.message); });
|
|
76
76
|
|
|
77
77
|
console.log(r)
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
// await mmix.create('claude-3-haiku-20240307')
|
|
82
|
-
// // .addImage("./watson.png")
|
|
83
|
-
// .addText("hola!").stream((data) => {
|
|
84
|
-
// console.log("Streaming data:", data);
|
|
85
|
-
// });
|
|
86
|
-
|
|
87
|
-
// console.log(await gpt.raw());
|
package/index.js
CHANGED
|
@@ -25,6 +25,7 @@ class ModelMix {
|
|
|
25
25
|
this.models[key] = modelInstance;
|
|
26
26
|
modelInstance.queue = [];
|
|
27
27
|
modelInstance.active_requests = 0;
|
|
28
|
+
return this;
|
|
28
29
|
}
|
|
29
30
|
|
|
30
31
|
create(modelKey, overOptions = {}) {
|
|
@@ -276,20 +277,20 @@ class MixCustom {
|
|
|
276
277
|
|
|
277
278
|
class MixOpenAI extends MixCustom {
|
|
278
279
|
getDefaultConfig(customConfig) {
|
|
279
|
-
return {
|
|
280
|
-
...super.getDefaultConfig(customConfig),
|
|
280
|
+
return super.getDefaultConfig({
|
|
281
281
|
url: 'https://api.openai.com/v1/chat/completions',
|
|
282
|
-
prefix: ['gpt']
|
|
283
|
-
|
|
282
|
+
prefix: ['gpt'],
|
|
283
|
+
...customConfig
|
|
284
|
+
});
|
|
284
285
|
}
|
|
285
286
|
|
|
286
287
|
create(args = { config: {}, options: {} }) {
|
|
287
288
|
args.options.messages = [{ role: 'system', content: args.config.system }, ...args.options.messages || []];
|
|
288
|
-
args.options.messages = this.convertMessages(args.options.messages);
|
|
289
|
+
args.options.messages = MixOpenAI.convertMessages(args.options.messages);
|
|
289
290
|
return super.create(args);
|
|
290
291
|
}
|
|
291
292
|
|
|
292
|
-
convertMessages(messages) {
|
|
293
|
+
static convertMessages(messages) {
|
|
293
294
|
return messages.map(message => {
|
|
294
295
|
if (message.role === 'user' && message.content instanceof Array) {
|
|
295
296
|
message.content = message.content.map(content => {
|
|
@@ -312,19 +313,19 @@ class MixOpenAI extends MixCustom {
|
|
|
312
313
|
|
|
313
314
|
class MixAnthropic extends MixCustom {
|
|
314
315
|
getDefaultConfig(customConfig) {
|
|
315
|
-
return {
|
|
316
|
-
...super.getDefaultConfig(customConfig),
|
|
316
|
+
return super.getDefaultConfig({
|
|
317
317
|
url: 'https://api.anthropic.com/v1/messages',
|
|
318
|
-
prefix: ['claude']
|
|
319
|
-
|
|
318
|
+
prefix: ['claude'],
|
|
319
|
+
...customConfig
|
|
320
|
+
});
|
|
320
321
|
}
|
|
321
322
|
|
|
322
|
-
getDefaultHeaders() {
|
|
323
|
-
return {
|
|
324
|
-
...super.getDefaultHeaders(),
|
|
323
|
+
getDefaultHeaders(getDefaultHeaders) {
|
|
324
|
+
return super.getDefaultHeaders({
|
|
325
325
|
'x-api-key': this.config.apiKey,
|
|
326
|
-
'anthropic-version': '2023-06-01'
|
|
327
|
-
|
|
326
|
+
'anthropic-version': '2023-06-01',
|
|
327
|
+
...getDefaultHeaders
|
|
328
|
+
});
|
|
328
329
|
}
|
|
329
330
|
|
|
330
331
|
extractDelta(data) {
|
|
@@ -339,11 +340,11 @@ class MixAnthropic extends MixCustom {
|
|
|
339
340
|
|
|
340
341
|
class MixPerplexity extends MixCustom {
|
|
341
342
|
getDefaultConfig(customConfig) {
|
|
342
|
-
return {
|
|
343
|
-
...super.getDefaultConfig(customConfig),
|
|
343
|
+
return super.getDefaultConfig({
|
|
344
344
|
url: 'https://api.perplexity.ai/chat/completions',
|
|
345
|
-
prefix: ['pplx', 'llama', 'mixtral']
|
|
346
|
-
|
|
345
|
+
prefix: ['pplx', 'llama', 'mixtral'],
|
|
346
|
+
...customConfig
|
|
347
|
+
});
|
|
347
348
|
}
|
|
348
349
|
|
|
349
350
|
create(args = { config: {}, options: {} }) {
|
|
@@ -354,6 +355,13 @@ class MixPerplexity extends MixCustom {
|
|
|
354
355
|
|
|
355
356
|
class MixOllama extends MixCustom {
|
|
356
357
|
|
|
358
|
+
getDefaultConfig(customConfig) {
|
|
359
|
+
return super.getDefaultConfig({
|
|
360
|
+
url: 'http://localhost:11434/api/chat',
|
|
361
|
+
...customConfig
|
|
362
|
+
});
|
|
363
|
+
}
|
|
364
|
+
|
|
357
365
|
getDefaultOptions(customOptions) {
|
|
358
366
|
return {
|
|
359
367
|
options: customOptions,
|
|
@@ -367,7 +375,7 @@ class MixOllama extends MixCustom {
|
|
|
367
375
|
|
|
368
376
|
create(args = { config: {}, options: {} }) {
|
|
369
377
|
|
|
370
|
-
args.options.messages = this.convertMessages(args.options.messages);
|
|
378
|
+
args.options.messages = MixOllama.convertMessages(args.options.messages);
|
|
371
379
|
args.options.messages = [{ role: 'system', content: args.config.system }, ...args.options.messages || []];
|
|
372
380
|
return super.create(args);
|
|
373
381
|
}
|
|
@@ -376,7 +384,7 @@ class MixOllama extends MixCustom {
|
|
|
376
384
|
return { response: response.data, message: response.data.message.content.trim() };
|
|
377
385
|
}
|
|
378
386
|
|
|
379
|
-
convertMessages(messages) {
|
|
387
|
+
static convertMessages(messages) {
|
|
380
388
|
return messages.map(entry => {
|
|
381
389
|
let content = '';
|
|
382
390
|
let images = [];
|
|
@@ -398,4 +406,21 @@ class MixOllama extends MixCustom {
|
|
|
398
406
|
}
|
|
399
407
|
}
|
|
400
408
|
|
|
401
|
-
|
|
409
|
+
class MixLMStudio extends MixCustom {
|
|
410
|
+
getDefaultConfig(customConfig) {
|
|
411
|
+
return super.getDefaultConfig({
|
|
412
|
+
url: 'http://localhost:1234/v1/chat/completions',
|
|
413
|
+
...customConfig
|
|
414
|
+
});
|
|
415
|
+
}
|
|
416
|
+
|
|
417
|
+
create(args = { config: {}, options: {} }) {
|
|
418
|
+
args.options.messages = [{ role: 'system', content: args.config.system }, ...args.options.messages || []];
|
|
419
|
+
args.options.messages = MixOpenAI.convertMessages(args.options.messages);
|
|
420
|
+
return super.create(args);
|
|
421
|
+
}
|
|
422
|
+
|
|
423
|
+
|
|
424
|
+
}
|
|
425
|
+
|
|
426
|
+
module.exports = { MixCustom, ModelMix, MixAnthropic, MixOpenAI, MixPerplexity, MixOllama, MixLMStudio };
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "modelmix",
|
|
3
|
-
"version": "2.0.0",
|
|
3
|
+
"version": "2.2.0",
|
|
4
4
|
"description": "🧬 ModelMix - Unified API for Diverse AI Language Models.",
|
|
5
5
|
"main": "index.js",
|
|
6
6
|
"repository": {
|
|
@@ -24,7 +24,8 @@
|
|
|
24
24
|
"multimodal",
|
|
25
25
|
"omni",
|
|
26
26
|
"4o",
|
|
27
|
-
"ollama"
|
|
27
|
+
"ollama",
|
|
28
|
+
"lmstudio"
|
|
28
29
|
],
|
|
29
30
|
"author": "Martin Clasen",
|
|
30
31
|
"license": "MIT",
|