modelmix 2.9.4 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/demo/custom.mjs +3 -3
- package/demo/fallback.mjs +38 -0
- package/index.js +38 -12
- package/package.json +7 -5
package/demo/custom.mjs
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import 'dotenv/config'
|
|
2
2
|
|
|
3
|
-
import { ModelMix,
|
|
3
|
+
import { ModelMix, MixCerebras, MixTogether } from '../index.js';
|
|
4
4
|
|
|
5
5
|
const mmix = new ModelMix({
|
|
6
6
|
options: {
|
|
@@ -13,8 +13,8 @@ const mmix = new ModelMix({
|
|
|
13
13
|
}
|
|
14
14
|
});
|
|
15
15
|
|
|
16
|
-
mmix.attach(new
|
|
16
|
+
mmix.attach(new MixCerebras());
|
|
17
17
|
|
|
18
|
-
let r = mmix.create('
|
|
18
|
+
let r = mmix.create('llama-4-scout-17b-16e-instruct').addText('hi there');
|
|
19
19
|
r = await r.addText('do you like cats?').message();
|
|
20
20
|
console.log(r);
|
|
package/demo/fallback.mjs
ADDED
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import { ModelMix, MixOpenAI, MixAnthropic, MixGrok } from '../index.js';
|
|
2
|
+
import dotenv from 'dotenv';
|
|
3
|
+
dotenv.config();
|
|
4
|
+
|
|
5
|
+
const mmix = new ModelMix({
|
|
6
|
+
config: {
|
|
7
|
+
max_history: 1,
|
|
8
|
+
debug: false,
|
|
9
|
+
bottleneck: {
|
|
10
|
+
minTime: 15000,
|
|
11
|
+
maxConcurrent: 1
|
|
12
|
+
}
|
|
13
|
+
},
|
|
14
|
+
options: {
|
|
15
|
+
max_tokens: 8192,
|
|
16
|
+
}
|
|
17
|
+
});
|
|
18
|
+
const an = new MixAnthropic();
|
|
19
|
+
an.config.url = 'fail';
|
|
20
|
+
mmix.attach(new MixOpenAI(), an, new MixGrok());
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
const modelOptionsRef = ['claude-3-5-sonnet-20241022', 'gpt-4.1-nano'];
|
|
24
|
+
|
|
25
|
+
async function main() {
|
|
26
|
+
const response = await generateThread(modelOptionsRef);
|
|
27
|
+
console.log(response);
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
async function generateThread(modelOptionsRef) {
|
|
31
|
+
const model = mmix.create(modelOptionsRef, { options: { temperature: 0.5 } });
|
|
32
|
+
model.addText('hola, como estas?');
|
|
33
|
+
const response = await model.message();
|
|
34
|
+
|
|
35
|
+
return response.split('---').map(section => section.trim());
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
main();
|
package/index.js
CHANGED
|
@@ -293,20 +293,23 @@ class MessageHandler {
|
|
|
293
293
|
});
|
|
294
294
|
}
|
|
295
295
|
|
|
296
|
+
async prepareMessages() {
|
|
297
|
+
await this.processImageUrls();
|
|
298
|
+
this.applyTemplate();
|
|
299
|
+
this.messages = this.messages.slice(-this.config.max_history);
|
|
300
|
+
this.messages = this.groupByRoles(this.messages);
|
|
301
|
+
this.options.messages = this.messages;
|
|
302
|
+
}
|
|
303
|
+
|
|
296
304
|
async execute() {
|
|
297
305
|
return this.mix.limiter.schedule(async () => {
|
|
298
306
|
try {
|
|
299
|
-
await this.processImageUrls();
|
|
300
|
-
this.applyTemplate();
|
|
301
|
-
this.messages = this.messages.slice(-this.config.max_history);
|
|
302
|
-
this.messages = this.groupByRoles(this.messages);
|
|
307
|
+
await this.prepareMessages();
|
|
303
308
|
|
|
304
309
|
if (this.messages.length === 0) {
|
|
305
310
|
throw new Error("No user messages have been added. Use addText(prompt), addTextFromFile(filePath), addImage(filePath), or addImageFromUrl(url) to add a prompt.");
|
|
306
311
|
}
|
|
307
312
|
|
|
308
|
-
this.options.messages = this.messages;
|
|
309
|
-
|
|
310
313
|
try {
|
|
311
314
|
const result = await this.modelEntry.create({ options: this.options, config: this.config });
|
|
312
315
|
this.messages.push({ role: "assistant", content: result.message });
|
|
@@ -316,7 +319,8 @@ class MessageHandler {
|
|
|
316
319
|
if (this.fallbackModels.length > 0) {
|
|
317
320
|
const nextModelKey = this.fallbackModels[0];
|
|
318
321
|
log.warn(`Model ${this.options.model} failed, trying fallback model ${nextModelKey}...`);
|
|
319
|
-
|
|
322
|
+
error.details && log.warn(error.details);
|
|
323
|
+
|
|
320
324
|
// Create a completely new handler with the fallback model
|
|
321
325
|
const nextHandler = this.mix.create(
|
|
322
326
|
[nextModelKey, ...this.fallbackModels.slice(1)],
|
|
@@ -331,17 +335,23 @@ class MessageHandler {
|
|
|
331
335
|
}
|
|
332
336
|
);
|
|
333
337
|
|
|
334
|
-
//
|
|
338
|
+
// Assign all messages directly
|
|
335
339
|
nextHandler.messages = [...this.messages];
|
|
336
340
|
|
|
337
|
-
//
|
|
341
|
+
// Keep same system and replacements
|
|
338
342
|
nextHandler.setSystem(this.config.system);
|
|
339
343
|
if (this.config.replace) {
|
|
340
344
|
nextHandler.replace(this.config.replace);
|
|
341
345
|
}
|
|
342
346
|
|
|
343
|
-
|
|
344
|
-
|
|
347
|
+
await nextHandler.prepareMessages();
|
|
348
|
+
|
|
349
|
+
const result = await nextHandler.modelEntry.create({
|
|
350
|
+
options: nextHandler.options,
|
|
351
|
+
config: nextHandler.config
|
|
352
|
+
});
|
|
353
|
+
nextHandler.messages.push({ role: "assistant", content: result.message });
|
|
354
|
+
return result;
|
|
345
355
|
}
|
|
346
356
|
throw error;
|
|
347
357
|
}
|
|
@@ -719,4 +729,20 @@ class MixTogether extends MixCustom {
|
|
|
719
729
|
}
|
|
720
730
|
}
|
|
721
731
|
|
|
722
|
-
|
|
732
|
+
class MixCerebras extends MixCustom {
|
|
733
|
+
getDefaultConfig(customConfig) {
|
|
734
|
+
return super.getDefaultConfig({
|
|
735
|
+
url: 'https://api.cerebras.ai/v1/chat/completions',
|
|
736
|
+
prefix: ["llama"],
|
|
737
|
+
apiKey: process.env.CEREBRAS_API_KEY,
|
|
738
|
+
...customConfig
|
|
739
|
+
});
|
|
740
|
+
}
|
|
741
|
+
|
|
742
|
+
create(args = { config: {}, options: {} }) {
|
|
743
|
+
args.options.messages = [{ role: 'system', content: args.config.system }, ...args.options.messages || []];
|
|
744
|
+
return super.create(args);
|
|
745
|
+
}
|
|
746
|
+
}
|
|
747
|
+
|
|
748
|
+
module.exports = { MixCustom, ModelMix, MixAnthropic, MixOpenAI, MixPerplexity, MixOllama, MixLMStudio, MixGroq, MixTogether, MixGrok, MixCerebras };
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "modelmix",
|
|
3
|
-
"version": "2.9.4",
|
|
3
|
+
"version": "3.0.0",
|
|
4
4
|
"description": "🧬 ModelMix - Unified API for Diverse AI LLM.",
|
|
5
5
|
"main": "index.js",
|
|
6
6
|
"repository": {
|
|
@@ -27,15 +27,17 @@
|
|
|
27
27
|
"ollama",
|
|
28
28
|
"lmstudio",
|
|
29
29
|
"together",
|
|
30
|
-
"
|
|
30
|
+
"nano",
|
|
31
31
|
"deepseek",
|
|
32
|
-
"fallback",
|
|
33
32
|
"o3",
|
|
34
|
-
"
|
|
33
|
+
"4.1",
|
|
35
34
|
"nousresearch",
|
|
36
35
|
"reasoning",
|
|
37
36
|
"bottleneck",
|
|
38
37
|
"claude-3-7-sonnet",
|
|
38
|
+
"cerebras",
|
|
39
|
+
"scout",
|
|
40
|
+
"fallback",
|
|
39
41
|
"clasen"
|
|
40
42
|
],
|
|
41
43
|
"author": "Martin Clasen",
|
|
@@ -45,7 +47,7 @@
|
|
|
45
47
|
},
|
|
46
48
|
"homepage": "https://github.com/clasen/ModelMix#readme",
|
|
47
49
|
"dependencies": {
|
|
48
|
-
"axios": "^1.
|
|
50
|
+
"axios": "^1.8.4",
|
|
49
51
|
"bottleneck": "^2.19.5",
|
|
50
52
|
"lemonlog": "^1.1.2"
|
|
51
53
|
}
|