modelmix 2.8.8 → 2.9.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -8,6 +8,7 @@
 - **Request Rate Control**: Manage the rate of requests to adhere to provider limitations using Bottleneck.
 - **Flexible Integration**: Easily integrate popular models like OpenAI, Anthropic, Perplexity, Groq, Together AI, Ollama, LM Studio or custom models.
 - **History Tracking**: Automatically logs the conversation history with model responses, allowing you to limit the number of historical messages with `max_history`.
+- **Model Fallbacks**: Automatically try different models if one fails or is unavailable.
 
 ## 📦 Installation
 
@@ -82,6 +83,21 @@ Here's a quick example to get you started:
 
 3. **Generate responses from different models**:
 
+#### Model Fallbacks
+```javascript
+// Create a message handler with multiple fallback models
+const handler = mmix.create(['grok-2-latest', 'claude-3-7-sonnet-20250219']);
+
+// If the first model (grok-2-latest) fails or is unavailable,
+// ModelMix will automatically try the next model (claude-3-7-sonnet)
+const response = await handler.addText('do you like cats?').message();
+```
+
+The `create()` method accepts either a single model name as a string or an array of model names. When an array is provided, ModelMix will attempt to use each model in order until a successful response is received. This is useful for:
+- Implementing fallback options when a primary model is unavailable
+- Load balancing across different providers
+- Ensuring high availability in your application
+
 #### gpt-4o-mini
 ```javascript
 const gpt = mmix.create('gpt-4o-mini', { options: { temperature: 0 } });
@@ -247,7 +263,7 @@ new ModelMix(args = { options: {}, config: {} })
   - ...(Additional default options can be added as needed)
 - **config**: This object contains configuration settings that control the behavior of the `ModelMix` instance. These settings can also be overridden for specific model instances. Examples of configuration settings include:
   - `system`: Sets the default system message for the model, e.g., "You are an assistant."
-  - `max_history`: Limits the number of historical messages to retain, e.g., 5.
+  - `max_history`: Limits the number of historical messages to retain, e.g., 1.
  - `bottleneck`: Configures the rate limiting behavior using Bottleneck. For example:
    - `maxConcurrent`: Maximum number of concurrent requests
    - `minTime`: Minimum time between requests (in ms)
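Note that the README's `max_history` example value drops from 5 to 1 because the library default changes in this release (see the `index.js` hunk further down, where `this.messages.slice(-this.config.max_history)` does the trimming). A minimal sketch of what the new default means in practice; the message values here are illustrative only:

```javascript
// Illustrative sketch of max_history trimming, mirroring
// `this.messages.slice(-this.config.max_history)` in index.js.
const messages = [
    { role: 'user', content: 'first question' },
    { role: 'assistant', content: 'first answer' },
    { role: 'user', content: 'second question' },
];

const max_history = 1; // new default in 2.9.2 (was 5 in 2.8.8)
console.log(messages.slice(-max_history));
// [{ role: 'user', content: 'second question' }]
// With the new default only the latest message is retained,
// so raise max_history if you rely on multi-turn context.
```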
package/demo/default.env CHANGED
@@ -1,4 +1,6 @@
 ANTHROPIC_API_KEY=""
 OPENAI_API_KEY=""
 PPLX_API_KEY=""
-GROQ_API_KEY=""
+GROQ_API_KEY=""
+TOGETHER_API_KEY=""
+XAI_API_KEY=""
package/demo/grok.mjs ADDED
@@ -0,0 +1,19 @@
+import 'dotenv/config'
+
+import { ModelMix, MixGrok, MixAnthropic } from '../index.js';
+
+const mmix = new ModelMix({
+    options: {
+        max_tokens: 2000,
+    },
+    config: {
+        system: 'You are ALF from Melmac.',
+        max_history: 2
+    }
+});
+
+mmix.attach(new MixGrok());
+mmix.attach(new MixAnthropic());
+
+const r = await mmix.create(['grok-2-latest', 'claude-3-7-sonnet-20250219']).addText('do you like cats?').message();
+console.log(r)
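The demo assumes at least one of the two models answers. Based on the `execute()` change below, the final error is rethrown once every model in the array has failed, so callers can catch it. A hypothetical variation of the demo's last lines:

```javascript
// Hypothetical variation of demo/grok.mjs: if every model in the
// array fails, execute() rethrows the last error and the await rejects.
try {
    const r = await mmix.create(['grok-2-latest', 'claude-3-7-sonnet-20250219'])
        .addText('do you like cats?')
        .message();
    console.log(r);
} catch (error) {
    // Reached only after all models in the array have failed.
    console.error('All models failed:', error.message);
}
```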
package/index.js CHANGED
@@ -23,7 +23,7 @@ class ModelMix {
 
         this.config = {
             system: 'You are an assistant.',
-            max_history: 5, // Default max history
+            max_history: 1, // Default max history
             debug: false,
             bottleneck: defaultBottleneckConfig,
             ...args.config
@@ -43,15 +43,31 @@ class ModelMix {
         return this;
     }
 
-    create(modelKey, args = { config: {}, options: {} }) {
+    create(modelKeys, args = { config: {}, options: {} }) {
+        // If modelKeys is a string, convert it to an array for backwards compatibility
+        const modelArray = Array.isArray(modelKeys) ? modelKeys : [modelKeys];
+
+        if (modelArray.length === 0) {
+            throw new Error('No model keys provided');
+        }
+
+        // Verify that all requested models are available
+        const unavailableModels = modelArray.filter(modelKey => {
+            return !Object.values(this.models).some(entry =>
+                entry.config.prefix.some(p => modelKey.startsWith(p))
+            );
+        });
+
+        if (unavailableModels.length > 0) {
+            throw new Error(`The following models are not available: ${unavailableModels.join(', ')}`);
+        }
+
+        // Once all models are verified as available, take the first one
+        const modelKey = modelArray[0];
         const modelEntry = Object.values(this.models).find(entry =>
             entry.config.prefix.some(p => modelKey.startsWith(p))
         );
 
-        if (!modelEntry) {
-            throw new Error(`Model with prefix matching ${modelKey} is not attached.`);
-        }
-
         const options = {
             ...this.defaultOptions,
             ...modelEntry.options,
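The availability check above resolves each requested key by prefix against the attached mixers. A standalone sketch of that matching rule; the `models` registry below is hypothetical and stands in for entries created by `attach()`:

```javascript
// Standalone sketch of the prefix matching used by create().
// Each attached mixer contributes a prefix list, e.g. MixGrok uses ['grok'].
const models = {
    grok: { config: { prefix: ['grok'] } },
    anthropic: { config: { prefix: ['claude'] } },
};

function findUnavailable(modelArray) {
    return modelArray.filter(modelKey =>
        !Object.values(models).some(entry =>
            entry.config.prefix.some(p => modelKey.startsWith(p))
        )
    );
}

console.log(findUnavailable(['grok-2-latest', 'claude-3-7-sonnet-20250219'])); // []
console.log(findUnavailable(['gpt-4o-mini'])); // ['gpt-4o-mini'] (no mixer attached)
```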
@@ -65,7 +81,8 @@ class ModelMix {
             ...args.config
         };
 
-        return new MessageHandler(this, modelEntry, options, config);
+        // Pass remaining models array for fallback
+        return new MessageHandler(this, modelEntry, options, config, modelArray.slice(1));
     }
 
     setSystem(text) {
@@ -81,7 +98,7 @@ class ModelMix {
 
     readFile(filePath, options = { encoding: 'utf8' }) {
         try {
-            const absolutePath = path.resolve(process.cwd(), filePath);
+            const absolutePath = path.resolve(filePath);
             return fs.readFileSync(absolutePath, options);
         } catch (error) {
             if (error.code === 'ENOENT') {
@@ -96,13 +113,13 @@ class ModelMix {
 }
 
 class MessageHandler {
-    constructor(mix, modelEntry, options, config) {
+    constructor(mix, modelEntry, options, config, fallbackModels = []) {
         this.mix = mix;
         this.modelEntry = modelEntry;
         this.options = options;
         this.config = config;
         this.messages = [];
-
+        this.fallbackModels = fallbackModels;
         this.imagesToProcess = [];
     }
 
@@ -276,22 +293,42 @@ class MessageHandler {
 
     async execute() {
         return this.mix.limiter.schedule(async () => {
-            await this.processImageUrls();
-
-            this.applyTemplate();
-            this.messages = this.messages.slice(-this.config.max_history);
-            this.messages = this.groupByRoles(this.messages);
+            try {
+                await this.processImageUrls();
+                this.applyTemplate();
+                this.messages = this.messages.slice(-this.config.max_history);
+                this.messages = this.groupByRoles(this.messages);
 
-            if (this.messages.length === 0) {
-                throw new Error("No user messages have been added. Use addText(prompt), addTextFromFile(filePath), addImage(filePath), or addImageFromUrl(url) to add a prompt.");
-            }
+                if (this.messages.length === 0) {
+                    throw new Error("No user messages have been added. Use addText(prompt), addTextFromFile(filePath), addImage(filePath), or addImageFromUrl(url) to add a prompt.");
+                }
 
-            this.options.messages = this.messages;
+                this.options.messages = this.messages;
 
-            try {
-                const result = await this.modelEntry.create({ options: this.options, config: this.config });
-                this.messages.push({ role: "assistant", content: result.message });
-                return result;
+                try {
+                    const result = await this.modelEntry.create({ options: this.options, config: this.config });
+                    this.messages.push({ role: "assistant", content: result.message });
+                    return result;
+                } catch (error) {
+                    // If there are fallback models available, try the next one
+                    if (this.fallbackModels.length > 0) {
+                        const nextModelKey = this.fallbackModels[0];
+                        log.warn(`Model ${this.options.model} failed, trying fallback model ${nextModelKey}...`);
+
+                        // Create new handler with remaining fallback models
+                        const nextHandler = this.mix.create(nextModelKey, {
+                            options: this.options,
+                            config: this.config
+                        });
+
+                        // Copy current messages to new handler
+                        nextHandler.messages = [...this.messages];
+
+                        // Try with next model
+                        return nextHandler.execute();
+                    }
+                    throw error;
+                }
             } catch (error) {
                 throw error;
             }
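The fallback above recurses: on failure, `create()` is called with the next key and a fresh handler re-runs `execute()`. A simplified sketch of the control flow as the README describes it, with the remaining list threaded through each retry; `tryModel` is a hypothetical stand-in for the provider call:

```javascript
// Simplified sketch of recursive fallback; `tryModel` is hypothetical.
async function executeWithFallback(modelKey, fallbackModels, tryModel) {
    try {
        return await tryModel(modelKey);
    } catch (error) {
        if (fallbackModels.length > 0) {
            const [nextModelKey, ...rest] = fallbackModels;
            console.warn(`Model ${modelKey} failed, trying fallback model ${nextModelKey}...`);
            return executeWithFallback(nextModelKey, rest, tryModel);
        }
        throw error; // no fallbacks left: surface the last error
    }
}
```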
@@ -581,6 +618,17 @@ class MixOllama extends MixCustom {
     }
 }
 
+class MixGrok extends MixOpenAI {
+    getDefaultConfig(customConfig) {
+        return super.getDefaultConfig({
+            url: 'https://api.x.ai/v1/chat/completions',
+            prefix: ['grok'],
+            apiKey: process.env.XAI_API_KEY,
+            ...customConfig
+        });
+    }
+}
+
 class MixLMStudio extends MixCustom {
     getDefaultConfig(customConfig) {
         return super.getDefaultConfig({
@@ -655,4 +703,4 @@ class MixTogether extends MixCustom {
     }
 }
 
-module.exports = { MixCustom, ModelMix, MixAnthropic, MixOpenAI, MixPerplexity, MixOllama, MixLMStudio, MixGroq, MixTogether };
+module.exports = { MixCustom, ModelMix, MixAnthropic, MixOpenAI, MixPerplexity, MixOllama, MixLMStudio, MixGroq, MixTogether, MixGrok };
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "modelmix",
-  "version": "2.8.8",
+  "version": "2.9.2",
   "description": "🧬 ModelMix - Unified API for Diverse AI LLM.",
   "main": "index.js",
   "repository": {
@@ -15,6 +15,7 @@
     "anthropic",
     "agent",
     "perplexity",
+    "grok",
     "sonnet-3",
     "gpt",
     "claude",
@@ -28,6 +29,7 @@
     "together",
     "o1",
     "deepseek",
+    "fallback",
     "o3",
     "o3-mini",
     "nousresearch",