modelmix 4.0.6 → 4.1.0

package/README.md CHANGED
@@ -118,6 +118,7 @@ Here's a comprehensive list of available methods:
 
  | Method | Provider | Model | Price (I/O) per 1 M tokens |
  | ------------------ | ---------- | ------------------------------ | -------------------------- |
+ | `gpt52()` | OpenAI | gpt-5.2 | [\$1.75 / \$14.00][1] |
  | `gpt51()` | OpenAI | gpt-5.1 | [\$1.25 / \$10.00][1] |
  | `gpt5()` | OpenAI | gpt-5 | [\$1.25 / \$10.00][1] |
  | `gpt5mini()` | OpenAI | gpt-5-mini | [\$0.25 / \$2.00][1] |
@@ -125,7 +126,6 @@ Here's a comprehensive list of available methods:
  | `gpt41()` | OpenAI | gpt-4.1 | [\$2.00 / \$8.00][1] |
  | `gpt41mini()` | OpenAI | gpt-4.1-mini | [\$0.40 / \$1.60][1] |
  | `gpt41nano()` | OpenAI | gpt-4.1-nano | [\$0.10 / \$0.40][1] |
- | `o3()` | OpenAI | o3 | [\$10.00 / \$40.00][1] |
  | `gptOss()` | Together | gpt-oss-120B | [\$0.15 / \$0.60][7] |
  | `opus45[think]()` | Anthropic | claude-opus-4-5-20251101 | [\$5.00 / \$25.00][2] |
  | `opus41[think]()` | Anthropic | claude-opus-4-1-20250805 | [\$15.00 / \$75.00][2] |
@@ -149,7 +149,7 @@ Here's a comprehensive list of available methods:
  | `kimiK2()` | Together | Kimi-K2-Instruct | [\$1.00 / \$3.00][7] |
  | `kimiK2think()` | Together | moonshotai/Kimi-K2-Thinking | [\$1.20 / \$4.00][7] |
 
- [1]: https://openai.com/api/pricing/ "Pricing | OpenAI"
+ [1]: https://platform.openai.com/docs/pricing "Pricing | OpenAI"
  [2]: https://docs.anthropic.com/en/docs/about-claude/pricing "Pricing - Anthropic"
  [3]: https://ai.google.dev/gemini-api/docs/pricing "Google AI for Developers"
  [4]: https://docs.perplexity.ai/guides/pricing "Pricing - Perplexity"
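
For reference, here is a minimal sketch of calling the new `gpt52()` method documented above, using the same fluent chain the package demos use. The import path, prompt text, and top-level `await` (which assumes an ES module consumer with OpenAI credentials already in the environment) are illustrative, not taken from the package:

    import { ModelMix } from 'modelmix';

    // gpt52() attaches the 'gpt-5.2' model through the OpenAI connector (see index.js below).
    const reply = await ModelMix.new()
        .gpt52()
        .addText('Summarize what ModelMix does in one sentence.')
        .message();

    console.log(reply);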
package/demo/demo.js CHANGED
@@ -1,5 +1,5 @@
- process.loadEnvFile();
- import { ModelMix, MixOpenAI, MixAnthropic, MixPerplexity, MixOllama } from '../index.js';
+ import 'dotenv/config';
+ import { ModelMix } from '../index.js';
 
 
  const mmix = new ModelMix({
@@ -7,7 +7,7 @@ const mmix = new ModelMix({
          temperature: 0.5,
      },
      config: {
-         // system: 'You are {name} from Melmac.',
+         system: 'You are {name} from Melmac.',
          max_history: 2,
          bottleneck: { maxConcurrent: 1 },
          debug: true,
@@ -26,25 +26,19 @@ const pplxSettings = {
 
  mmix.replace({ '{name}': 'ALF' });
 
- console.log("\n" + '--------| gpt5nano() |--------');
- const gpt = mmix.gpt5nano({ options: { temperature: 0 } }).addText("Have you ever eaten a {animal}?");
+ console.log("\n" + '--------| gpt51() |--------');
+ const opt = { reasoning_effort: 'none', verbosity: 'low' };
+ const gpt = mmix.gpt51(opt).addText("Have you ever eaten a {animal}?");
  gpt.replace({ '{animal}': 'cat' });
- console.log(await gpt.json({ time: '24:00:00', message: 'Hello' }, { time: 'Time in format HH:MM:SS' }));
+ await gpt.json({ time: '24:00:00', message: 'Hello' }, { time: 'Time in format HH:MM:SS' });
 
- console.log("\n" + '--------| sonnet4() |--------');
- const claude = mmix.new({ config: { debug: true } }).sonnet4();
+ console.log("\n" + '--------| sonnet45() |--------');
+ const claude = mmix.new({ config: { debug: true } }).sonnet45();
  claude.addImageFromUrl('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAFUlEQVR42mP8z8BQz0AEYBxVSF+FABJADveWkH6oAAAAAElFTkSuQmCC');
  claude.addText('in one word, which is the main color of the image?');
  const imageDescription = await claude.message();
  console.log(imageDescription);
 
- console.log("\n" + '--------| claude-3-7-sonnet-20250219 |--------');
- const writer = ModelMix.new().attach('claude-3-7-sonnet-20250219', new MixAnthropic());
- writer.setSystem('You are a writer like Stephen King');
- writer.replaceKeyFromFile('{story_title}', './title.md');
- const story = await writer.addTextFromFile('./prompt.md').message();
- console.log(story);
-
  console.log("\n" + '--------| sonar |--------');
  const pplx = ModelMix.new().sonar(pplxSettings);
  pplx.addText('How much is ETH trading in USD?');
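
The updated demo reads credentials through dotenv (added to demo/package.json below) instead of `process.loadEnvFile()`. A minimal sketch of that setup, where the `.env` key name is illustrative rather than something the package prescribes:

    // .env (illustrative key; use whatever variables your providers expect)
    // OPENAI_API_KEY=sk-...

    // Importing 'dotenv/config' loads .env into process.env as a side effect.
    import 'dotenv/config';
    import { ModelMix } from 'modelmix';

    console.log('OpenAI key present:', Boolean(process.env.OPENAI_API_KEY));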
package/demo/gemini.js ADDED
@@ -0,0 +1,41 @@
+ process.loadEnvFile();
+
+ import { ModelMix, MixGoogle } from '../index.js';
+ const mmix = new ModelMix({
+     options: {
+         max_tokens: 2000,
+     },
+     config: {
+         system: 'You are ALF from Melmac.',
+         max_history: 2,
+         debug: false
+     }
+ });
+
+ // Using gemini25flash (Gemini 2.5 Flash) with built-in method
+ console.log("\n" + '--------| gemini25flash() |--------');
+ const flash = await mmix.gemini25flash()
+     .addText('Hi there! Do you like cats?')
+     .message();
+
+ console.log(flash);
+
+ // Using gemini3pro (Gemini 3 Pro) with custom config
+ console.log("\n" + '--------| gemini3pro() with JSON response |--------');
+ const pro = mmix.new().gemini3pro();
+
+ pro.addText('Give me a fun fact about cats');
+ const jsonResponse = await pro.json({
+     fact: 'A fun fact about cats',
+     category: 'animal behavior'
+ });
+
+ console.log(jsonResponse);
+
+ // Using attach method with MixGoogle for custom model
+ console.log("\n" + '--------| Custom Gemini with attach() |--------');
+ mmix.attach('gemini-2.5-flash', new MixGoogle());
+
+ const custom = await mmix.addText('Tell me a short joke about cats.').message();
+ console.log(custom);
+
package/demo/package.json CHANGED
@@ -11,6 +11,7 @@
    "license": "ISC",
    "dependencies": {
      "@anthropic-ai/sdk": "^0.20.9",
+     "dotenv": "^17.2.3",
      "lemonlog": "^1.1.4"
    }
  }
package/index.js CHANGED
@@ -1,6 +1,7 @@
  const axios = require('axios');
  const fs = require('fs');
  const { fromBuffer } = require('file-type');
+ const { inspect } = require('util');
  const log = require('lemonlog')('ModelMix');
  const Bottleneck = require('bottleneck');
  const path = require('path');
@@ -46,7 +47,7 @@ class ModelMix {
      replace(keyValues) {
          this.config.replace = { ...this.config.replace, ...keyValues };
          return this;
-     }
+     }
 
      static new({ options = {}, config = {} } = {}) {
          return new ModelMix({ options, config });
@@ -56,6 +57,26 @@ class ModelMix {
          return new ModelMix({ options: this.options, config: this.config });
      }
 
+     static formatJSON(obj) {
+         return inspect(obj, {
+             depth: null,
+             colors: true,
+             maxArrayLength: null,
+             breakLength: 80,
+             compact: false
+         });
+     }
+
+     static formatMessage(message) {
+         if (typeof message !== 'string') return message;
+
+         try {
+             return ModelMix.formatJSON(JSON.parse(message.trim()));
+         } catch (e) {
+             return message;
+         }
+     }
+
      attach(key, provider) {
 
          if (this.models.some(model => model.key === key)) {
@@ -102,7 +123,13 @@ class ModelMix {
      }
      gpt51({ options = {}, config = {} } = {}) {
          return this.attach('gpt-5.1', new MixOpenAI({ options, config }));
-     }
+     }
+     gpt52({ options = {}, config = {} } = {}) {
+         return this.attach('gpt-5.2', new MixOpenAI({ options, config }));
+     }
+     gpt52chat({ options = {}, config = {} } = {}) {
+         return this.attach('gpt-5.2-chat-latest', new MixOpenAI({ options, config }));
+     }
      gptOss({ options = {}, config = {}, mix = { together: false, cerebras: false, groq: true } } = {}) {
          if (mix.together) return this.attach('openai/gpt-oss-120b', new MixTogether({ options, config }));
          if (mix.cerebras) return this.attach('gpt-oss-120b', new MixCerebras({ options, config }));
@@ -391,8 +418,11 @@ class ModelMix {
              stream: false,
          }
 
+         // Apply template replacements to system before adding extra instructions
+         let systemWithReplacements = this._template(this.config.system, this.config.replace);
+
          let config = {
-             system: this.config.system,
+             system: systemWithReplacements,
          }
 
          if (schemaExample) {
@@ -418,8 +448,11 @@ class ModelMix {
      }
 
      async block({ addSystemExtra = true } = {}) {
+         // Apply template replacements to system before adding extra instructions
+         let systemWithReplacements = this._template(this.config.system, this.config.replace);
+
          let config = {
-             system: this.config.system,
+             system: systemWithReplacements,
          }
 
          if (addSystemExtra) {
@@ -607,8 +640,24 @@ class ModelMix {
              }
 
              if (currentConfig.debug) {
-                 log.debug(`Request successful with model: ${currentModelKey}`);
-                 log.inspect(result.response);
+                 console.log(`\nRequest successful: ${currentModelKey}`);
+
+                 if (result.response) {
+                     console.log('\nRAW RESPONSE:');
+                     console.log(ModelMix.formatJSON(result.response));
+                 }
+
+                 if (result.message) {
+                     console.log('\nMESSAGE:');
+                     console.log(ModelMix.formatMessage(result.message));
+                 }
+
+                 if (result.think) {
+                     console.log('\nTHINKING:');
+                     console.log(result.think);
+                 }
+
+                 console.log('');
              }
 
              return result;
@@ -618,10 +667,10 @@ class ModelMix {
              log.warn(`Model ${currentModelKey} failed (Attempt #${i + 1}/${this.models.length}).`);
              if (error.message) log.warn(`Error: ${error.message}`);
              if (error.statusCode) log.warn(`Status Code: ${error.statusCode}`);
-             if (error.details) log.warn(`Details: ${JSON.stringify(error.details)}`);
+             if (error.details) log.warn(`Details:\n${ModelMix.formatJSON(error.details)}`);
 
              if (i === this.models.length - 1) {
-                 log.error(`All ${this.models.length} model(s) failed. Throwing last error from ${currentModelKey}.`);
+                 console.error(`All ${this.models.length} model(s) failed. Throwing last error from ${currentModelKey}.`);
                  throw lastError;
              } else {
                  const nextModelKey = this.models[i + 1].key;
@@ -656,13 +705,13 @@ class ModelMix {
                  toolArgs = toolCall.input || toolCall.arguments || {};
                  toolId = toolCall.id;
              } else {
-                 console.error('Unknown tool call format:', JSON.stringify(toolCall, null, 2));
+                 log.error('Unknown tool call format:\n', toolCall);
                  continue;
              }
 
              // Validate that we have the necessary data
              if (!toolName) {
-                 console.error('Tool call missing name:', JSON.stringify(toolCall, null, 2));
+                 log.error('Tool call missing name:\n', toolCall);
                  continue;
              }
 
@@ -839,10 +888,15 @@ class MixCustom {
          options.messages = this.convertMessages(options.messages, config);
 
          if (config.debug) {
-             log.debug("config");
-             log.info(config);
-             log.debug("options");
-             log.inspect(options);
+             console.log('\nREQUEST:');
+
+             console.log('\nCONFIG:');
+             const configToLog = { ...config };
+             delete configToLog.debug;
+             console.log(ModelMix.formatJSON(configToLog));
+
+             console.log('\nOPTIONS:');
+             console.log(ModelMix.formatJSON(options));
          }
 
          if (options.stream) {
@@ -1114,7 +1168,7 @@ class MixAnthropic extends MixCustom {
          } catch (error) {
              // Log the error details for debugging
              if (error.response && error.response.data) {
-                 console.error('Anthropic API Error:', JSON.stringify(error.response.data, null, 2));
+                 log.error('Anthropic API Error:\n', error.response.data);
              }
              throw error;
          }
@@ -1630,10 +1684,15 @@ class MixGoogle extends MixCustom {
 
          try {
              if (config.debug) {
-                 log.debug("config");
-                 log.info(config);
-                 log.debug("payload");
-                 log.inspect(payload);
+                 console.log('\nREQUEST (GOOGLE):');
+
+                 console.log('\nCONFIG:');
+                 const configToLog = { ...config };
+                 delete configToLog.debug;
+                 console.log(ModelMix.formatJSON(configToLog));
+
+                 console.log('\nPAYLOAD:');
+                 console.log(ModelMix.formatJSON(payload));
              }
 
              if (options.stream) {
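
A minimal sketch of the behavior the `_template()` change above enables: placeholders registered with `replace()` are now substituted into the system prompt before `json()` and `block()` append their extra instructions. The prompt strings mirror demo/demo.js; the schema example passed to `json()` is illustrative:

    import { ModelMix } from 'modelmix';

    const mmix = new ModelMix({
        config: { system: 'You are {name} from Melmac.' }
    });
    mmix.replace({ '{name}': 'ALF' });

    // With this change, the system prompt sent to the provider reads
    // 'You are ALF from Melmac.' instead of the raw '{name}' template.
    const reply = await mmix.gpt51()
        .addText('Who are you, in one sentence?')
        .json({ answer: 'a short self-description' });

    console.log(reply);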
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "modelmix",
-   "version": "4.0.6",
+   "version": "4.1.0",
    "description": "🧬 ModelMix - Unified API for Diverse AI LLM.",
    "main": "index.js",
    "repository": {