modelmix 3.4.0 → 3.5.2

This diff shows the contents of publicly released package versions as published to the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
package/index.js CHANGED
@@ -5,11 +5,17 @@ const log = require('lemonlog')('ModelMix');
  const Bottleneck = require('bottleneck');
  const path = require('path');
  const generateJsonSchema = require('./schema');
+ const { Client } = require("@modelcontextprotocol/sdk/client/index.js");
+ const { StdioClientTransport } = require("@modelcontextprotocol/sdk/client/stdio.js");

  class ModelMix {
+
  constructor({ options = {}, config = {} } = {}) {
  this.models = [];
  this.messages = [];
+ this.tools = {};
+ this.toolClient = {};
+ this.mcp = {};
  this.options = {
  max_tokens: 5000,
  temperature: 1, // 1 --> More creative, 0 --> More deterministic.
@@ -85,16 +91,25 @@ class ModelMix {
  gpt45({ options = {}, config = {} } = {}) {
  return this.attach('gpt-4.5-preview', new MixOpenAI({ options, config }));
  }
+ opus4think({ options = {}, config = {} } = {}) {
+ options = { ...MixAnthropic.thinkingOptions, ...options };
+ return this.attach('claude-opus-4-20250514', new MixAnthropic({ options, config }));
+ }
+ opus4({ options = {}, config = {} } = {}) {
+ return this.attach('claude-opus-4-20250514', new MixAnthropic({ options, config }));
+ }
+ sonnet4({ options = {}, config = {} } = {}) {
+ return this.attach('claude-sonnet-4-20250514', new MixAnthropic({ options, config }));
+ }
+ sonnet4think({ options = {}, config = {} } = {}) {
+ options = { ...MixAnthropic.thinkingOptions, ...options };
+ return this.attach('claude-sonnet-4-20250514', new MixAnthropic({ options, config }));
+ }
  sonnet37({ options = {}, config = {} } = {}) {
  return this.attach('claude-3-7-sonnet-20250219', new MixAnthropic({ options, config }));
  }
- sonnet37think({ options = {
- thinking: {
- "type": "enabled",
- "budget_tokens": 1024
- },
- temperature: 1
- }, config = {} } = {}) {
+ sonnet37think({ options = {}, config = {} } = {}) {
+ options = { ...MixAnthropic.thinkingOptions, ...options };
  return this.attach('claude-3-7-sonnet-20250219', new MixAnthropic({ options, config }));
  }
  sonnet35({ options = {}, config = {} } = {}) {
@@ -129,8 +144,9 @@ class ModelMix {
  return this.attach('grok-3-mini-beta', new MixGrok({ options, config }));
  }

- qwen3({ options = {}, config = {}, mix = { together: true } } = {}) {
+ qwen3({ options = {}, config = {}, mix = { together: true, cerebras: false } } = {}) {
  if (mix.together) this.attach('Qwen/Qwen3-235B-A22B-fp8-tput', new MixTogether({ options, config }));
+ if (mix.cerebras) this.attach('qwen-3-32b', new MixCerebras({ options, config }));
  return this;
  }

@@ -140,9 +156,10 @@ class ModelMix {
  if (mix.cerebras) this.attach('llama-4-scout-17b-16e-instruct', new MixCerebras({ options, config }));
  return this;
  }
- maverick({ options = {}, config = {}, mix = { groq: true, together: false } } = {}) {
+ maverick({ options = {}, config = {}, mix = { groq: true, together: false, lambda: false } } = {}) {
  if (mix.groq) this.attach('meta-llama/llama-4-maverick-17b-128e-instruct', new MixGroq({ options, config }));
  if (mix.together) this.attach('meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8', new MixTogether({ options, config }));
+ if (mix.lambda) this.attach('llama-4-maverick-17b-128e-instruct-fp8', new MixLambda({ options, config }));
  return this;
  }

@@ -153,6 +170,11 @@ class ModelMix {
  return this;
  }

+ hermes3({ options = {}, config = {}, mix = { lambda: true } } = {}) {
+ this.attach('Hermes-3-Llama-3.1-405B-FP8', new MixLambda({ options, config }));
+ return this;
+ }
+
  addText(text, { role = "user" } = {}) {
  const content = [{
  type: "text",
@@ -304,11 +326,11 @@ class ModelMix {

  replaceKeyFromFile(key, filePath) {
  const content = this.readFile(filePath);
- this.replace({ [key]: this.template(content, this.config.replace) });
+ this.replace({ [key]: this._template(content, this.config.replace) });
  return this;
  }

- template(input, replace) {
+ _template(input, replace) {
  if (!replace) return input;
  for (const k in replace) {
  input = input.split(/([¿?¡!,"';:\(\)\.\s])/).map(x => x === k ? replace[k] : x).join("");
@@ -319,10 +341,11 @@ class ModelMix {
  groupByRoles(messages) {
  return messages.reduce((acc, currentMessage, index) => {
  if (index === 0 || currentMessage.role !== messages[index - 1].role) {
- acc.push({
- role: currentMessage.role,
- content: currentMessage.content
- });
+ // acc.push({
+ // role: currentMessage.role,
+ // content: currentMessage.content
+ // });
+ acc.push(currentMessage);
  } else {
  acc[acc.length - 1].content = acc[acc.length - 1].content.concat(currentMessage.content);
  }
@@ -333,13 +356,13 @@ class ModelMix {
  applyTemplate() {
  if (!this.config.replace) return;

- this.config.system = this.template(this.config.system, this.config.replace);
+ this.config.system = this._template(this.config.system, this.config.replace);

  this.messages = this.messages.map(message => {
  if (message.content instanceof Array) {
  message.content = message.content.map(content => {
  if (content.type === 'text') {
- content.text = this.template(content.text, this.config.replace);
+ content.text = this._template(content.text, this.config.replace);
  }
  return content;
  });
@@ -390,10 +413,12 @@ class ModelMix {
  const currentModel = this.models[i];
  const currentModelKey = currentModel.key;
  const providerInstance = currentModel.provider;
+ const optionsTools = providerInstance.getOptionsTools(this.tools);

  let options = {
  ...this.options,
  ...providerInstance.options,
+ ...optionsTools,
  model: currentModelKey
  };

@@ -414,7 +439,29 @@ class ModelMix {

  const result = await providerInstance.create({ options, config });

- this.messages.push({ role: "assistant", content: result.message });
+ if (result.toolCalls.length > 0) {
+
+ if (result.message) {
+ if (result.signature) {
+ this.messages.push({
+ role: "assistant", content: [{
+ type: "thinking",
+ thinking: result.think,
+ signature: result.signature
+ }]
+ });
+ } else {
+ this.addText(result.message, { role: "assistant" });
+ }
+ }
+
+ this.messages.push({ role: "assistant", content: result.toolCalls, tool_calls: result.toolCalls });
+
+ const content = await this.processToolCalls(result.toolCalls);
+ this.messages.push({ role: 'tool', content });
+
+ return this.execute();
+ }

  if (config.debug) {
  log.debug(`Request successful with model: ${currentModelKey}`);
@@ -444,6 +491,69 @@ class ModelMix {
  throw lastError || new Error("Failed to get response from any model, and no specific error was caught.");
  });
  }
+
+ async processToolCalls(toolCalls) {
+ const result = []
+
+ for (const toolCall of toolCalls) {
+ const client = this.toolClient[toolCall.function.name];
+
+ const response = await client.callTool({
+ name: toolCall.function.name,
+ arguments: JSON.parse(toolCall.function.arguments)
+ });
+
+ result.push({
+ name: toolCall.function.name,
+ tool_call_id: toolCall.id,
+ content: response.content.map(item => item.text).join("\n")
+ });
+ }
+ return result;
+ }
+
+ async addMCP() {
+
+ const key = arguments[0];
+
+ if (this.mcp[key]) {
+ log.info(`MCP ${key} already attached.`);
+ return;
+ }
+
+ if (this.config.max_history < 3) {
+ log.warn(`MCP ${key} requires at least 3 max_history. Setting to 3.`);
+ this.config.max_history = 3;
+ }
+
+ const env = {}
+ for (const key in process.env) {
+ if (['OPENAI', 'ANTHR', 'GOOGLE', 'GROQ', 'TOGET', 'LAMBDA', 'PPLX', 'XAI', 'CEREBR'].some(prefix => key.startsWith(prefix))) continue;
+ env[key] = process.env[key];
+ }
+
+ const transport = new StdioClientTransport({
+ command: "npx",
+ args: ["-y", ...arguments],
+ env
+ });
+
+ // Create the MCP client
+ this.mcp[key] = new Client({
+ name: key,
+ version: "1.0.0"
+ });
+
+ await this.mcp[key].connect(transport);
+
+ const { tools } = await this.mcp[key].listTools();
+ this.tools[key] = tools;
+
+ for (const tool of tools) {
+ this.toolClient[tool.name] = this.mcp[key];
+ }
+
+ }
  }

  class MixCustom {
@@ -477,8 +587,15 @@ class MixCustom {
  };
  }

+ convertMessages(messages, config) {
+ return MixOpenAI.convertMessages(messages, config);
+ }
+
  async create({ config = {}, options = {} } = {}) {
  try {
+
+ options.messages = this.convertMessages(options.messages, config);
+
  if (config.debug) {
  log.debug("config");
  log.info(config);
@@ -562,33 +679,64 @@ class MixCustom {
  }

  extractDelta(data) {
- if (data.choices && data.choices[0].delta.content) return data.choices[0].delta.content;
- return '';
+ return data.choices[0].delta.content;
  }

- extractMessage(data) {
- if (data.choices && data.choices[0].message.content) return data.choices[0].message.content.trim();
- return '';
+ static extractMessage(data) {
+ const message = data.choices[0].message?.content?.trim() || '';
+ const endTagIndex = message.indexOf('</think>');
+ if (message.startsWith('<think>') && endTagIndex !== -1) {
+ return message.substring(endTagIndex + 8).trim();
+ }
+ return message;
  }

- processResponse(response) {
- let message = this.extractMessage(response.data);
-
- if (message.startsWith('<think>')) {
- const endTagIndex = message.indexOf('</think>');
- if (endTagIndex !== -1) {
- const think = message.substring(7, endTagIndex).trim();
- message = message.substring(endTagIndex + 8).trim();
- return { response: response.data, message, think };
+ static extractThink(data) {
+
+ if (data.choices[0].message?.reasoning_content) {
+ return data.choices[0].message.reasoning_content;
+ }
+
+ const message = data.choices[0].message?.content?.trim() || '';
+ const endTagIndex = message.indexOf('</think>');
+ if (message.startsWith('<think>') && endTagIndex !== -1) {
+ return message.substring(7, endTagIndex).trim();
+ }
+ return null;
+ }
+
+ static extractToolCalls(data) {
+ return data.choices[0].message?.tool_calls?.map(call => ({
+ id: call.id,
+ type: 'function',
+ function: {
+ name: call.function.name,
+ arguments: call.function.arguments
  }
+ })) || []
+ }
+
+ processResponse(response) {
+ return {
+ message: MixCustom.extractMessage(response.data),
+ think: MixCustom.extractThink(response.data),
+ toolCalls: MixCustom.extractToolCalls(response.data),
+ response: response.data
  }
+ }

- return { response: response.data, message };
+ getOptionsTools(tools) {
+ return MixOpenAI.getOptionsTools(tools);
  }
  }

  class MixOpenAI extends MixCustom {
  getDefaultConfig(customConfig) {
+
+ if (!process.env.OPENAI_API_KEY) {
+ throw new Error('OpenAI API key not found. Please provide it in config or set OPENAI_API_KEY environment variable.');
+ }
+
  return super.getDefaultConfig({
  url: 'https://api.openai.com/v1/chat/completions',
  apiKey: process.env.OPENAI_API_KEY,
@@ -597,9 +745,6 @@ class MixOpenAI extends MixCustom {
  }

  async create({ config = {}, options = {} } = {}) {
- if (!this.config.apiKey) {
- throw new Error('OpenAI API key not found. Please provide it in config or set OPENAI_API_KEY environment variable.');
- }

  // Remove max_tokens and temperature for o1/o3 models
  if (options.model?.startsWith('o')) {
@@ -607,35 +752,85 @@ class MixOpenAI extends MixCustom {
  delete options.temperature;
  }

- const content = config.system + config.systemExtra;
- options.messages = [{ role: 'system', content }, ...options.messages || []];
- options.messages = MixOpenAI.convertMessages(options.messages);
  return super.create({ config, options });
  }

- static convertMessages(messages) {
- return messages.map(message => {
- if (message.role === 'user' && message.content instanceof Array) {
- message.content = message.content.map(content => {
+ static convertMessages(messages, config) {
+
+ const content = config.system + config.systemExtra;
+ messages = [{ role: 'system', content }, ...messages || []];
+
+ const results = []
+ for (const message of messages) {
+
+ if (message.tool_calls) {
+ results.push({ role: 'assistant', tool_calls: message.tool_calls })
+ continue;
+ }
+
+ if (message.role === 'tool') {
+ for (const content of message.content) {
+ results.push({ role: 'tool', ...content })
+ }
+ continue;
+ }
+
+ if (Array.isArray(message.content))
+ for (const content of message.content) {
  if (content.type === 'image') {
  const { type, media_type, data } = content.source;
- return {
+ message.content = {
  type: 'image_url',
  image_url: {
  url: `data:${media_type};${type},${data}`
  }
  };
  }
- return content;
+ }
+
+ results.push(message);
+ }
+ return results;
+ }
+
+ static getOptionsTools(tools) {
+ const options = {};
+ options.tools = [];
+ for (const tool in tools) {
+ for (const item of tools[tool]) {
+ options.tools.push({
+ type: 'function',
+ function: {
+ name: item.name,
+ description: item.description,
+ parameters: item.inputSchema
+ }
  });
  }
- return message;
- });
+ }
+
+ // options.tool_choice = "auto";
+
+ return options;
  }
  }

  class MixAnthropic extends MixCustom {
+
+ static thinkingOptions = {
+ thinking: {
+ "type": "enabled",
+ "budget_tokens": 1024
+ },
+ temperature: 1
+ };
+
  getDefaultConfig(customConfig) {
+
+ if (!process.env.ANTHROPIC_API_KEY) {
+ throw new Error('Anthropic API key not found. Please provide it in config or set ANTHROPIC_API_KEY environment variable.');
+ }
+
  return super.getDefaultConfig({
  url: 'https://api.anthropic.com/v1/messages',
  apiKey: process.env.ANTHROPIC_API_KEY,
@@ -644,9 +839,6 @@ class MixAnthropic extends MixCustom {
  }

  async create({ config = {}, options = {} } = {}) {
- if (!this.config.apiKey) {
- throw new Error('Anthropic API key not found. Please provide it in config or set ANTHROPIC_API_KEY environment variable.');
- }

  // Remove top_p for thinking
  if (options.thinking) {
@@ -659,6 +851,39 @@ class MixAnthropic extends MixCustom {
  return super.create({ config, options });
  }

+ convertMessages(messages, config) {
+ return MixAnthropic.convertMessages(messages, config);
+ }
+
+ static convertMessages(messages, config) {
+ return messages.map(message => {
+ if (message.role === 'tool') {
+ return {
+ role: "user",
+ content: message.content.map(content => ({
+ type: "tool_result",
+ tool_use_id: content.tool_call_id,
+ content: content.content
+ }))
+ }
+ }
+
+ message.content = message.content.map(content => {
+ if (content.type === 'function') {
+ return {
+ type: 'tool_use',
+ id: content.id,
+ name: content.function.name,
+ input: JSON.parse(content.function.arguments)
+ }
+ }
+ return content;
+ });
+
+ return message;
+ });
+ }
+
  getDefaultHeaders(customHeaders) {
  return super.getDefaultHeaders({
  'x-api-key': this.config.apiKey,
@@ -672,29 +897,77 @@ class MixAnthropic extends MixCustom {
  return '';
  }

- processResponse(response) {
- if (response.data.content) {
+ static extractToolCalls(data) {

- if (response.data.content?.[1]?.text) {
+ return data.content.map(item => {
+ if (item.type === 'tool_use') {
  return {
- think: response.data.content[0]?.thinking,
- message: response.data.content[1].text,
- response: response.data
- }
+ id: item.id,
+ type: 'function',
+ function: {
+ name: item.name,
+ arguments: JSON.stringify(item.input)
+ }
+ };
  }
+ return null;
+ }).filter(item => item !== null);
+ }

- if (response.data.content[0].text) {
- return {
- message: response.data.content[0].text,
- response: response.data
- }
+ static extractMessage(data) {
+ if (data.content?.[1]?.text) {
+ return data.content[1].text;
+ }
+ return data.content[0].text;
+ }
+
+ static extractThink(data) {
+ return data.content[0]?.thinking || null;
+ }
+
+ static extractSignature(data) {
+ return data.content[0]?.signature || null;
+ }
+
+ processResponse(response) {
+ return {
+ message: MixAnthropic.extractMessage(response.data),
+ think: MixAnthropic.extractThink(response.data),
+ toolCalls: MixAnthropic.extractToolCalls(response.data),
+ response: response.data,
+ signature: MixAnthropic.extractSignature(response.data)
+ }
+ }
+
+ getOptionsTools(tools) {
+ return MixAnthropic.getOptionsTools(tools);
+ }
+
+ static getOptionsTools(tools) {
+ const options = {};
+ options.tools = [];
+ for (const tool in tools) {
+ for (const item of tools[tool]) {
+ options.tools.push({
+ type: 'custom',
+ name: item.name,
+ description: item.description,
+ input_schema: item.inputSchema
+ });
  }
  }
+
+ return options;
  }
  }

  class MixPerplexity extends MixCustom {
  getDefaultConfig(customConfig) {
+
+ if (!process.env.PPLX_API_KEY) {
+ throw new Error('Perplexity API key not found. Please provide it in config or set PPLX_API_KEY environment variable.');
+ }
+
  return super.getDefaultConfig({
  url: 'https://api.perplexity.ai/chat/completions',
  apiKey: process.env.PPLX_API_KEY,
@@ -713,10 +986,6 @@ class MixPerplexity extends MixCustom {
  };
  }

- if (!this.config.apiKey) {
- throw new Error('Perplexity API key not found. Please provide it in config or set PPLX_API_KEY environment variable.');
- }
-
  const content = config.system + config.systemExtra;
  options.messages = [{ role: 'system', content }, ...options.messages || []];
  return super.create({ config, options });
@@ -743,19 +1012,18 @@ class MixOllama extends MixCustom {
  return '';
  }

- async create({ config = {}, options = {} } = {}) {
-
- options.messages = MixOllama.convertMessages(options.messages);
- const content = config.system + config.systemExtra;
- options.messages = [{ role: 'system', content }, ...options.messages || []];
- return super.create({ config, options });
- }
-
  extractMessage(data) {
  return data.message.content.trim();
  }

- static convertMessages(messages) {
+ convertMessages(messages, config) {
+ return MixOllama.convertMessages(messages, config);
+ }
+
+ static convertMessages(messages, config) {
+ const content = config.system + config.systemExtra;
+ messages = [{ role: 'system', content }, ...messages || []];
+
  return messages.map(entry => {
  let content = '';
  let images = [];
@@ -779,26 +1047,31 @@

  class MixGrok extends MixOpenAI {
  getDefaultConfig(customConfig) {
+
+ if (!process.env.XAI_API_KEY) {
+ throw new Error('Grok API key not found. Please provide it in config or set XAI_API_KEY environment variable.');
+ }
+
  return super.getDefaultConfig({
  url: 'https://api.x.ai/v1/chat/completions',
  apiKey: process.env.XAI_API_KEY,
  ...customConfig
  });
  }
+ }

- processResponse(response) {
- const message = this.extractMessage(response.data);
-
- const output = {
- message: message,
- response: response.data
- }
+ class MixLambda extends MixCustom {
+ getDefaultConfig(customConfig) {

- if (response.data.choices[0].message.reasoning_content) {
- output.think = response.data.choices[0].message.reasoning_content;
+ if (!process.env.LAMBDA_API_KEY) {
+ throw new Error('Lambda API key not found. Please provide it in config or set LAMBDA_API_KEY environment variable.');
  }

- return output;
+ return super.getDefaultConfig({
+ url: 'https://api.lambda.ai/v1/chat/completions',
+ apiKey: process.env.LAMBDA_API_KEY,
+ ...customConfig
+ });
  }
  }

@@ -809,38 +1082,30 @@ class MixLMStudio extends MixCustom {
  ...customConfig
  });
  }
-
- async create({ config = {}, options = {} } = {}) {
- const content = config.system + config.systemExtra;
- options.messages = [{ role: 'system', content }, ...options.messages || []];
- options.messages = MixOpenAI.convertMessages(options.messages);
- return super.create({ config, options });
- }
  }

  class MixGroq extends MixCustom {
  getDefaultConfig(customConfig) {
+
+ if (!process.env.GROQ_API_KEY) {
+ throw new Error('Groq API key not found. Please provide it in config or set GROQ_API_KEY environment variable.');
+ }
+
  return super.getDefaultConfig({
  url: 'https://api.groq.com/openai/v1/chat/completions',
  apiKey: process.env.GROQ_API_KEY,
  ...customConfig
  });
  }
-
- async create({ config = {}, options = {} } = {}) {
- if (!this.config.apiKey) {
- throw new Error('Groq API key not found. Please provide it in config or set GROQ_API_KEY environment variable.');
- }
-
- const content = config.system + config.systemExtra;
- options.messages = [{ role: 'system', content }, ...options.messages || []];
- options.messages = MixOpenAI.convertMessages(options.messages);
- return super.create({ config, options });
- }
  }

  class MixTogether extends MixCustom {
  getDefaultConfig(customConfig) {
+
+ if (!process.env.TOGETHER_API_KEY) {
+ throw new Error('Together API key not found. Please provide it in config or set TOGETHER_API_KEY environment variable.');
+ }
+
  return super.getDefaultConfig({
  url: 'https://api.together.xyz/v1/chat/completions',
  apiKey: process.env.TOGETHER_API_KEY,
@@ -854,44 +1119,21 @@ class MixTogether extends MixCustom {
  ...customOptions
  };
  }
-
- static convertMessages(messages) {
- return messages.map(message => {
- if (message.content instanceof Array) {
- message.content = message.content.map(content => content.text).join("\n\n");
- }
- return message;
- });
- }
-
- async create({ config = {}, options = {} } = {}) {
- if (!this.config.apiKey) {
- throw new Error('Together API key not found. Please provide it in config or set TOGETHER_API_KEY environment variable.');
- }
-
- const content = config.system + config.systemExtra;
- options.messages = [{ role: 'system', content }, ...options.messages || []];
- options.messages = MixTogether.convertMessages(options.messages);
-
- return super.create({ config, options });
- }
  }

  class MixCerebras extends MixCustom {
  getDefaultConfig(customConfig) {
+
+ if (!process.env.CEREBRAS_API_KEY) {
+ throw new Error('Together API key not found. Please provide it in config or set CEREBRAS_API_KEY environment variable.');
+ }
+
  return super.getDefaultConfig({
  url: 'https://api.cerebras.ai/v1/chat/completions',
  apiKey: process.env.CEREBRAS_API_KEY,
  ...customConfig
  });
  }
-
- async create({ config = {}, options = {} } = {}) {
- const content = config.system + config.systemExtra;
- options.messages = [{ role: 'system', content }, ...options.messages || []];
- options.messages = MixTogether.convertMessages(options.messages);
- return super.create({ config, options });
- }
  }

  class MixGoogle extends MixCustom {
@@ -899,7 +1141,6 @@ class MixGoogle extends MixCustom {
  return super.getDefaultConfig({
  url: 'https://generativelanguage.googleapis.com/v1beta/models',
  apiKey: process.env.GOOGLE_API_KEY,
- ...customConfig
  });
  }

@@ -910,40 +1151,54 @@ class MixGoogle extends MixCustom {
  };
  }

- getDefaultOptions(customOptions) {
- return {
- generationConfig: {
- responseMimeType: "text/plain"
- },
- ...customOptions
- };
- }
-
- static convertMessages(messages) {
+ static convertMessages(messages, config) {
  return messages.map(message => {
- const parts = [];

- if (message.content instanceof Array) {
- message.content.forEach(content => {
+ if (!Array.isArray(message.content)) return message;
+ const role = (message.role === 'assistant' || message.role === 'tool') ? 'model' : 'user'
+
+ if (message.role === 'tool') {
+ return {
+ role,
+ parts: message.content.map(content => ({
+ functionResponse: {
+ name: content.name,
+ response: {
+ output: content.content,
+ },
+ }
+ }))
+ }
+ }
+
+ return {
+ role,
+ parts: message.content.map(content => {
  if (content.type === 'text') {
- parts.push({ text: content.text });
- } else if (content.type === 'image') {
- parts.push({
+ return { text: content.text };
+ }
+
+ if (content.type === 'image') {
+ return {
  inline_data: {
  mime_type: content.source.media_type,
  data: content.source.data
  }
- });
+ }
  }
- });
- } else {
- parts.push({ text: message.content });
- }

- return {
- role: message.role === 'assistant' ? 'model' : 'user',
- parts
- };
+ if (content.type === 'function') {
+ return {
+ functionCall: {
+ name: content.function.name,
+ args: JSON.parse(content.function.arguments)
+ }
+ }
+ }
+
+ return content;
+ })
+ }
  });
  }

@@ -952,30 +1207,38 @@ class MixGoogle extends MixCustom {
  throw new Error('Google API key not found. Please provide it in config or set GOOGLE_API_KEY environment variable.');
  }

- const modelId = options.model || 'gemini-2.5-flash-preview-04-17';
  const generateContentApi = options.stream ? 'streamGenerateContent' : 'generateContent';

- // Construct the full URL with model ID, API endpoint, and API key
- const fullUrl = `${this.config.url}/${modelId}:${generateContentApi}?key=${this.config.apiKey}`;
+ const fullUrl = `${this.config.url}/${options.model}:${generateContentApi}?key=${this.config.apiKey}`;

- // Convert messages to Gemini format
- const contents = MixGoogle.convertMessages(options.messages);

- // Add system message if present
- if (config.system || config.systemExtra) {
- contents.unshift({
- role: 'user',
- parts: [{ text: (config.system || '') + (config.systemExtra || '') }]
- });
+ const content = config.system + config.systemExtra;
+ const systemInstruction = { parts: [{ text: content }] };
+
+ options.messages = MixGoogle.convertMessages(options.messages);
+
+ const generationConfig = {
+ topP: options.top_p,
+ maxOutputTokens: options.max_tokens,
  }

- // Prepare the request payload
+ generationConfig.responseMimeType = "text/plain";
+
  const payload = {
- contents,
- generationConfig: options.generationConfig || this.getDefaultOptions().generationConfig
+ generationConfig,
+ systemInstruction,
+ contents: options.messages,
+ tools: options.tools
  };

  try {
+ if (config.debug) {
+ log.debug("config");
+ log.info(config);
+ log.debug("payload");
+ log.inspect(payload);
+ }
+
  if (options.stream) {
  throw new Error('Stream is not supported for Gemini');
  } else {
@@ -988,9 +1251,59 @@ class MixGoogle extends MixCustom {
  }
  }

- extractMessage(data) {
+ processResponse(response) {
+ return {
+ message: MixGoogle.extractMessage(response.data),
+ think: null,
+ toolCalls: MixGoogle.extractToolCalls(response.data),
+ response: response.data
+ }
+ }
+
+ static extractToolCalls(data) {
+ return data.candidates?.[0]?.content?.parts?.map(part => {
+ if (part.functionCall) {
+ return {
+ id: part.functionCall.id,
+ type: 'function',
+ function: {
+ name: part.functionCall.name,
+ arguments: JSON.stringify(part.functionCall.args)
+ }
+ };
+ }
+ return null;
+ }).filter(item => item !== null) || [];
+ }
+
+ static extractMessage(data) {
  return data.candidates?.[0]?.content?.parts?.[0]?.text;
  }
+
+ static getOptionsTools(tools) {
+ const functionDeclarations = [];
+ for (const tool in tools) {
+ for (const item of tools[tool]) {
+ functionDeclarations.push({
+ name: item.name,
+ description: item.description,
+ parameters: item.inputSchema
+ });
+ }
+ }
+
+ const options = {
+ tools: [{
+ functionDeclarations
+ }]
+ };
+
+ return options;
+ }
+
+ getOptionsTools(tools) {
+ return MixGoogle.getOptionsTools(tools);
+ }
  }

  module.exports = { MixCustom, ModelMix, MixAnthropic, MixOpenAI, MixPerplexity, MixOllama, MixLMStudio, MixGroq, MixTogether, MixGrok, MixCerebras, MixGoogle };
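
The headline additions in 3.5.2 are the Claude 4 shortcuts (opus4, sonnet4 and their *think variants, which layer MixAnthropic.thinkingOptions over the caller's options), the MixLambda provider, and MCP-based tool calling: addMCP() launches an MCP server through npx, registers the tools it lists, and execute() feeds any tool_calls returned by the model through processToolCalls() before querying the model again. Below is a minimal usage sketch assembled only from method names visible in this diff; the MCP server package used, its argument, and the shape of the value resolved by execute() are assumptions, not documented behavior.

  // Usage sketch, not official documentation: method names come from the
  // diff above; everything else is an assumption.
  const { ModelMix } = require('modelmix');

  (async () => {
    const mix = new ModelMix({ config: { max_history: 5 } });

    // Hypothetical MCP server; addMCP runs `npx -y <args>` and maps each
    // tool it lists back to this client via toolClient.
    await mix.addMCP('@modelcontextprotocol/server-filesystem', process.cwd());

    // New Claude Sonnet 4 shortcut with the thinkingOptions preset applied.
    mix.sonnet4think();
    mix.addText('List the files in the current directory and summarize them.');

    // execute() resolves tool calls (processToolCalls) until the model returns
    // a plain assistant message; the result shape below is assumed.
    const result = await mix.execute();
    console.log(result.message);
  })();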