modelmix 3.8.4 → 3.8.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -8
- package/demo/mcp-simple.mjs +166 -0
- package/demo/mcp-tools.mjs +344 -0
- package/index.js +237 -64
- package/mcp-tools.js +96 -0
- package/package.json +4 -3
- package/test/bottleneck.test.js +26 -0
- package/test/fallback.test.js +5 -0
- package/test/live.mcp.js +555 -0
- package/test/live.test.js +3 -3
- package/test/setup.js +25 -0
- package/test/templates.test.js +5 -0
- package/test/test-runner.js +2 -0
package/index.js
CHANGED
@@ -7,6 +7,7 @@ const path = require('path');
 const generateJsonSchema = require('./schema');
 const { Client } = require("@modelcontextprotocol/sdk/client/index.js");
 const { StdioClientTransport } = require("@modelcontextprotocol/sdk/client/stdio.js");
+const { MCPToolsManager } = require('./mcp-tools');
 
 class ModelMix {
 
@@ -16,10 +17,10 @@ class ModelMix {
         this.tools = {};
         this.toolClient = {};
         this.mcp = {};
+        this.mcpToolsManager = new MCPToolsManager();
         this.options = {
             max_tokens: 5000,
             temperature: 1, // 1 --> More creative, 0 --> More deterministic.
-            top_p: 1, // 100% --> The model considers all possible tokens.
             ...options
         };
 
@@ -98,22 +99,14 @@ class ModelMix {
     }
     gpt5nano({ options = {}, config = {} } = {}) {
         return this.attach('gpt-5-nano', new MixOpenAI({ options, config }));
-    }
-    gptOss({ options = {}, config = {}, mix = { together: false, cerebras: false, groq: true
+    }
+    gptOss({ options = {}, config = {}, mix = { together: false, cerebras: false, groq: true } } = {}) {
         if (mix.together) return this.attach('openai/gpt-oss-120b', new MixTogether({ options, config }));
         if (mix.cerebras) return this.attach('gpt-oss-120b', new MixCerebras({ options, config }));
         if (mix.groq) return this.attach('openai/gpt-oss-120b', new MixGroq({ options, config }));
-        if (mix.lmstudio) return this.attach('openai/gpt-oss-120b', new MixLMStudio({ options, config }));
         return this;
     }
-
-    opus4think({ options = {}, config = {} } = {}) {
-        options = { ...MixAnthropic.thinkingOptions, ...options };
-        return this.attach('claude-opus-4-20250514', new MixAnthropic({ options, config }));
-    }
-    opus4({ options = {}, config = {} } = {}) {
-        return this.attach('claude-opus-4-20250514', new MixAnthropic({ options, config }));
-    }
+
     opus41({ options = {}, config = {} } = {}) {
         return this.attach('claude-opus-4-1-20250805', new MixAnthropic({ options, config }));
     }
@@ -128,6 +121,13 @@ class ModelMix {
         options = { ...MixAnthropic.thinkingOptions, ...options };
         return this.attach('claude-sonnet-4-20250514', new MixAnthropic({ options, config }));
     }
+    sonnet45({ options = {}, config = {} } = {}) {
+        return this.attach('claude-sonnet-4-5-20250929', new MixAnthropic({ options, config }));
+    }
+    sonnet45think({ options = {}, config = {} } = {}) {
+        options = { ...MixAnthropic.thinkingOptions, ...options };
+        return this.attach('claude-sonnet-4-5-20250929', new MixAnthropic({ options, config }));
+    }
     sonnet37({ options = {}, config = {} } = {}) {
         return this.attach('claude-3-7-sonnet-20250219', new MixAnthropic({ options, config }));
     }
@@ -135,9 +135,6 @@ class ModelMix {
         options = { ...MixAnthropic.thinkingOptions, ...options };
         return this.attach('claude-3-7-sonnet-20250219', new MixAnthropic({ options, config }));
     }
-    sonnet35({ options = {}, config = {} } = {}) {
-        return this.attach('claude-3-5-sonnet-20241022', new MixAnthropic({ options, config }));
-    }
     haiku35({ options = {}, config = {} } = {}) {
         return this.attach('claude-3-5-haiku-20241022', new MixAnthropic({ options, config }));
     }
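The hunks above add `sonnet45()` and `sonnet45think()` selectors for Claude Sonnet 4.5 and drop the older Opus 4 / Sonnet 3.5 shortcuts. A minimal usage sketch, assuming the chainable ModelMix API documented in the README (selector, `addText()`, then `execute()`); the prompt is illustrative and the exact shape of the resolved value is an assumption:

```js
const { ModelMix } = require('modelmix');

// Illustrative only: pick Claude Sonnet 4.5, add a prompt, run it.
const mix = new ModelMix();
mix.sonnet45();          // claude-sonnet-4-5-20250929
// mix.sonnet45think();  // same model with MixAnthropic.thinkingOptions merged in
mix.addText('Summarize this changelog in one sentence.');

mix.execute().then(response => console.log(response));
```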
@@ -199,11 +196,15 @@ class ModelMix {
     }
 
     kimiK2({ options = {}, config = {}, mix = { together: false, groq: true } } = {}) {
-        if (mix.together) this.attach('moonshotai/Kimi-K2-Instruct', new MixTogether({ options, config }));
-        if (mix.groq) this.attach('moonshotai/kimi-k2-instruct', new MixGroq({ options, config }));
+        if (mix.together) this.attach('moonshotai/Kimi-K2-Instruct-0905', new MixTogether({ options, config }));
+        if (mix.groq) this.attach('moonshotai/kimi-k2-instruct-0905', new MixGroq({ options, config }));
         return this;
     }
 
+    lmstudio({ options = {}, config = {} } = {}) {
+        return this.attach('lmstudio', new MixLMStudio({ options, config }));
+    }
+
     addText(text, { role = "user" } = {}) {
         const content = [{
             type: "text",
@@ -247,7 +248,7 @@ class ModelMix {
 
     addImage(filePath, { role = "user" } = {}) {
         const absolutePath = path.resolve(filePath);
-
+
         if (!fs.existsSync(absolutePath)) {
             throw new Error(`Image file not found: ${filePath}`);
         }
@@ -281,11 +282,11 @@ class ModelMix {
             }
         } else {
             source = {
-                type: "url",
+                type: "url",
                 data: url
             };
         }
-
+
         this.messages.push({
             role,
             content: [{
@@ -293,7 +294,7 @@ class ModelMix {
                 source
             }]
         });
-
+
         return this;
     }
 
@@ -468,7 +469,28 @@ class ModelMix {
     async prepareMessages() {
         await this.processImages();
         this.applyTemplate();
-
+
+        // Smart message slicing to preserve tool call sequences
+        if (this.config.max_history > 0) {
+            let sliceStart = Math.max(0, this.messages.length - this.config.max_history);
+
+            // If we're slicing and there's a tool message at the start,
+            // ensure we include the preceding assistant message with tool_calls
+            while (sliceStart > 0 &&
+                sliceStart < this.messages.length &&
+                this.messages[sliceStart].role === 'tool') {
+                sliceStart--;
+                // Also need to include the assistant message with tool_calls
+                if (sliceStart > 0 &&
+                    this.messages[sliceStart].role === 'assistant' &&
+                    this.messages[sliceStart].tool_calls) {
+                    break;
+                }
+            }
+
+            this.messages = this.messages.slice(sliceStart);
+        }
+
         this.messages = this.groupByRoles(this.messages);
         this.options.messages = this.messages;
     }
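The slicing above walks backwards from the naive cut point so that a `tool` result is never sent without the assistant message whose `tool_calls` produced it. A standalone sketch of the same logic on a hypothetical history with `max_history = 2`:

```js
// Hypothetical conversation history; shapes mirror the messages ModelMix builds above.
const messages = [
    { role: 'user', content: 'What is 2 + 2?' },
    { role: 'assistant', content: null, tool_calls: [{ id: 'call_1', function: { name: 'calc', arguments: '{"a":2,"b":2}' } }] },
    { role: 'tool', content: [{ name: 'calc', tool_call_id: 'call_1', content: '4' }] },
    { role: 'user', content: 'Thanks!' }
];

const max_history = 2;
let sliceStart = Math.max(0, messages.length - max_history); // naive cut lands on the 'tool' message

while (sliceStart > 0 && sliceStart < messages.length && messages[sliceStart].role === 'tool') {
    sliceStart--; // back up to the assistant message that issued the call
    if (sliceStart > 0 && messages[sliceStart].role === 'assistant' && messages[sliceStart].tool_calls) break;
}

console.log(messages.slice(sliceStart).map(m => m.role));
// -> [ 'assistant', 'tool', 'user' ]  (three messages kept instead of two, so the pair stays intact)
```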
@@ -490,7 +512,7 @@ class ModelMix {
 
     async execute({ config = {}, options = {} } = {}) {
         if (!this.models || this.models.length === 0) {
-            throw new Error("No models specified. Use methods like .
+            throw new Error("No models specified. Use methods like .gpt5(), .sonnet4() first.");
         }
 
         return this.limiter.schedule(async () => {
@@ -552,7 +574,7 @@ class ModelMix {
                 }
             }
 
-            this.messages.push({ role: "assistant", content:
+            this.messages.push({ role: "assistant", content: null, tool_calls: result.toolCalls });
 
             const content = await this.processToolCalls(result.toolCalls);
             this.messages.push({ role: 'tool', content });
@@ -593,18 +615,67 @@ class ModelMix {
         const result = []
 
         for (const toolCall of toolCalls) {
-
+            // Handle different tool call formats more robustly
+            let toolName, toolArgs, toolId;
+
+            try {
+                if (toolCall.function) {
+                    // OpenAI / normalized format
+                    toolName = toolCall.function.name;
+                    toolArgs = typeof toolCall.function.arguments === 'string'
+                        ? JSON.parse(toolCall.function.arguments)
+                        : toolCall.function.arguments;
+                    toolId = toolCall.id;
+                } else if (toolCall.name) {
+                    // Direct format (possible alternative format)
+                    toolName = toolCall.name;
+                    toolArgs = toolCall.input || toolCall.arguments || {};
+                    toolId = toolCall.id;
+                } else {
+                    console.error('Unknown tool call format:', JSON.stringify(toolCall, null, 2));
+                    continue;
+                }
 
-
-
-
-
+                // Validate that we have the required data
+                if (!toolName) {
+                    console.error('Tool call missing name:', JSON.stringify(toolCall, null, 2));
+                    continue;
+                }
 
-
-
-
-
-
+                // Check whether this is a registered local tool
+                if (this.mcpToolsManager.hasTool(toolName)) {
+                    const response = await this.mcpToolsManager.executeTool(toolName, toolArgs);
+                    result.push({
+                        name: toolName,
+                        tool_call_id: toolId,
+                        content: response.content.map(item => item.text).join("\n")
+                    });
+                } else {
+                    // Use the external MCP client
+                    const client = this.toolClient[toolName];
+                    if (!client) {
+                        throw new Error(`No client found for tool: ${toolName}`);
+                    }
+
+                    const response = await client.callTool({
+                        name: toolName,
+                        arguments: toolArgs
+                    });
+
+                    result.push({
+                        name: toolName,
+                        tool_call_id: toolId,
+                        content: response.content.map(item => item.text).join("\n")
+                    });
+                }
+            } catch (error) {
+                console.error(`Error processing tool call ${toolName}:`, error);
+                result.push({
+                    name: toolName || 'unknown',
+                    tool_call_id: toolId || 'unknown',
+                    content: `Error: ${error.message}`
+                });
+            }
         }
         return result;
     }
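The rewritten loop accepts two tool-call shapes and reduces both to the same `{ toolName, toolArgs, toolId }` triple before dispatching to either the local `MCPToolsManager` or an external MCP client. Illustrative values only (the tool name and ids are made up):

```js
// OpenAI-style / normalized shape: arguments may arrive as a JSON string.
const openAiStyleCall = {
    id: 'call_abc123',
    function: { name: 'get_weather', arguments: '{"city":"Madrid"}' }
};

// Direct shape: a name plus an input (or arguments) object.
const directStyleCall = {
    id: 'toolu_xyz789',
    name: 'get_weather',
    input: { city: 'Madrid' }
};

// Anything else is logged as an unknown tool call format and skipped.
```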
@@ -651,6 +722,56 @@ class ModelMix {
         }
 
     }
+
+    addTool(toolDefinition, callback) {
+
+        if (this.config.max_history < 3) {
+            log.warn(`MCP ${toolDefinition.name} requires at least 3 max_history. Setting to 3.`);
+            this.config.max_history = 3;
+        }
+
+        this.mcpToolsManager.registerTool(toolDefinition, callback);
+
+        // Add the tool to the tools registry so it is included in requests
+        if (!this.tools.local) {
+            this.tools.local = [];
+        }
+        this.tools.local.push({
+            name: toolDefinition.name,
+            description: toolDefinition.description,
+            inputSchema: toolDefinition.inputSchema
+        });
+
+        return this;
+    }
+
+    addTools(toolsWithCallbacks) {
+        for (const { tool, callback } of toolsWithCallbacks) {
+            this.addTool(tool, callback);
+        }
+        return this;
+    }
+
+    removeTool(toolName) {
+        this.mcpToolsManager.removeTool(toolName);
+
+        // Also remove from the tools system
+        if (this.tools.local) {
+            this.tools.local = this.tools.local.filter(tool => tool.name !== toolName);
+        }
+
+        return this;
+    }
+
+    listTools() {
+        const localTools = this.mcpToolsManager.getToolsForMCP();
+        const mcpTools = Object.values(this.tools).flat();
+
+        return {
+            local: localTools,
+            mcp: mcpTools.filter(tool => !localTools.find(local => local.name === tool.name))
+        };
+    }
 }
 
 class MixCustom {
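The new `addTool()` registers a local tool with the `MCPToolsManager` and mirrors it into `this.tools.local` so it is sent with requests; `max_history` is bumped to at least 3 so the assistant/tool message pair survives slicing. A minimal sketch (the tool itself is hypothetical, and method chaining is assumed per the README):

```js
const { ModelMix } = require('modelmix');

const mix = new ModelMix();
mix.sonnet45();

// Hypothetical local tool: the definition needs name, description and inputSchema;
// the callback's return value is stringified into the tool result text.
mix.addTool({
    name: 'get_time',
    description: 'Returns the current server time as an ISO string',
    inputSchema: { type: 'object', properties: {}, required: [] }
}, async () => new Date().toISOString());

console.log(mix.listTools());
// -> { local: [ { name: 'get_time', ... } ], mcp: [ ... ] }
```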
@@ -658,7 +779,7 @@ class MixCustom {
         this.config = this.getDefaultConfig(config);
         this.options = this.getDefaultOptions(options);
         this.headers = this.getDefaultHeaders(headers);
-        this.streamCallback = null; //
+        this.streamCallback = null; // Define streamCallback here
     }
 
     getDefaultOptions(customOptions) {
@@ -850,7 +971,7 @@ class MixOpenAI extends MixCustom {
             delete options.max_tokens;
             delete options.temperature;
         }
-
+
         // Use max_completion_tokens and remove temperature for GPT-5 models
         if (options.model?.includes('gpt-5')) {
             if (options.max_tokens) {
@@ -878,14 +999,18 @@ class MixOpenAI extends MixCustom {
 
             if (message.role === 'tool') {
                 for (const content of message.content) {
-                    results.push({
+                    results.push({
+                        role: 'tool',
+                        tool_call_id: content.tool_call_id,
+                        content: content.content
+                    })
                 }
                 continue;
             }
 
             if (Array.isArray(message.content)) {
-                message.content = message.content.map(content => {
-                    if (content.type === 'image') {
+                message.content = message.content.filter(content => content !== null && content !== undefined).map(content => {
+                    if (content && content.type === 'image') {
                         const { media_type, data } = content.source;
                         return {
                             type: 'image_url',
@@ -900,6 +1025,7 @@ class MixOpenAI extends MixCustom {
 
             results.push(message);
         }
+
         return results;
     }
 
@@ -950,21 +1076,19 @@ class MixAnthropic extends MixCustom {
 
     async create({ config = {}, options = {} } = {}) {
 
-        // Remove top_p for thinking
-        if (options.thinking) {
-            delete options.top_p;
-        }
-
-        if (options.model && options.model.includes('claude-opus-4-1')) {
-            if (options.temperature !== undefined && options.top_p !== undefined) {
-                delete options.top_p;
-            }
-        }
-
         delete options.response_format;
 
         options.system = config.system;
-
+
+        try {
+            return await super.create({ config, options });
+        } catch (error) {
+            // Log the error details for debugging
+            if (error.response && error.response.data) {
+                console.error('Anthropic API Error:', JSON.stringify(error.response.data, null, 2));
+            }
+            throw error;
+        }
     }
 
     convertMessages(messages, config) {
@@ -972,7 +1096,27 @@ class MixAnthropic extends MixCustom {
     }
 
     static convertMessages(messages, config) {
-
+        // Filter out orphaned tool results for Anthropic
+        const filteredMessages = [];
+        for (let i = 0; i < messages.length; i++) {
+            if (messages[i].role === 'tool') {
+                // Check if there's a preceding assistant message with tool_calls
+                let foundToolCall = false;
+                for (let j = i - 1; j >= 0; j--) {
+                    if (messages[j].role === 'assistant' && messages[j].tool_calls) {
+                        foundToolCall = true;
+                        break;
+                    }
+                }
+                if (!foundToolCall) {
+                    // Skip orphaned tool results
+                    continue;
+                }
+            }
+            filteredMessages.push(messages[i]);
+        }
+
+        return filteredMessages.map(message => {
             if (message.role === 'tool') {
                 return {
                     role: "user",
@@ -984,17 +1128,31 @@ class MixAnthropic extends MixCustom {
                 }
             }
 
-
-
-
-
-
-
-
+            // Handle messages with tool_calls (assistant messages that call tools)
+            if (message.tool_calls) {
+                const content = message.tool_calls.map(call => ({
+                    type: 'tool_use',
+                    id: call.id,
+                    name: call.function.name,
+                    input: JSON.parse(call.function.arguments)
+                }));
+                return { role: 'assistant', content };
+            }
+
+            // Handle content conversion for other messages
+            if (message.content && Array.isArray(message.content)) {
+                message.content = message.content.filter(content => content !== null && content !== undefined).map(content => {
+                    if (content && content.type === 'function') {
+                        return {
+                            type: 'tool_use',
+                            id: content.id,
+                            name: content.function.name,
+                            input: JSON.parse(content.function.arguments)
+                        }
                     }
-
-
-            }
+                    return content;
+                });
+            }
 
             return message;
         });
@@ -1065,7 +1223,6 @@ class MixAnthropic extends MixCustom {
         for (const tool in tools) {
             for (const item of tools[tool]) {
                 options.tools.push({
-                    type: 'custom',
                     name: item.name,
                     description: item.description,
                     input_schema: item.inputSchema
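Internally ModelMix keeps assistant tool calls in the OpenAI-style `tool_calls` shape (pushed with `content: null` in `execute()`), and each provider adapter converts them on the way out: the hunk above maps them to Anthropic `tool_use` blocks, and the MixGoogle hunk below maps them to Gemini `functionCall` parts. A before/after sketch with made-up values:

```js
// Internal (OpenAI-style) assistant message as stored in this.messages:
const internal = {
    role: 'assistant',
    content: null,
    tool_calls: [{ id: 'call_1', function: { name: 'get_weather', arguments: '{"city":"Madrid"}' } }]
};

// MixAnthropic.convertMessages turns it into a tool_use content block:
const anthropic = {
    role: 'assistant',
    content: [{ type: 'tool_use', id: 'call_1', name: 'get_weather', input: { city: 'Madrid' } }]
};

// MixGoogle.convertMessages (next hunk) produces the Gemini equivalent:
const gemini = {
    role: 'model',
    parts: [{ functionCall: { name: 'get_weather', args: { city: 'Madrid' } } }]
};
```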
@@ -1326,6 +1483,19 @@ class MixGoogle extends MixCustom {
     static convertMessages(messages, config) {
         return messages.map(message => {
 
+            // Handle assistant messages with tool_calls (content is null)
+            if (message.role === 'assistant' && message.tool_calls) {
+                return {
+                    role: 'model',
+                    parts: message.tool_calls.map(toolCall => ({
+                        functionCall: {
+                            name: toolCall.function.name,
+                            args: JSON.parse(toolCall.function.arguments)
+                        }
+                    }))
+                }
+            }
+
             if (!Array.isArray(message.content)) return message;
             const role = (message.role === 'assistant' || message.role === 'tool') ? 'model' : 'user'
 
@@ -1390,10 +1560,13 @@ class MixGoogle extends MixCustom {
         options.messages = MixGoogle.convertMessages(options.messages);
 
         const generationConfig = {
-            topP: options.top_p,
             maxOutputTokens: options.max_tokens,
         }
 
+        if (options.top_p) {
+            generationConfig.topP = options.top_p;
+        }
+
         generationConfig.responseMimeType = "text/plain";
 
         const payload = {
package/mcp-tools.js
ADDED
@@ -0,0 +1,96 @@
+const log = require('lemonlog')('ModelMix:MCP-Tools');
+
+class MCPToolsManager {
+    constructor() {
+        this.tools = new Map();
+        this.callbacks = new Map();
+    }
+
+    registerTool(toolDefinition, callback) {
+        const { name, description, inputSchema } = toolDefinition;
+
+        if (!name || !description || !inputSchema) {
+            throw new Error('Tool definition must include name, description, and inputSchema');
+        }
+
+        if (typeof callback !== 'function') {
+            throw new Error('Callback must be a function');
+        }
+
+        // Register the tool
+        this.tools.set(name, {
+            name,
+            description,
+            inputSchema
+        });
+
+        // Register the callback
+        this.callbacks.set(name, callback);
+
+        log.debug(`Tool registered: ${name}`);
+    }
+
+    registerTools(toolsWithCallbacks) {
+        for (const { tool, callback } of toolsWithCallbacks) {
+            this.registerTool(tool, callback);
+        }
+    }
+
+    async executeTool(name, args) {
+        const callback = this.callbacks.get(name);
+        if (!callback) {
+            throw new Error(`Tool not found: ${name}`);
+        }
+
+        try {
+            const result = await callback(args);
+            // For primitive values (numbers, booleans), convert to string
+            // For objects/arrays, stringify them
+            let textResult;
+            if (typeof result === 'string') {
+                textResult = result;
+            } else if (typeof result === 'number' || typeof result === 'boolean') {
+                textResult = String(result);
+            } else {
+                textResult = JSON.stringify(result, null, 2);
+            }
+
+            return {
+                content: [{
+                    type: "text",
+                    text: textResult
+                }]
+            };
+        } catch (error) {
+            log.error(`Error executing tool ${name}:`, error);
+            return {
+                content: [{
+                    type: "text",
+                    text: `Error executing ${name}: ${error.message}`
+                }]
+            };
+        }
+    }
+
+    getToolsForMCP() {
+        return Array.from(this.tools.values());
+    }
+
+    hasTool(name) {
+        return this.tools.has(name);
+    }
+
+    removeTool(name) {
+        this.tools.delete(name);
+        this.callbacks.delete(name);
+        log.debug(`Tool removed: ${name}`);
+    }
+
+    clear() {
+        this.tools.clear();
+        this.callbacks.clear();
+        log.debug('All tools cleared');
+    }
+}
+
+module.exports = { MCPToolsManager };
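`MCPToolsManager` stores tool definitions and callbacks in two `Map`s and wraps every callback result in MCP-style text content. A small standalone sketch using the in-package require path (the `add` tool is made up):

```js
const { MCPToolsManager } = require('./mcp-tools');

const manager = new MCPToolsManager();
manager.registerTool({
    name: 'add',
    description: 'Adds two numbers',
    inputSchema: {
        type: 'object',
        properties: { a: { type: 'number' }, b: { type: 'number' } },
        required: ['a', 'b']
    }
}, async ({ a, b }) => a + b);

manager.executeTool('add', { a: 2, b: 3 }).then(result => console.log(result));
// -> { content: [ { type: 'text', text: '5' } ] }   (numbers are converted to strings)
```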
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "modelmix",
-  "version": "3.8.4",
+  "version": "3.8.8",
   "description": "🧬 ModelMix - Unified API for Diverse AI LLM.",
   "main": "index.js",
   "repository": {
@@ -52,7 +52,7 @@
     "bottleneck": "^2.19.5",
     "file-type": "^16.5.4",
     "form-data": "^4.0.4",
-    "lemonlog": "^1.
+    "lemonlog": "^1.2.0"
   },
   "devDependencies": {
     "chai": "^5.2.1",
@@ -69,6 +69,7 @@
     "test:templates": "mocha test/templates.test.js --timeout 10000 --require test/setup.js",
     "test:images": "mocha test/images.test.js --timeout 10000 --require test/setup.js",
     "test:bottleneck": "mocha test/bottleneck.test.js --timeout 10000 --require test/setup.js",
-    "test:live": "mocha test/live.test.js --timeout 10000 --require dotenv/config --require test/setup.js"
+    "test:live": "mocha test/live.test.js --timeout 10000 --require dotenv/config --require test/setup.js",
+    "test:live.mcp": "mocha test/live.mcp.js --timeout 60000 --require dotenv/config --require test/setup.js"
   }
 }
package/test/bottleneck.test.js
CHANGED
@@ -6,6 +6,11 @@ const Bottleneck = require('bottleneck');
 
 describe('Rate Limiting with Bottleneck Tests', () => {
 
+    // Setup test hooks
+    if (global.setupTestHooks) {
+        global.setupTestHooks();
+    }
+
     afterEach(() => {
         nock.cleanAll();
         sinon.restore();
@@ -57,6 +62,13 @@ describe('Rate Limiting with Bottleneck Tests', () => {
         });
     });
 
+    afterEach(async () => {
+        // Clean up bottleneck state
+        if (model && model.limiter) {
+            await model.limiter.stop({ dropWaitingJobs: true });
+        }
+    });
+
     it('should enforce minimum time between requests', async () => {
         const startTimes = [];
 
@@ -162,6 +174,13 @@ describe('Rate Limiting with Bottleneck Tests', () => {
         });
     });
 
+    afterEach(async () => {
+        // Clean up bottleneck state
+        if (model && model.limiter) {
+            await model.limiter.stop({ dropWaitingJobs: true });
+        }
+    });
+
     it('should apply rate limiting to OpenAI requests', async () => {
         const requestTimes = [];
 
@@ -240,6 +259,13 @@ describe('Rate Limiting with Bottleneck Tests', () => {
         });
     });
 
+    afterEach(async () => {
+        // Clean up bottleneck state
+        if (model && model.limiter) {
+            await model.limiter.stop({ dropWaitingJobs: true });
+        }
+    });
+
     it('should handle rate limiting with API errors', async () => {
         model.gpt4o();
 
package/test/fallback.test.js
CHANGED
@@ -4,6 +4,11 @@ const nock = require('nock');
 const { ModelMix } = require('../index.js');
 
 describe('Provider Fallback Chain Tests', () => {
+
+    // Setup test hooks
+    if (global.setupTestHooks) {
+        global.setupTestHooks();
+    }
 
     afterEach(() => {
         nock.cleanAll();