modelmix 3.1.8 → 3.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.js CHANGED
@@ -7,13 +7,14 @@ const path = require('path');
  const generateJsonSchema = require('./schema');
 
  class ModelMix {
- constructor(args = { options: {}, config: {} }) {
- this.models = {};
- this.defaultOptions = {
- max_tokens: 2000,
- temperature: 1,
- top_p: 1,
- ...args.options
+ constructor({ options = {}, config = {} } = {}) {
+ this.models = [];
+ this.messages = [];
+ this.options = {
+ max_tokens: 5000,
+ temperature: 1, // 1 --> More creative, 0 --> More deterministic.
+ top_p: 1, // 100% --> The model considers all possible tokens.
+ ...options
  };
 
  // Standard Bottleneck configuration
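The constructor now destructures `{ options, config }` directly, keeps the conversation state (`messages`) on the mixer itself, and raises the default `max_tokens` from 2000 to 5000. A minimal construction sketch under the new signature (the values shown are just the new defaults, so passing them is optional):

```js
const { ModelMix } = require('modelmix');

// Both arguments are optional; shown here with the 3.3.x defaults.
const mix = new ModelMix({
  options: { max_tokens: 5000, temperature: 1, top_p: 1 },
  config: { max_history: 1, debug: false }
});
```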
@@ -28,10 +29,11 @@ class ModelMix {
  max_history: 1, // Default max history
  debug: false,
  bottleneck: defaultBottleneckConfig,
- ...args.config
+ ...config
  }
 
  this.limiter = new Bottleneck(this.config.bottleneck);
+
  }
 
  replace(keyValues) {
@@ -39,112 +41,132 @@ class ModelMix {
  return this;
  }
 
- attach(...modelInstances) {
- for (const modelInstance of modelInstances) {
- const key = modelInstance.config.prefix.join("_");
- this.models[key] = modelInstance;
- }
- return this;
+ static new({ options = {}, config = {} } = {}) {
+ return new ModelMix({ options, config });
  }
 
- create(modelKeys, args = { config: {}, options: {} }) {
- // If modelKeys is a string, convert it to an array for backwards compatibility
- const modelArray = Array.isArray(modelKeys) ? modelKeys : [modelKeys];
-
- if (modelArray.length === 0) {
- throw new Error('No model keys provided');
- }
+ new() {
+ return new ModelMix({ options: this.options, config: this.config });
+ }
 
- // Verify that all models are available
- const unavailableModels = modelArray.filter(modelKey => {
- return !Object.values(this.models).some(entry =>
- entry.config.prefix.some(p => modelKey.startsWith(p))
- );
- });
+ attach(key, provider) {
 
- if (unavailableModels.length > 0) {
- throw new Error(`The following models are not available: ${unavailableModels.join(', ')}`);
+ if (this.models.some(model => model.key === key)) {
+ return this;
  }
 
- // Once all are verified as available, get the first model
- const modelKey = modelArray[0];
- const modelEntry = Object.values(this.models).find(entry =>
- entry.config.prefix.some(p => modelKey.startsWith(p))
- );
-
- const options = {
- ...this.defaultOptions,
- ...modelEntry.options,
- ...args.options,
- model: modelKey
- };
-
- const config = {
- ...this.config,
- ...modelEntry.config,
- ...args.config
- };
+ if (this.messages.length > 0) {
+ throw new Error("Cannot add models after message generation has started.");
+ }
 
- // Pass remaining models array for fallback
- return new MessageHandler(this, modelEntry, options, config, modelArray.slice(1));
+ this.models.push({ key, provider });
+ return this;
  }
 
- setSystem(text) {
- this.config.system = text;
- return this;
+ // --- Model addition methods ---
+ gpt41({ options = {}, config = {} } = {}) {
+ return this.attach('gpt-4.1', new MixOpenAI({ options, config }));
+ }
+ gpt41mini({ options = {}, config = {} } = {}) {
+ return this.attach('gpt-4.1-mini', new MixOpenAI({ options, config }));
+ }
+ gpt41nano({ options = {}, config = {} } = {}) {
+ return this.attach('gpt-4.1-nano', new MixOpenAI({ options, config }));
+ }
+ gpt4o({ options = {}, config = {} } = {}) {
+ return this.attach('gpt-4o', new MixOpenAI({ options, config }));
+ }
+ o4mini({ options = {}, config = {} } = {}) {
+ return this.attach('o4-mini', new MixOpenAI({ options, config }));
+ }
+ o3({ options = {}, config = {} } = {}) {
+ return this.attach('o3', new MixOpenAI({ options, config }));
+ }
+ gpt45({ options = {}, config = {} } = {}) {
+ return this.attach('gpt-4.5-preview', new MixOpenAI({ options, config }));
+ }
+ sonnet37({ options = {}, config = {} } = {}) {
+ return this.attach('claude-3-7-sonnet-20250219', new MixAnthropic({ options, config }));
+ }
+ sonnet37think({ options = {
+ thinking: {
+ "type": "enabled",
+ "budget_tokens": 1024
+ },
+ temperature: 1
+ }, config = {} } = {}) {
+ return this.attach('claude-3-7-sonnet-20250219', new MixAnthropic({ options, config }));
+ }
+ sonnet35({ options = {}, config = {} } = {}) {
+ return this.attach('claude-3-5-sonnet-20241022', new MixAnthropic({ options, config }));
+ }
+ haiku35({ options = {}, config = {} } = {}) {
+ return this.attach('claude-3-5-haiku-20241022', new MixAnthropic({ options, config }));
+ }
+ gemini25flash({ options = {}, config = {} } = {}) {
+ return this.attach('gemini-2.5-flash-preview-04-17', new MixGoogle({ options, config }));
+ }
+ gemini25proExp({ options = {}, config = {} } = {}) {
+ return this.attach('gemini-2.5-pro-exp-03-25', new MixGoogle({ options, config }));
+ }
+ gemini25pro({ options = {}, config = {} } = {}) {
+ return this.attach('gemini-2.5-pro-preview-05-06', new MixGoogle({ options, config }));
+ }
+ sonarPro({ options = {}, config = {} } = {}) {
+ return this.attach('sonar-pro', new MixPerplexity({ options, config }));
+ }
+ sonar({ options = {}, config = {} } = {}) {
+ return this.attach('sonar', new MixPerplexity({ options, config }));
  }
 
- setSystemFromFile(filePath) {
- const content = this.readFile(filePath);
- this.setSystem(content);
- return this;
+ grok2({ options = {}, config = {} } = {}) {
+ return this.attach('grok-2-latest', new MixGrok({ options, config }));
+ }
+ grok3({ options = {}, config = {} } = {}) {
+ return this.attach('grok-3-beta', new MixGrok({ options, config }));
+ }
+ grok3mini({ options = {}, config = {} } = {}) {
+ return this.attach('grok-3-mini-beta', new MixGrok({ options, config }));
  }
 
- readFile(filePath, options = { encoding: 'utf8' }) {
- try {
- const absolutePath = path.resolve(filePath);
- return fs.readFileSync(absolutePath, options);
- } catch (error) {
- if (error.code === 'ENOENT') {
- throw new Error(`File not found: ${filePath}`);
- } else if (error.code === 'EACCES') {
- throw new Error(`Permission denied: ${filePath}`);
- } else {
- throw new Error(`Error reading file ${filePath}: ${error.message}`);
- }
- }
+ qwen3({ options = {}, config = {}, mix = { groq: true, together: false } } = {}) {
+ if (mix.groq) this.attach('qwen-qwq-32b', new MixGroq({ options, config }));
+ if (mix.together) this.attach('Qwen/Qwen3-235B-A22B-fp8-tput', new MixTogether({ options, config }));
+ return this;
  }
- }
 
- class MessageHandler {
- constructor(mix, modelEntry, options, config, fallbackModels = []) {
- this.mix = mix;
- this.modelEntry = modelEntry;
- this.options = options;
- this.config = config;
- this.messages = [];
- this.fallbackModels = fallbackModels;
- this.imagesToProcess = [];
+ scout({ options = {}, config = {}, mix = { groq: true, together: false, cerebras: false } } = {}) {
+ if (mix.groq) this.attach('meta-llama/llama-4-scout-17b-16e-instruct', new MixGroq({ options, config }));
+ if (mix.together) this.attach('meta-llama/Llama-4-Scout-17B-16E-Instruct', new MixTogether({ options, config }));
+ if (mix.cerebras) this.attach('llama-4-scout-17b-16e-instruct', new MixCerebras({ options, config }));
+ return this;
+ }
+ maverick({ options = {}, config = {}, mix = { groq: true, together: false } } = {}) {
+ if (mix.groq) this.attach('meta-llama/llama-4-maverick-17b-128e-instruct', new MixGroq({ options, config }));
+ if (mix.together) this.attach('meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8', new MixTogether({ options, config }));
+ return this;
  }
 
- new() {
- this.messages = [];
+ deepseekR1({ options = {}, config = {}, mix = { groq: true, together: false, cerebras: false } } = {}) {
+ if (mix.groq) this.attach('deepseek-r1-distill-llama-70b', new MixGroq({ options, config }));
+ if (mix.together) this.attach('deepseek-ai/DeepSeek-R1', new MixTogether({ options, config }));
+ if (mix.cerebras) this.attach('deepseek-r1-distill-llama-70b', new MixCerebras({ options, config }));
  return this;
  }
 
- addText(text, config = { role: "user" }) {
+ addText(text, { role = "user" } = {}) {
  const content = [{
  type: "text",
  text
  }];
 
- this.messages.push({ ...config, content });
+ this.messages.push({ role, content });
  return this;
  }
 
- addTextFromFile(filePath, config = { role: "user" }) {
- const content = this.mix.readFile(filePath);
- this.addText(content, config);
+ addTextFromFile(filePath, { role = "user" } = {}) {
+ const content = this.readFile(filePath);
+ this.addText(content, { role });
  return this;
  }
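The old prefix-matched `create(modelKeys)` flow is gone: models are now registered explicitly through `attach(key, provider)` or the named helpers above, and the attachment order becomes the fallback order at execution time. A sketch of the resulting fluent style, assuming `OPENAI_API_KEY` and `ANTHROPIC_API_KEY` are set in the environment:

```js
const { ModelMix } = require('modelmix');

(async () => {
  // gpt-4.1 is tried first; claude-3-7-sonnet-20250219 is the fallback.
  const reply = await ModelMix.new()
    .gpt41()
    .sonnet37()
    .addText('Summarize this changelog in one sentence.')
    .message();
  console.log(reply);
})();
```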
 
@@ -154,13 +176,13 @@ class MessageHandler {
  }
 
  setSystemFromFile(filePath) {
- const content = this.mix.readFile(filePath);
+ const content = this.readFile(filePath);
  this.setSystem(content);
  return this;
  }
 
- addImage(filePath, config = { role: "user" }) {
- const imageBuffer = this.mix.readFile(filePath, { encoding: null });
+ addImage(filePath, { role = "user" } = {}) {
+ const imageBuffer = this.readFile(filePath, { encoding: null });
  const mimeType = mime.lookup(filePath);
 
  if (!mimeType || !mimeType.startsWith('image/')) {
@@ -170,7 +192,7 @@ class MessageHandler {
  const data = imageBuffer.toString('base64');
 
  const imageMessage = {
- ...config,
+ ...{ role },
  content: [
  {
  type: "image",
@@ -184,16 +206,20 @@ class MessageHandler {
  };
 
  this.messages.push(imageMessage);
-
  return this;
  }
 
  addImageFromUrl(url, config = { role: "user" }) {
+ if (!this.imagesToProcess) {
+ this.imagesToProcess = [];
+ }
  this.imagesToProcess.push({ url, config });
  return this;
  }
 
  async processImageUrls() {
+ if (!this.imagesToProcess) return;
+
  const imageContents = await Promise.all(
  this.imagesToProcess.map(async (image) => {
  try {
@@ -202,7 +228,7 @@ class MessageHandler {
  const mimeType = response.headers['content-type'];
  return { base64, mimeType, config: image.config };
  } catch (error) {
- console.error(`Error descargando imagen desde ${image.url}:`, error);
+ console.error(`Error downloading image from ${image.url}:`, error);
  return null;
  }
  })
@@ -230,18 +256,21 @@ class MessageHandler {
 
  async message() {
  this.options.stream = false;
- const response = await this.execute();
- return response.message;
+ let raw = await this.execute();
+ return raw.message;
  }
 
- async json(schemaExample = null, schemaDescription = {}, { type = 'json_object', addExample = false } = {}) {
+ async json(schemaExample = null, schemaDescription = {}, { type = 'json_object', addExample = false, addSchema = true } = {}) {
  this.options.response_format = { type };
+
  if (schemaExample) {
- const schema = generateJsonSchema(schemaExample, schemaDescription);
- this.config.systemExtra = "\nOutput JSON Schema: \n```\n" + JSON.stringify(schema) + "\n```";
+ this.config.schema = generateJsonSchema(schemaExample, schemaDescription);
 
+ if (addSchema) {
+ this.config.systemExtra = "\nOutput JSON Schema: \n```\n" + JSON.stringify(this.config.schema) + "\n```";
+ }
  if (addExample) {
- this.config.systemExtra += "\nOutput Example: \n```\n" + JSON.stringify(schemaExample) + "\n```";
+ this.config.systemExtra += "\nOutput JSON Example: \n```\n" + JSON.stringify(schemaExample) + "\n```";
  }
  }
  const response = await this.message();
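`json()` now keeps the generated schema on `config.schema` (which providers such as MixPerplexity turn into a native `json_schema` response format, per the hunk further down) and gains an `addSchema` flag controlling whether the schema is also injected into the system prompt. A sketch, assuming `json()` resolves to the parsed object (the tail of the method falls outside this hunk):

```js
const { ModelMix } = require('modelmix');

(async () => {
  const data = await ModelMix.new()
    .gpt41mini()
    .addText('Extract the city and population from: "Tokyo has about 37M people."')
    .json(
      { city: 'Tokyo', population: 37000000 },        // schemaExample: shape the schema is inferred from
      { population: 'approximate number of people' }  // schemaDescription: per-field notes
      // addSchema defaults to true; pass { addSchema: false } to rely on a
      // provider's native structured output instead of the system prompt.
    );
  console.log(data);
})();
```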
@@ -270,22 +299,18 @@ class MessageHandler {
 
  async stream(callback) {
  this.options.stream = true;
- this.modelEntry.streamCallback = callback;
+ this.streamCallback = callback;
  return this.execute();
  }
 
- replace(keyValues) {
- this.config.replace = { ...this.config.replace, ...keyValues };
- return this;
- }
-
  replaceKeyFromFile(key, filePath) {
- const content = this.mix.readFile(filePath);
+ const content = this.readFile(filePath);
  this.replace({ [key]: this.template(content, this.config.replace) });
  return this;
  }
 
  template(input, replace) {
+ if (!replace) return input;
  for (const k in replace) {
  input = input.split(/([¿?¡!,"';:\(\)\.\s])/).map(x => x === k ? replace[k] : x).join("");
  }
@@ -309,13 +334,13 @@ class MessageHandler {
  applyTemplate() {
  if (!this.config.replace) return;
 
- this.config.system = this.template(this.config.system, this.config.replace)
+ this.config.system = this.template(this.config.system, this.config.replace);
 
  this.messages = this.messages.map(message => {
  if (message.content instanceof Array) {
  message.content = message.content.map(content => {
  if (content.type === 'text') {
- content.text = this.template(content.text, this.config.replace)
+ content.text = this.template(content.text, this.config.replace);
  }
  return content;
  });
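With the merged class, the stream callback now lives on the mixer (`this.streamCallback`) and is handed to whichever provider ends up serving the request, so streaming survives fallback. A usage sketch; the exact chunk shape delivered to the callback comes from each provider's `processStream`, which is outside this diff:

```js
const { ModelMix } = require('modelmix');

(async () => {
  await ModelMix.new()
    .gpt41mini()
    .addText('Stream a limerick about fallbacks.')
    .stream(async (chunk) => {
      // The chunk shape is provider-defined (see processStream, not shown);
      // a message field is assumed here for illustration.
      process.stdout.write(String(chunk.message ?? ''));
    });
})();
```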
@@ -332,72 +357,101 @@ class MessageHandler {
  this.options.messages = this.messages;
  }
 
+ readFile(filePath, { encoding = 'utf8' } = {}) {
+ try {
+ const absolutePath = path.resolve(filePath);
+ return fs.readFileSync(absolutePath, { encoding });
+ } catch (error) {
+ if (error.code === 'ENOENT') {
+ throw new Error(`File not found: ${filePath}`);
+ } else if (error.code === 'EACCES') {
+ throw new Error(`Permission denied: ${filePath}`);
+ } else {
+ throw new Error(`Error reading file ${filePath}: ${error.message}`);
+ }
+ }
+ }
+
  async execute() {
- return this.mix.limiter.schedule(async () => {
- try {
- await this.prepareMessages();
+ if (!this.models || this.models.length === 0) {
+ throw new Error("No models specified. Use methods like .gpt(), .sonnet() first.");
+ }
+
+ return this.limiter.schedule(async () => {
+ await this.prepareMessages();
+
+ if (this.messages.length === 0) {
+ throw new Error("No user messages have been added. Use addText(prompt), addTextFromFile(filePath), addImage(filePath), or addImageFromUrl(url) to add a prompt.");
+ }
+
+ let lastError = null;
 
- if (this.messages.length === 0) {
- throw new Error("No user messages have been added. Use addText(prompt), addTextFromFile(filePath), addImage(filePath), or addImageFromUrl(url) to add a prompt.");
+ for (let i = 0; i < this.models.length; i++) {
+
+ const currentModel = this.models[i];
+ const currentModelKey = currentModel.key;
+ const providerInstance = currentModel.provider;
+
+ let options = {
+ ...this.options,
+ ...providerInstance.options,
+ model: currentModelKey
+ };
+
+ const config = {
+ ...this.config,
+ ...providerInstance.config,
+ };
+
+ if (config.debug) {
+ const isPrimary = i === 0;
+ log.debug(`[${currentModelKey}] Attempt #${i + 1}` + (isPrimary ? ' (Primary)' : ' (Fallback)'));
  }
 
  try {
- const result = await this.modelEntry.create({ options: this.options, config: this.config });
- this.messages.push({ role: "assistant", content: result.message });
- return result;
- } catch (error) {
- // If there are fallback models available, try the next one
- if (this.fallbackModels.length > 0) {
- const nextModelKey = this.fallbackModels[0];
- log.warn(`Model ${this.options.model} failed, trying fallback model ${nextModelKey}...`);
- error.details && log.warn(error.details);
-
- // Create a completely new handler with the fallback model
- const nextHandler = this.mix.create(
- [nextModelKey, ...this.fallbackModels.slice(1)],
- {
- options: {
- // Keep only generic options, not model-specific ones
- max_tokens: this.options.max_tokens,
- temperature: this.options.temperature,
- top_p: this.options.top_p,
- stream: this.options.stream
- }
- }
- );
+ if (options.stream && this.streamCallback) {
+ providerInstance.streamCallback = this.streamCallback;
+ }
 
- // Assign all messages directly
- nextHandler.messages = [...this.messages];
+ const result = await providerInstance.create({ options, config });
 
- // Keep same system and replacements
- nextHandler.setSystem(this.config.system);
- nextHandler.config.systemExtra = this.config.systemExtra;
- if (this.config.replace) {
- nextHandler.replace(this.config.replace);
- }
+ this.messages.push({ role: "assistant", content: result.message });
+
+ if (config.debug) {
+ log.debug(`Request successful with model: ${currentModelKey}`);
+ log.inspect(result.response);
+ }
 
- await nextHandler.prepareMessages();
+ return result;
 
- const result = await nextHandler.modelEntry.create({
- options: nextHandler.options,
- config: nextHandler.config
- });
- nextHandler.messages.push({ role: "assistant", content: result.message });
- return result;
+ } catch (error) {
+ lastError = error;
+ log.warn(`Model ${currentModelKey} failed (Attempt #${i + 1}/${this.models.length}).`);
+ if (error.message) log.warn(`Error: ${error.message}`);
+ if (error.statusCode) log.warn(`Status Code: ${error.statusCode}`);
+ if (error.details) log.warn(`Details: ${JSON.stringify(error.details)}`);
+
+ if (i === this.models.length - 1) {
+ log.error(`All ${this.models.length} model(s) failed. Throwing last error from ${currentModelKey}.`);
+ throw lastError;
+ } else {
+ const nextModelKey = this.models[i + 1].key;
+ log.info(`-> Proceeding to next model: ${nextModelKey}`);
  }
- throw error;
  }
- } catch (error) {
- throw error;
  }
+
+ log.error("Fallback logic completed without success or throwing the final error.");
+ throw lastError || new Error("Failed to get response from any model, and no specific error was caught.");
  });
  }
  }
+
  class MixCustom {
- constructor(args = { config: {}, options: {}, headers: {} }) {
- this.config = this.getDefaultConfig(args.config);
- this.options = this.getDefaultOptions(args.options);
- this.headers = this.getDefaultHeaders(args.headers);
+ constructor({ config = {}, options = {}, headers = {} } = {}) {
+ this.config = this.getDefaultConfig(config);
+ this.options = this.getDefaultOptions(options);
+ this.headers = this.getDefaultHeaders(headers);
  this.streamCallback = null; // We define streamCallback here
  }
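`execute()` now owns the whole fallback loop: it merges mixer-level options with each provider's own, retries the identical message list against each attached model in order, and only throws after the last one fails. A sketch of what that buys in practice, assuming `GROQ_API_KEY` and `TOGETHER_API_KEY` are set:

```js
const { ModelMix } = require('modelmix');

(async () => {
  // deepseek-r1 is registered twice: Groq first, Together as fallback.
  // If the Groq call errors (rate limit, outage), execute() logs the
  // failure and retries the same messages against Together automatically.
  const reply = await ModelMix.new()
    .deepseekR1({ mix: { groq: true, together: true } })
    .addText('Explain the fallback loop in one sentence.')
    .message();
  console.log(reply);
})();
```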
 
@@ -411,7 +465,6 @@ class MixCustom {
  return {
  url: '',
  apiKey: '',
- prefix: [],
  ...customConfig
  };
  }
@@ -425,31 +478,31 @@ class MixCustom {
  };
  }
 
- async create(args = { config: {}, options: {} }) {
+ async create({ config = {}, options = {} } = {}) {
  try {
- if (args.config.debug) {
+ if (config.debug) {
  log.debug("config");
- log.info(args.config);
+ log.info(config);
  log.debug("options");
- log.inspect(args.options);
+ log.inspect(options);
  }
 
- if (args.options.stream) {
- return this.processStream(await axios.post(this.config.url, args.options, {
+ if (options.stream) {
+ return this.processStream(await axios.post(this.config.url, options, {
  headers: this.headers,
  responseType: 'stream'
  }));
  } else {
- return this.processResponse(await axios.post(this.config.url, args.options, {
+ return this.processResponse(await axios.post(this.config.url, options, {
  headers: this.headers
  }));
  }
  } catch (error) {
- throw this.handleError(error, args);
+ throw this.handleError(error, { config, options });
  }
  }
 
- handleError(error, args) {
+ handleError(error, { config, options }) {
  let errorMessage = 'An error occurred in MixCustom';
  let statusCode = null;
  let errorDetails = null;
@@ -465,8 +518,8 @@ class MixCustom {
  statusCode,
  details: errorDetails,
  stack: error.stack,
- config: args.config,
- options: args.options
+ config: config,
+ options: options
  };
 
  return formattedError;
@@ -514,8 +567,16 @@ class MixCustom {
  return '';
  }
 
+ extractMessage(data) {
+ if (data.choices && data.choices[0].message.content) return data.choices[0].message.content;
+ return '';
+ }
+
  processResponse(response) {
- return { response: response.data, message: response.data.choices[0].message.content };
+ return {
+ response: response.data,
+ message: this.extractMessage(response.data)
+ };
  }
  }
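`processResponse` is now generic and delegates message extraction to a new `extractMessage(data)` hook, so the provider subclasses below (MixAnthropic, MixOllama, MixGoogle) override a single method instead of rebuilding the whole `{ response, message }` envelope. A sketch of a custom provider built on the hook; the endpoint, env var, and response shape are hypothetical:

```js
class MixExample extends MixCustom {
  getDefaultConfig(customConfig) {
    return super.getDefaultConfig({
      url: 'https://api.example.invalid/v1/chat', // hypothetical endpoint
      apiKey: process.env.EXAMPLE_API_KEY,        // hypothetical env var
      ...customConfig
    });
  }

  // Only extraction differs; the inherited processResponse still returns
  // { response: response.data, message: this.extractMessage(response.data) }.
  extractMessage(data) {
    return data.output?.text ?? ''; // hypothetical response shape
  }
}
```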
 
@@ -523,27 +584,26 @@ class MixOpenAI extends MixCustom {
  getDefaultConfig(customConfig) {
  return super.getDefaultConfig({
  url: 'https://api.openai.com/v1/chat/completions',
- prefix: ['gpt', 'ft:', 'o'],
  apiKey: process.env.OPENAI_API_KEY,
  ...customConfig
  });
  }
 
- create(args = { config: {}, options: {} }) {
+ async create({ config = {}, options = {} } = {}) {
  if (!this.config.apiKey) {
  throw new Error('OpenAI API key not found. Please provide it in config or set OPENAI_API_KEY environment variable.');
  }
 
  // Remove max_tokens and temperature for o1/o3 models
- if (args.options.model?.startsWith('o')) {
- delete args.options.max_tokens;
- delete args.options.temperature;
+ if (options.model?.startsWith('o')) {
+ delete options.max_tokens;
+ delete options.temperature;
  }
 
- const content = args.config.system + args.config.systemExtra;
- args.options.messages = [{ role: 'system', content }, ...args.options.messages || []];
- args.options.messages = MixOpenAI.convertMessages(args.options.messages);
- return super.create(args);
+ const content = config.system + config.systemExtra;
+ options.messages = [{ role: 'system', content }, ...options.messages || []];
+ options.messages = MixOpenAI.convertMessages(options.messages);
+ return super.create({ config, options });
  }
 
  static convertMessages(messages) {
@@ -571,21 +631,25 @@ class MixAnthropic extends MixCustom {
  getDefaultConfig(customConfig) {
  return super.getDefaultConfig({
  url: 'https://api.anthropic.com/v1/messages',
- prefix: ['claude'],
  apiKey: process.env.ANTHROPIC_API_KEY,
  ...customConfig
  });
  }
 
- create(args = { config: {}, options: {} }) {
+ async create({ config = {}, options = {} } = {}) {
  if (!this.config.apiKey) {
  throw new Error('Anthropic API key not found. Please provide it in config or set ANTHROPIC_API_KEY environment variable.');
  }
 
- delete args.options.response_format;
+ // Remove top_p for thinking
+ if (options.thinking) {
+ delete options.top_p;
+ }
 
- args.options.system = args.config.system + args.config.systemExtra;
- return super.create(args);
+ delete options.response_format;
+
+ options.system = config.system + config.systemExtra;
+ return super.create({ config, options });
  }
 
  getDefaultHeaders(customHeaders) {
@@ -601,8 +665,18 @@ class MixAnthropic extends MixCustom {
  return '';
  }
 
- processResponse(response) {
- return { response: response.data, message: response.data.content[0].text };
+ extractMessage(data) {
+ if (data.content) {
+ // thinking
+ if (data.content?.[1]?.text) {
+ return data.content[1].text;
+ }
+
+ if (data.content[0].text) {
+ return data.content[0].text;
+ }
+ }
+ return '';
  }
  }
 
@@ -610,20 +684,29 @@ class MixPerplexity extends MixCustom {
  getDefaultConfig(customConfig) {
  return super.getDefaultConfig({
  url: 'https://api.perplexity.ai/chat/completions',
- prefix: ['llama-3', 'mixtral'],
  apiKey: process.env.PPLX_API_KEY,
  ...customConfig
  });
  }
 
- create(args = { config: {}, options: {} }) {
+ async create({ config = {}, options = {} } = {}) {
+
+ if (config.schema) {
+ config.systemExtra = '';
+
+ options.response_format = {
+ type: 'json_schema',
+ json_schema: { schema: config.schema }
+ };
+ }
+
  if (!this.config.apiKey) {
  throw new Error('Perplexity API key not found. Please provide it in config or set PPLX_API_KEY environment variable.');
  }
 
- const content = args.config.system + args.config.systemExtra;
- args.options.messages = [{ role: 'system', content }, ...args.options.messages || []];
- return super.create(args);
+ const content = config.system + config.systemExtra;
+ options.messages = [{ role: 'system', content }, ...options.messages || []];
+ return super.create({ config, options });
  }
  }
 
@@ -647,16 +730,16 @@ class MixOllama extends MixCustom {
  return '';
  }
 
- create(args = { config: {}, options: {} }) {
+ async create({ config = {}, options = {} } = {}) {
 
- args.options.messages = MixOllama.convertMessages(args.options.messages);
- const content = args.config.system + args.config.systemExtra;
- args.options.messages = [{ role: 'system', content }, ...args.options.messages || []];
- return super.create(args);
+ options.messages = MixOllama.convertMessages(options.messages);
+ const content = config.system + config.systemExtra;
+ options.messages = [{ role: 'system', content }, ...options.messages || []];
+ return super.create({ config, options });
  }
 
- processResponse(response) {
- return { response: response.data, message: response.data.message.content.trim() };
+ extractMessage(data) {
+ return data.message.content.trim();
  }
 
  static convertMessages(messages) {
@@ -685,7 +768,6 @@ class MixGrok extends MixOpenAI {
  getDefaultConfig(customConfig) {
  return super.getDefaultConfig({
  url: 'https://api.x.ai/v1/chat/completions',
- prefix: ['grok'],
  apiKey: process.env.XAI_API_KEY,
  ...customConfig
  });
@@ -700,11 +782,11 @@ class MixLMStudio extends MixCustom {
  });
  }
 
- create(args = { config: {}, options: {} }) {
- const content = args.config.system + args.config.systemExtra;
- args.options.messages = [{ role: 'system', content }, ...args.options.messages || []];
- args.options.messages = MixOpenAI.convertMessages(args.options.messages);
- return super.create(args);
+ async create({ config = {}, options = {} } = {}) {
+ const content = config.system + config.systemExtra;
+ options.messages = [{ role: 'system', content }, ...options.messages || []];
+ options.messages = MixOpenAI.convertMessages(options.messages);
+ return super.create({ config, options });
  }
  }
 
@@ -712,21 +794,20 @@ class MixGroq extends MixCustom {
  getDefaultConfig(customConfig) {
  return super.getDefaultConfig({
  url: 'https://api.groq.com/openai/v1/chat/completions',
- prefix: ["llama", "mixtral", "gemma", "deepseek-r1-distill"],
  apiKey: process.env.GROQ_API_KEY,
  ...customConfig
  });
  }
 
- create(args = { config: {}, options: {} }) {
+ async create({ config = {}, options = {} } = {}) {
  if (!this.config.apiKey) {
  throw new Error('Groq API key not found. Please provide it in config or set GROQ_API_KEY environment variable.');
  }
 
- const content = args.config.system + args.config.systemExtra;
- args.options.messages = [{ role: 'system', content }, ...args.options.messages || []];
- args.options.messages = MixOpenAI.convertMessages(args.options.messages);
- return super.create(args);
+ const content = config.system + config.systemExtra;
+ options.messages = [{ role: 'system', content }, ...options.messages || []];
+ options.messages = MixOpenAI.convertMessages(options.messages);
+ return super.create({ config, options });
  }
  }
 
@@ -734,7 +815,6 @@ class MixTogether extends MixCustom {
  getDefaultConfig(customConfig) {
  return super.getDefaultConfig({
  url: 'https://api.together.xyz/v1/chat/completions',
- prefix: ["meta-llama", "google", "NousResearch", "deepseek-ai"],
  apiKey: process.env.TOGETHER_API_KEY,
  ...customConfig
  });
@@ -756,16 +836,16 @@ class MixTogether extends MixCustom {
  });
  }
 
- create(args = { config: {}, options: {} }) {
+ async create({ config = {}, options = {} } = {}) {
  if (!this.config.apiKey) {
  throw new Error('Together API key not found. Please provide it in config or set TOGETHER_API_KEY environment variable.');
  }
 
- const content = args.config.system + args.config.systemExtra;
- args.options.messages = [{ role: 'system', content }, ...args.options.messages || []];
- args.options.messages = MixTogether.convertMessages(args.options.messages);
+ const content = config.system + config.systemExtra;
+ options.messages = [{ role: 'system', content }, ...options.messages || []];
+ options.messages = MixTogether.convertMessages(options.messages);
 
- return super.create(args);
+ return super.create({ config, options });
  }
  }
 
@@ -773,18 +853,116 @@ class MixCerebras extends MixCustom {
  getDefaultConfig(customConfig) {
  return super.getDefaultConfig({
  url: 'https://api.cerebras.ai/v1/chat/completions',
- prefix: ["llama"],
  apiKey: process.env.CEREBRAS_API_KEY,
  ...customConfig
  });
  }
 
- create(args = { config: {}, options: {} }) {
- const content = args.config.system + args.config.systemExtra;
- args.options.messages = [{ role: 'system', content }, ...args.options.messages || []];
- args.options.messages = MixTogether.convertMessages(args.options.messages);
- return super.create(args);
+ async create({ config = {}, options = {} } = {}) {
+ const content = config.system + config.systemExtra;
+ options.messages = [{ role: 'system', content }, ...options.messages || []];
+ options.messages = MixTogether.convertMessages(options.messages);
+ return super.create({ config, options });
+ }
+ }
+
+ class MixGoogle extends MixCustom {
+ getDefaultConfig(customConfig) {
+ return super.getDefaultConfig({
+ url: 'https://generativelanguage.googleapis.com/v1beta/models',
+ apiKey: process.env.GOOGLE_API_KEY,
+ ...customConfig
+ });
+ }
+
+ getDefaultHeaders(customHeaders) {
+ return {
+ 'Content-Type': 'application/json',
+ ...customHeaders
+ };
+ }
+
+ getDefaultOptions(customOptions) {
+ return {
+ generationConfig: {
+ responseMimeType: "text/plain"
+ },
+ ...customOptions
+ };
+ }
+
+ static convertMessages(messages) {
+ return messages.map(message => {
+ const parts = [];
+
+ if (message.content instanceof Array) {
+ message.content.forEach(content => {
+ if (content.type === 'text') {
+ parts.push({ text: content.text });
+ } else if (content.type === 'image') {
+ parts.push({
+ inline_data: {
+ mime_type: content.source.media_type,
+ data: content.source.data
+ }
+ });
+ }
+ });
+ } else {
+ parts.push({ text: message.content });
+ }
+
+ return {
+ role: message.role === 'assistant' ? 'model' : 'user',
+ parts
+ };
+ });
+ }
+
+ async create({ config = {}, options = {} } = {}) {
+ if (!this.config.apiKey) {
+ throw new Error('Google API key not found. Please provide it in config or set GOOGLE_API_KEY environment variable.');
+ }
+
+ const modelId = options.model || 'gemini-2.5-flash-preview-04-17';
+ const generateContentApi = options.stream ? 'streamGenerateContent' : 'generateContent';
+
+ // Construct the full URL with model ID, API endpoint, and API key
+ const fullUrl = `${this.config.url}/${modelId}:${generateContentApi}?key=${this.config.apiKey}`;
+
+ // Convert messages to Gemini format
+ const contents = MixGoogle.convertMessages(options.messages);
+
+ // Add system message if present
+ if (config.system || config.systemExtra) {
+ contents.unshift({
+ role: 'user',
+ parts: [{ text: (config.system || '') + (config.systemExtra || '') }]
+ });
+ }
+
+ // Prepare the request payload
+ const payload = {
+ contents,
+ generationConfig: options.generationConfig || this.getDefaultOptions().generationConfig
+ };
+
+ try {
+ if (options.stream) {
+ throw new Error('Stream is not supported for Gemini');
+ } else {
+ return this.processResponse(await axios.post(fullUrl, payload, {
+ headers: this.headers
+ }));
+ }
+ } catch (error) {
+ throw this.handleError(error, { config, options });
+ }
+ }
+
+ extractMessage(data) {
+ return data.candidates?.[0]?.content?.parts?.[0]?.text;
  }
  }
 
- module.exports = { MixCustom, ModelMix, MixAnthropic, MixOpenAI, MixPerplexity, MixOllama, MixLMStudio, MixGroq, MixTogether, MixGrok, MixCerebras };
+ module.exports = { MixCustom, ModelMix, MixAnthropic, MixOpenAI, MixPerplexity, MixOllama, MixLMStudio, MixGroq, MixTogether, MixGrok, MixCerebras, MixGoogle };
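Because registration is now explicit, any exported provider class can also be wired in by hand with `attach(key, provider)` when no named helper fits. A closing sketch mixing a Gemini helper with a hand-attached local model; the Ollama URL and model name are illustrative, since MixOllama's default config is outside this diff:

```js
const { ModelMix, MixOllama } = require('modelmix');

const mix = ModelMix.new()
  .gemini25flash() // primary: Gemini via the new MixGoogle provider
  .attach('llama3', new MixOllama({
    config: { url: 'http://localhost:11434/api/chat' } // assumed local endpoint
  }));
```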