modelmix 2.5.6 → 2.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/demo/demo.mjs CHANGED
@@ -33,8 +33,8 @@ mmix.attach(new MixOllama({
 
  mmix.replace({ '{name}': 'ALF' });
 
- console.log("\n" + '--------| gpt-4o |--------');
- const gpt = mmix.create('gpt-4o', { options: { temperature: 0 } }).addText("Have you ever eaten a {animal}?");
+ console.log("\n" + '--------| gpt-4o-mini |--------');
+ const gpt = mmix.create('gpt-4o-mini', { options: { temperature: 0 } }).addText("Have you ever eaten a {animal}?");
  gpt.replace({ '{animal}': 'cat' });
  console.log(await gpt.message());
 
@@ -0,0 +1,50 @@
+ import 'dotenv/config';
+ import { ModelMix, MixOpenAI, MixAnthropic, MixPerplexity, MixOllama } from '../index.js';
+
+ const mix = new ModelMix({
+     options: {
+         max_tokens: 200,
+     },
+     config: {
+         system: 'You are {name} from Melmac.',
+         max_history: 2,
+         max_request: 3,
+         debug: true,
+     }
+ });
+
+ mix.attach(new MixOpenAI());
+
+ // Helper that returns a promise resolving after a random delay
+ const randomDelay = () => new Promise(resolve => setTimeout(resolve, Math.random() * 2000 + 1000));
+
+ // Send a single request to the model
+ async function makeRequest(id) {
+     const start = Date.now();
+     console.log(`Starting request ${id}`);
+
+     const message = await mix.create('gpt-4o-mini')
+         .addText(`Generate an interesting fact about the number ${id}.`)
+         .message();
+
+     // await randomDelay(); // Simulate some extra processing
+
+     const duration = Date.now() - start;
+     console.log(`Request ${id} completed in ${duration}ms: ${message}`);
+ }
+
+ // Main function that runs the example
+ async function runExample() {
+     console.log("Starting concurrency example...");
+
+     // Build an array of promises for 5 requests
+     const requests = Array.from({ length: 5 }, (_, i) => makeRequest(i + 1));
+
+     // Fire all requests and wait for them to complete
+     await Promise.all(requests);
+
+     console.log("Concurrency example completed.");
+ }
+
+ // Run the example
+ runExample().catch(console.error);
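Note: with max_request set to 3 in the config above, the five makeRequest calls fan out immediately, but the p-limit gate added in index.js (see the diff below) lets only three create('gpt-4o-mini') requests run at once; the other two start as earlier ones resolve. A minimal sketch of tuning that cap (sequentialMix and the value 1 are illustrative only, forcing fully serialized requests):

// Hypothetical configuration: serialize every request through the limiter.
const sequentialMix = new ModelMix({
    options: {},
    config: { max_request: 1 } // pLimit(1) → one in-flight request at a time
});
sequentialMix.attach(new MixOpenAI());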
package/index.js CHANGED
@@ -2,6 +2,7 @@ const axios = require('axios');
  const fs = require('fs');
  const mime = require('mime-types');
  const log = require('lemonlog')('ModelMix');
+ const pLimit = require('p-limit');
 
  class ModelMix {
      constructor(args = { options: {}, config: {} }) {
@@ -20,6 +21,8 @@ class ModelMix {
              debug: false,
              ...args.config
          }
+
+         this.limit = pLimit(this.config.max_request);
      }
 
      replace(keyValues) {
@@ -30,8 +33,6 @@ class ModelMix {
      attach(modelInstance) {
          const key = modelInstance.config.prefix.join("_");
          this.models[key] = modelInstance;
-         modelInstance.queue = [];
-         modelInstance.active_requests = 0;
          return this;
      }
 
@@ -59,29 +60,6 @@ class ModelMix {
 
          return new MessageHandler(this, modelEntry, options, config);
      }
-
-     async processQueue(modelEntry) {
-         if (modelEntry.active_requests >= modelEntry.config.max_request) {
-             return;
-         }
-
-         const nextTask = modelEntry.queue.shift();
-         if (!nextTask) {
-             return;
-         }
-
-         modelEntry.active_requests++;
-
-         try {
-             const result = await modelEntry.create(nextTask.args);
-             nextTask.resolve(result);
-         } catch (error) {
-             nextTask.reject(error);
-         } finally {
-             modelEntry.active_requests--;
-             this.processQueue(modelEntry);
-         }
-     }
  }
 
  class MessageHandler {
@@ -277,33 +255,29 @@ class MessageHandler {
      }
 
      async execute() {
+         return this.mix.limit(() => new Promise(async (resolve, reject) => {
+             await this.processImageUrls();
 
-         await this.processImageUrls();
-
-         this.applyTemplate();
-         this.messages = this.messages.slice(-this.config.max_history);
-         this.messages = this.groupByRoles(this.messages);
+             this.applyTemplate();
+             this.messages = this.messages.slice(-this.config.max_history);
+             this.messages = this.groupByRoles(this.messages);
 
-         if (this.messages.length === 0) {
-             throw new Error("No user messages have been added. Use addText(prompt), addTextFromFile(filePath), addImage(filePath), or addImageFromUrl(url) to add a prompt.");
-         }
+             if (this.messages.length === 0) {
+                 throw new Error("No user messages have been added. Use addText(prompt), addTextFromFile(filePath), addImage(filePath), or addImageFromUrl(url) to add a prompt.");
+             }
 
-         this.options.messages = this.messages;
+             this.options.messages = this.messages;
 
-         return new Promise((resolve, reject) => {
-             this.modelEntry.queue.push({
-                 args: { options: this.options, config: this.config },
-                 resolve: (result) => {
-                     this.messages.push({ role: "assistant", content: result.message });
-                     resolve(result);
-                 },
-                 reject
-             });
-             this.mix.processQueue(this.modelEntry);
-         });
+             try {
+                 const result = await this.modelEntry.create({ options: this.options, config: this.config });
+                 this.messages.push({ role: "assistant", content: result.message });
+                 resolve(result);
+             } catch (error) {
+                 reject(error);
+             }
+         }));
      }
  }
-
  class MixCustom {
      constructor(args = { config: {}, options: {}, headers: {} }) {
          this.config = this.getDefaultConfig(args.config);
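The hand-rolled queue and active_requests counters removed above are replaced by a single p-limit gate: the constructor creates this.limit = pLimit(this.config.max_request), and execute() now wraps the whole request in this.mix.limit(...). A minimal standalone sketch of that pattern (illustrative only; fakeCall stands in for modelEntry.create):

const pLimit = require('p-limit');

const limit = pLimit(3); // mirrors config.max_request

// Stand-in for an API call; any function returning a promise works.
const fakeCall = (id) => new Promise(resolve => setTimeout(() => resolve(`done ${id}`), 500));

async function main() {
    // All ten wrappers are created up front, but p-limit starts at most
    // three at a time; the rest wait in its internal queue.
    const tasks = Array.from({ length: 10 }, (_, i) => limit(() => fakeCall(i)));
    console.log(await Promise.all(tasks));
}

main().catch(console.error);

Because the limiter now lives on the ModelMix instance rather than on each attached model, the max_request cap applies across all providers attached to that instance instead of per model.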
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "modelmix",
-   "version": "2.5.6",
+   "version": "2.6.0",
    "description": "🧬 ModelMix - Unified API for Diverse AI LLM.",
    "main": "index.js",
    "repository": {
@@ -36,7 +36,8 @@
    },
    "homepage": "https://github.com/clasen/ModelMix#readme",
    "dependencies": {
-     "axios": "^1.6.8",
-     "lemonlog": "^1.1.2"
+     "axios": "^1.7.4",
+     "lemonlog": "^1.1.2",
+     "p-limit": "^3.1.0"
    }
  }