modelmix 1.2.2 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.js CHANGED
@@ -1,12 +1,11 @@
1
1
  const axios = require('axios');
2
- const fs = require('fs').promises;
2
+ const fs = require('fs');
3
3
  const mime = require('mime-types');
4
4
 
5
5
  class ModelMix {
6
6
  constructor(args = { options: {}, config: {} }) {
7
7
  this.models = {};
8
8
  this.defaultOptions = {
9
- model: 'gpt-4o',
10
9
  max_tokens: 2000,
11
10
  temperature: 1,
12
11
  top_p: 1,
@@ -22,10 +21,11 @@ class ModelMix {
22
21
  }
23
22
 
24
23
  attach(modelInstance) {
25
- const key = modelInstance.constructor.name.replace('Model', '').toLowerCase();
24
+ const key = modelInstance.config.prefix.join("_");
26
25
  this.models[key] = modelInstance;
27
26
  modelInstance.queue = [];
28
27
  modelInstance.active_requests = 0;
28
+ return this;
29
29
  }
30
30
 
31
31
  create(modelKey, overOptions = {}) {
@@ -37,7 +37,12 @@ class ModelMix {
37
37
  throw new Error(`Model with prefix matching ${modelKey} is not attached.`);
38
38
  }
39
39
 
40
- const options = { ...this.defaultOptions, ...modelEntry.options, ...overOptions, model: modelKey };
40
+ const options = {
41
+ ...this.defaultOptions,
42
+ ...modelEntry.options,
43
+ ...overOptions,
44
+ model: modelKey
45
+ };
41
46
  const config = { ...this.config, ...modelEntry.config };
42
47
 
43
48
  return new MessageHandler(this, modelEntry, options, config);
@@ -91,9 +96,9 @@ class MessageHandler {
91
96
  return this;
92
97
  }
93
98
 
94
- async addImage(filePath, config = { role: "user" }) {
99
+ addImage(filePath, config = { role: "user" }) {
95
100
  try {
96
- const imageBuffer = await fs.readFile(filePath);
101
+ const imageBuffer = fs.readFileSync(filePath);
97
102
  const mimeType = mime.lookup(filePath);
98
103
 
99
104
  if (!mimeType || !mimeType.startsWith('image/')) {
@@ -125,13 +130,20 @@ class MessageHandler {
125
130
  }
126
131
 
127
132
  async message() {
133
+ this.options.stream = false;
128
134
  const response = await this.execute();
129
135
  return response.message;
130
136
  }
131
137
 
132
138
  async raw() {
133
- const data = await this.execute();
134
- return data.response;
139
+ this.options.stream = false;
140
+ return this.execute();
141
+ }
142
+
143
+ async stream(callback) {
144
+ this.options.stream = true;
145
+ this.modelEntry.streamCallback = callback;
146
+ return this.execute();
135
147
  }
136
148
 
137
149
  groupByRoles(messages) {
@@ -147,10 +159,9 @@ class MessageHandler {
147
159
  }
148
160
 
149
161
  async execute() {
150
-
151
162
  this.messages = this.groupByRoles(this.messages);
152
163
 
153
- if (this.messages.length === 0) { // Only system message is present
164
+ if (this.messages.length === 0) {
154
165
  throw new Error("No user messages have been added. Use addMessage(prompt) to add a message.");
155
166
  }
156
167
 
@@ -171,33 +182,115 @@ class MessageHandler {
171
182
  }
172
183
  }
173
184
 
174
- class OpenAIModel {
175
- constructor(openai, args = { options: {}, config: {} }) {
176
- this.openai = openai;
185
class MixCustom {
    /**
     * Base connector for chat-completion style HTTP APIs.
     * Provider subclasses override getDefaultConfig / getDefaultHeaders /
     * extractDelta / processResponse to adapt URL, auth and payload shapes.
     */
    constructor(args = { config: {}, options: {}, headers: {} }) {
        this.config = this.getDefaultConfig(args.config);
        this.options = this.getDefaultOptions(args.options);
        this.headers = this.getDefaultHeaders(args.headers);
        this.streamCallback = null; // set per request (see MessageHandler.stream)
    }

    // Provider-specific default request options; the base class adds none.
    getDefaultOptions(customOptions) {
        return {
            ...customOptions
        };
    }

    // Base connection settings; subclasses pre-fill url/prefix.
    getDefaultConfig(customConfig) {
        return {
            url: '',
            apiKey: '',
            prefix: [],
            ...customConfig
        };
    }

    // Default JSON + Bearer-auth headers. Relies on this.config being
    // initialized first (constructor order above guarantees it).
    getDefaultHeaders(customHeaders) {
        return {
            'accept': 'application/json',
            'content-type': 'application/json',
            'authorization': `Bearer ${this.config.apiKey}`,
            ...customHeaders
        };
    }

    /**
     * Sends the request; uses a streaming response when args.options.stream
     * is truthy. Resolves to { response, message } either way.
     */
    async create(args = { config: {}, options: {} }) {
        if (args.options.stream) {
            return this.processStream(await axios.post(this.config.url, args.options, {
                headers: this.headers,
                responseType: 'stream'
            }));
        } else {
            return this.processResponse(await axios.post(this.config.url, args.options, {
                headers: this.headers
            }));
        }
    }

    /**
     * Consumes a newline-delimited SSE-style stream, accumulating every parsed
     * chunk. Resolves with { response: <chunk array>, message: <full text> }.
     * Fix: chunks are accumulated even when no streamCallback is registered,
     * so awaiting a stream without a callback no longer resolves empty.
     */
    processStream(response) {
        return new Promise((resolve, reject) => {
            const raw = [];
            let message = '';
            let buffer = '';

            response.data.on('data', chunk => {
                buffer += chunk.toString();

                let boundary;
                while ((boundary = buffer.indexOf('\n')) !== -1) {
                    const dataStr = buffer.slice(0, boundary).trim();
                    buffer = buffer.slice(boundary + 1);

                    // Skip [DONE] terminators and lines carrying no JSON payload.
                    const firstBraceIndex = dataStr.indexOf('{');
                    if (dataStr === '[DONE]' || firstBraceIndex === -1) continue;

                    const jsonStr = dataStr.slice(firstBraceIndex);
                    try {
                        const data = JSON.parse(jsonStr);
                        const delta = this.extractDelta(data);
                        message += delta;
                        raw.push(data);
                        if (this.streamCallback) {
                            this.streamCallback({ response: data, message, delta });
                        }
                    } catch (error) {
                        console.error('Error parsing JSON:', error);
                    }
                }
            });

            response.data.on('end', () => resolve({ response: raw, message: message.trim() }));
            response.data.on('error', reject);
        });
    }

    /**
     * Extracts the incremental text from one streamed chunk (OpenAI shape).
     * Fix: guards choices[0]/delta so a chunk with an empty or missing
     * choices array yields '' instead of throwing a TypeError.
     */
    extractDelta(data) {
        if (data.choices && data.choices[0] && data.choices[0].delta && data.choices[0].delta.content) {
            return data.choices[0].delta.content;
        }
        return '';
    }

    // Maps a non-streaming response body to the common { response, message } shape.
    processResponse(response) {
        return { response: response.data, message: response.data.choices[0].message.content };
    }
}
277
+
278
+ class MixOpenAI extends MixCustom {
279
+ getDefaultConfig(customConfig) {
280
+ return super.getDefaultConfig({
281
+ url: 'https://api.openai.com/v1/chat/completions',
282
+ prefix: ['gpt'],
283
+ ...customConfig
284
+ });
285
+ }
193
286
 
287
+ create(args = { config: {}, options: {} }) {
194
288
  args.options.messages = [{ role: 'system', content: args.config.system }, ...args.options.messages || []];
195
- args.options.messages = this.convertMessages(args.options.messages);
196
- const response = await this.openai.chat.completions.create(args.options);
197
- return { response, message: response.choices[0].message.content };
289
+ args.options.messages = MixOpenAI.convertMessages(args.options.messages);
290
+ return super.create(args);
198
291
  }
199
292
 
200
- convertMessages(messages) {
293
+ static convertMessages(messages) {
201
294
  return messages.map(message => {
202
295
  if (message.role === 'user' && message.content instanceof Array) {
203
296
  message.content = message.content.map(content => {
@@ -218,65 +311,116 @@ class OpenAIModel {
218
311
  }
219
312
  }
220
313
 
221
- class AnthropicModel {
222
- constructor(anthropic, args = { options: {}, config: {} }) {
223
- this.anthropic = anthropic;
224
- this.options = {
225
- temperature: 0.5,
226
- ...args.options || {}
227
- }
314
class MixAnthropic extends MixCustom {
    // Connector for Anthropic's Messages API; "claude" model keys route here.
    getDefaultConfig(customConfig) {
        const anthropicDefaults = {
            url: 'https://api.anthropic.com/v1/messages',
            prefix: ['claude'],
            ...customConfig
        };
        return super.getDefaultConfig(anthropicDefaults);
    }

    // Anthropic authenticates with x-api-key plus a pinned API-version header.
    getDefaultHeaders(customHeaders) {
        return super.getDefaultHeaders({
            'x-api-key': this.config.apiKey,
            'anthropic-version': '2023-06-01',
            ...customHeaders
        });
    }

    // Streamed chunks carry incremental text at delta.text.
    extractDelta(data) {
        return data.delta && data.delta.text ? data.delta.text : '';
    }

    // Non-streaming responses put the reply text at content[0].text.
    processResponse(response) {
        const body = response.data;
        return { response: body, message: body.content[0].text };
    }
}
239
340
 
240
- const response = await this.anthropic.messages.create(args.options);
241
- const responseText = response.content[0].text;
341
class MixPerplexity extends MixCustom {
    // Connector for the Perplexity chat-completions endpoint.
    getDefaultConfig(customConfig) {
        const perplexityDefaults = {
            url: 'https://api.perplexity.ai/chat/completions',
            prefix: ['pplx', 'llama', 'mixtral'],
            ...customConfig
        };
        return super.getDefaultConfig(perplexityDefaults);
    }

    // The system prompt travels as the first message in the list.
    create(args = { config: {}, options: {} }) {
        const existing = args.options.messages || [];
        args.options.messages = [{ role: 'system', content: args.config.system }, ...existing];
        return super.create(args);
    }
}
246
355
 
247
- class CustomModel {
248
- constructor(args = { config: {}, options: {} }) {
249
- this.config = {
250
- url: 'https://api.perplexity.ai/chat/completions',
251
- bearer: '',
252
- prefix: ["pplx", "llama", "mixtral"],
253
- max_request: 1,
254
- ...args.config
255
- };
356
class MixOllama extends MixCustom {
    // Connector for a local Ollama server's /api/chat endpoint.

    getDefaultConfig(customConfig) {
        return super.getDefaultConfig({
            url: 'http://localhost:11434/api/chat',
            ...customConfig
        });
    }

    // Ollama expects sampling parameters nested under an `options` key.
    getDefaultOptions(customOptions) {
        return {
            options: customOptions,
        };
    }

    // Streamed chunks carry the incremental text at message.content.
    extractDelta(data) {
        if (data.message && data.message.content) return data.message.content;
        return '';
    }

    // Flattens message parts to Ollama's shape, then prepends the system
    // prompt (as a plain string, so it must not pass through convertMessages).
    create(args = { config: {}, options: {} }) {
        args.options.messages = MixOllama.convertMessages(args.options.messages);
        args.options.messages = [{ role: 'system', content: args.config.system }, ...args.options.messages || []];
        return super.create(args);
    }

    // Non-streaming responses put the reply text at message.content.
    processResponse(response) {
        return { response: response.data, message: response.data.message.content.trim() };
    }

    /**
     * Converts multimodal message-part arrays into Ollama's
     * { content: string, images: [base64] } message shape.
     * Fixes: tolerates an undefined messages list (create() already treats it
     * as optional one line later), and passes string-content messages through
     * unchanged instead of crashing on content.forEach — mirroring the
     * `instanceof Array` guard used by MixOpenAI.convertMessages.
     */
    static convertMessages(messages) {
        return (messages || []).map(entry => {
            if (!(entry.content instanceof Array)) return entry;

            let content = '';
            const images = [];

            entry.content.forEach(item => {
                if (item.type === 'text') {
                    content += item.text + ' ';
                } else if (item.type === 'image') {
                    images.push(item.source.data); // base64 payload of the image part
                }
            });

            return {
                role: entry.role,
                content: content.trim(),
                images: images
            };
        });
    }
}
277
408
 
278
- return { response: response.data, message: response.data.choices[0].message.content };
409
class MixLMStudio extends MixCustom {
    // Connector for a local LM Studio server (OpenAI-compatible API).
    getDefaultConfig(customConfig) {
        const lmStudioDefaults = {
            url: 'http://localhost:1234/v1/chat/completions',
            ...customConfig
        };
        return super.getDefaultConfig(lmStudioDefaults);
    }

    // LM Studio speaks the OpenAI message format, so reuse MixOpenAI's
    // conversion after prepending the system prompt.
    create(args = { config: {}, options: {} }) {
        const previous = args.options.messages || [];
        args.options.messages = MixOpenAI.convertMessages(
            [{ role: 'system', content: args.config.system }, ...previous]
        );
        return super.create(args);
    }
}
281
425
 
282
- module.exports = { OpenAIModel, AnthropicModel, CustomModel, ModelMix };
426
+ module.exports = { MixCustom, ModelMix, MixAnthropic, MixOpenAI, MixPerplexity, MixOllama, MixLMStudio };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "modelmix",
3
- "version": "1.2.2",
3
+ "version": "2.2.0",
4
4
  "description": "🧬 ModelMix - Unified API for Diverse AI Language Models.",
5
5
  "main": "index.js",
6
6
  "repository": {
@@ -20,7 +20,12 @@
20
20
  "llama",
21
21
  "mixtral",
22
22
  "nlp",
23
- "chat"
23
+ "chat",
24
+ "multimodal",
25
+ "omni",
26
+ "4o",
27
+ "ollama",
28
+ "lmstudio"
24
29
  ],
25
30
  "author": "Martin Clasen",
26
31
  "license": "MIT",