modelmix 2.7.0 → 2.7.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -3
- package/demo/custom.mjs +2 -2
- package/demo/demo.mjs +14 -14
- package/demo/groq.mjs +2 -2
- package/demo/together.mjs +6 -3
- package/index.js +3 -3
- package/package.json +4 -1
package/README.md
CHANGED
@@ -6,7 +6,7 @@
 
 - **Unified Interface**: Interact with multiple AI models through a single, coherent API.
 - **Request Rate Control**: Manage the rate of requests to adhere to provider limitations using Bottleneck.
-- **Flexible Integration**: Easily integrate popular models like OpenAI, Anthropic, Perplexity, Groq, Ollama, LM Studio or custom models.
+- **Flexible Integration**: Easily integrate popular models like OpenAI, Anthropic, Perplexity, Groq, Together AI, Ollama, LM Studio or custom models.
 - **History Tracking**: Automatically logs the conversation history with model responses, allowing you to limit the number of historical messages with `max_history`.
 
 ## 📦 Installation
@@ -124,9 +124,9 @@ Here's a quick example to get you started:
   .stream((data) => { console.log(data.message); });
 ```
 
-#### Together AI (
+#### Together AI (deepseek-ai/DeepSeek-R1)
 ```javascript
-const together = mmix.create('
+const together = mmix.create('deepseek-ai/DeepSeek-R1', { options: { temperature: 0.7 } });
 together.addText('What are the main differences between Python and JavaScript?');
 const comparison = await together.message();
 console.log(comparison);
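Taken together with the demo files below, the updated README example wires up Together AI roughly as follows. This is a minimal sketch, not code from the package: the `modelmix` import path and the `max_tokens` value are assumptions carried over from the demos in this diff, and `TOGETHER_API_KEY` is the environment variable MixTogether falls back to per index.js.

```javascript
// Minimal sketch (not part of the package): assumes TOGETHER_API_KEY is set,
// which is the default MixTogether reads per index.js in this diff.
import { ModelMix, MixTogether } from 'modelmix';

const mmix = new ModelMix({ options: { max_tokens: 2000 } });
mmix.attach(new MixTogether());

// 'deepseek-ai/DeepSeek-R1' routes to MixTogether via the new "deepseek-ai" prefix.
const together = mmix.create('deepseek-ai/DeepSeek-R1', { options: { temperature: 0.7 } });
together.addText('What are the main differences between Python and JavaScript?');
console.log(await together.message());
```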
package/demo/custom.mjs
CHANGED
@@ -4,7 +4,7 @@ import { ModelMix, MixCustom, MixTogether } from '../index.js';
 
 const mmix = new ModelMix({
     options: {
-        max_tokens:
+        max_tokens: 2000,
     },
     config: {
         system: 'You are ALF from Melmac.',
@@ -15,6 +15,6 @@ const mmix = new ModelMix({
 
 mmix.attach(new MixTogether());
 
-let r = mmix.create('
+let r = mmix.create('deepseek-ai/DeepSeek-R1').addText('hi there');
 r = await r.addText('do you like cats?').message();
 console.log(r);
package/demo/demo.mjs
CHANGED
@@ -33,16 +33,16 @@ mmix.attach(new MixOllama({
 
 mmix.replace({ '{name}': 'ALF' });
 
-console.log("\n" + '--------| gpt-4o-mini |--------');
-const gpt = mmix.create('gpt-4o-mini', { options: { temperature: 0 } }).addText("Have you ever eaten a {animal}?");
-gpt.replace({ '{animal}': 'cat' });
-console.log(await gpt.message());
+// console.log("\n" + '--------| gpt-4o-mini |--------');
+// const gpt = mmix.create('gpt-4o-mini', { options: { temperature: 0 } }).addText("Have you ever eaten a {animal}?");
+// gpt.replace({ '{animal}': 'cat' });
+// console.log(await gpt.message());
 
-console.log("\n" + '--------| claude-3-5-sonnet-20240620 |--------');
-const claude = mmix.create('claude-3-5-sonnet-20240620', { options: { temperature: 0 } });
-claude.addImageFromUrl('https://pbs.twimg.com/media/F6-GsjraAAADDGy?format=jpg');
-const imageDescription = await claude.addText('describe the image').message();
-console.log(imageDescription);
+// console.log("\n" + '--------| claude-3-5-sonnet-20240620 |--------');
+// const claude = mmix.create('claude-3-5-sonnet-20240620', { options: { temperature: 0 } });
+// claude.addImageFromUrl('https://pbs.twimg.com/media/F6-GsjraAAADDGy?format=jpg');
+// const imageDescription = await claude.addText('describe the image').message();
+// console.log(imageDescription);
 
 console.log("\n" + '--------| claude-3-5-sonnet-20240620 |--------');
 const writer = mmix.create('claude-3-5-sonnet-20240620', { options: { temperature: 0.5 } });
@@ -51,11 +51,11 @@ writer.replaceKeyFromFile('{story_title}', './title.md');
 const story = await writer.addTextFromFile('./prompt.md').message();
 console.log(story);
 
-console.log("\n" + '--------| llama-3-sonar-large-32k-online |--------');
-const pplx = mmix.create('llama-3-sonar-large-32k-online', { config: { max_tokens: 500 } });
-pplx.addText('How much is ETH trading in USD?');
-const news = await pplx.addText('What are the 3 most recent Ethereum news?').message();
-console.log(news);
+// console.log("\n" + '--------| llama-3-sonar-large-32k-online |--------');
+// const pplx = mmix.create('llama-3-sonar-large-32k-online', { config: { max_tokens: 500 } });
+// pplx.addText('How much is ETH trading in USD?');
+// const news = await pplx.addText('What are the 3 most recent Ethereum news?').message();
+// console.log(news);
 
 // console.log("\n" + '--------| ollama (llava:latest) |--------');
 // await mmix.create('llava:latest')
package/demo/groq.mjs
CHANGED
@@ -6,7 +6,7 @@ const env = process.env;
 
 const mmix = new ModelMix({
     options: {
-        max_tokens:
+        max_tokens: 2000,
     },
     config: {
         system: 'You are ALF from Melmac.',
@@ -20,5 +20,5 @@ mmix.attach(new MixGroq({
     }
 }));
 
-const r = await mmix.create('
+const r = await mmix.create('deepseek-r1-distill-llama-70b').addText('do you like cats?').message();
 console.log(r)
package/demo/together.mjs
CHANGED
@@ -17,6 +17,9 @@ const mmix = new ModelMix({
 
 mmix.attach(new MixTogether());
 
-let r = mmix.create('NousResearch/Hermes-3-Llama-3.1-405B-Turbo')
-
-
+let r = mmix.create('NousResearch/Hermes-3-Llama-3.1-405B-Turbo')
+    .addText('hi there')
+    .addText('do you like cats?')
+    .message();
+
+console.log(r);
package/index.js
CHANGED
@@ -428,7 +428,7 @@ class MixOpenAI extends MixCustom {
     getDefaultConfig(customConfig) {
         return super.getDefaultConfig({
             url: 'https://api.openai.com/v1/chat/completions',
-            prefix: ['gpt'],
+            prefix: ['gpt', 'ft:', 'o3'],
             apiKey: process.env.OPENAI_API_KEY,
             ...customConfig
         });
@@ -582,7 +582,7 @@ class MixGroq extends MixCustom {
     getDefaultConfig(customConfig) {
         return super.getDefaultConfig({
             url: 'https://api.groq.com/openai/v1/chat/completions',
-            prefix: ["llama", "mixtral", "gemma"],
+            prefix: ["llama", "mixtral", "gemma", "deepseek-r1-distill"],
             apiKey: process.env.GROQ_API_KEY,
             ...customConfig
         });
@@ -599,7 +599,7 @@ class MixTogether extends MixCustom {
     getDefaultConfig(customConfig) {
         return super.getDefaultConfig({
             url: 'https://api.together.xyz/v1/chat/completions',
-            prefix: ["meta-llama", "google", "NousResearch"],
+            prefix: ["meta-llama", "google", "NousResearch", "deepseek-ai"],
             apiKey: process.env.TOGETHER_API_KEY,
             ...customConfig
         });
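All three prefix changes serve the same routing mechanism: `mmix.create(modelKey)` is dispatched to whichever attached handler declares a matching prefix. The sketch below illustrates that idea with a simple `startsWith` test; the helper name and the handler shape are hypothetical, not ModelMix's actual internals.

```javascript
// Hypothetical illustration of prefix routing; ModelMix's real resolution
// logic lives elsewhere in index.js and may differ in detail.
function pickHandler(modelKey, handlers) {
    // handlers: [{ name: 'MixOpenAI', prefix: ['gpt', 'ft:', 'o3'] }, ...]
    return handlers.find(h => h.prefix.some(p => modelKey.startsWith(p)));
}

// With the prefixes from this diff:
//   'o3-mini'                       -> MixOpenAI   (new 'o3' prefix)
//   'ft:gpt-4o-mini:...'            -> MixOpenAI   (new 'ft:' prefix for fine-tuned model IDs)
//   'deepseek-r1-distill-llama-70b' -> MixGroq     (new 'deepseek-r1-distill' prefix)
//   'deepseek-ai/DeepSeek-R1'       -> MixTogether (new 'deepseek-ai' prefix)
```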
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "modelmix",
-  "version": "2.7.0",
+  "version": "2.7.4",
   "description": "🧬 ModelMix - Unified API for Diverse AI LLM.",
   "main": "index.js",
   "repository": {
@@ -31,6 +31,9 @@
     "together",
     "gpt-o1",
     "gpt-o1-mini",
+    "deepseek",
+    "o3",
+    "o3-mini",
     "nousresearch",
     "hermes",
     "bottleneck",