modelmix 3.1.8 → 3.3.2
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- package/MODELS.md +249 -0
- package/README.md +120 -134
- package/demo/custom.mjs +41 -3
- package/demo/demo.mjs +20 -27
- package/demo/fallback.mjs +3 -14
- package/demo/grok.mjs +7 -4
- package/demo/groq.mjs +2 -2
- package/demo/lmstudio.mjs +1 -1
- package/demo/parallel.mjs +3 -3
- package/demo/short.mjs +34 -0
- package/demo/stream.mjs +6 -67
- package/demo/together.mjs +4 -17
- package/index.js +401 -223
- package/package.json +3 -2
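
The demo diffs below replace the old setup style (attach provider instances to a shared ModelMix, then create('model')) with model-keyed attachment and shorthand builders such as gpt41nano(), sonnet37(), and sonar(). A minimal sketch of the new pattern, assuming the published modelmix entry point exposes the same exports as the demos' '../index.js'; the model key and option names are taken from the diffs and are not otherwise verified:

    import 'dotenv/config'
    import { ModelMix, MixOpenAI } from 'modelmix';

    // Attach a provider under an explicit model key, then build the prompt fluently.
    const answer = await ModelMix.new()
        .attach('gpt-4.1-nano', new MixOpenAI({ options: { temperature: 0 } }))
        .addText('Have you ever eaten a cat?')
        .message();

    console.log(answer);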
package/demo/demo.mjs
CHANGED
@@ -5,6 +5,7 @@ import { ModelMix, MixOpenAI, MixAnthropic, MixPerplexity, MixOllama } from '../
 const mmix = new ModelMix({
     options: {
         max_tokens: 200,
+        temperature: 0.5,
     },
     config: {
         system: 'You are {name} from Melmac.',
@@ -14,52 +15,44 @@ const mmix = new ModelMix({
     }
 });

-mmix.attach(new MixOpenAI());
-mmix.attach(new MixAnthropic());
-mmix.attach(new MixPerplexity({
-    config: {
-        apiKey: process.env.PPLX_API_KEY,
-        system: 'You are my personal assistant.'
-    },

-
-mmix.attach(new MixOllama({
+const pplxSettings = {
     config: {
-
-
-
-        temperature: 0.5,
+        apiKey: process.env.PPLX_API_KEY,
+        system: 'You are my personal assistant.',
+        max_tokens: 500
     }
-}
+};
+

 mmix.replace({ '{name}': 'ALF' });

 console.log("\n" + '--------| gpt-4.1-nano |--------');
-const gpt = mmix.
+const gpt = mmix.attach('gpt-4.1-nano', new MixOpenAI({ options: { temperature: 0 } })).addText("Have you ever eaten a {animal}?");
 gpt.replace({ '{animal}': 'cat' });
 console.log(await gpt.json({ time: '24:00:00', message: 'Hello' }, { time: 'Time in format HH:MM:SS' }));

-
-
-
-
-
+console.log("\n" + '--------| claude-3-5-sonnet-20240620 |--------');
+const claude = ModelMix.new().attach('claude-3-5-sonnet-20240620', new MixAnthropic());
+claude.addImageFromUrl('https://pbs.twimg.com/media/F6-GsjraAAADDGy?format=jpg');
+const imageDescription = await claude.addText('describe the image').message();
+console.log(imageDescription);

 console.log("\n" + '--------| claude-3-7-sonnet-20250219 |--------');
-const writer =
+const writer = ModelMix.new().attach('claude-3-7-sonnet-20250219', new MixAnthropic());
 writer.setSystem('You are a writer like Stephen King');
 writer.replaceKeyFromFile('{story_title}', './title.md');
 const story = await writer.addTextFromFile('./prompt.md').message();
 console.log(story);

-
-
-
-
-
+console.log("\n" + '--------| sonar |--------');
+const pplx = ModelMix.new().sonar(pplxSettings);
+pplx.addText('How much is ETH trading in USD?');
+const ETH = await pplx.json({ price: 1000.1 });
+console.log(ETH.price);

 // console.log("\n" + '--------| ollama (llava:latest) |--------');
-// await mmix.
+// await mmix.new().attach('llava:latest', new MixOllama())
 //     .addImage('./watson.jpg')
 //     .addText('what is the predominant color?')
 //     .stream((data) => { console.log(data.message); });
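
The updated demo leans on two helpers worth calling out: replace() substitutes {placeholder} tokens in queued prompt text, and json() takes an example object (plus an optional per-field hint object) that shapes the structured response. A condensed sketch based only on the calls shown above; the hint-object semantics are an assumption drawn from the demo, not a documented contract:

    const gpt = ModelMix.new()
        .attach('gpt-4.1-nano', new MixOpenAI({ options: { temperature: 0 } }))
        .addText('Have you ever eaten a {animal}?');

    gpt.replace({ '{animal}': 'cat' });

    // First argument: example object describing the expected JSON shape.
    // Second argument: optional hints keyed by field name.
    const out = await gpt.json(
        { time: '24:00:00', message: 'Hello' },
        { time: 'Time in format HH:MM:SS' }
    );
    console.log(out);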
package/demo/fallback.mjs
CHANGED
@@ -15,24 +15,13 @@ const mmix = new ModelMix({
         max_tokens: 8192,
     }
 });
-const an = new MixAnthropic();
-an.config.url = 'fail';
-mmix.attach(new MixOpenAI(), an, new MixGrok());

-
-const modelOptionsRef = ['claude-3-5-sonnet-20241022', 'gpt-4.1-nano'];
+mmix.sonnet37({ config: { url: 'fail' } }).gpt41nano();

 async function main() {
-
+    mmix.addText('hola, como estas?');
+    const response = await mmix.message();
     console.log(response);
 }

-async function generateThread(modelOptionsRef) {
-    const model = mmix.create(modelOptionsRef, { options: { temperature: 0.5 } });
-    model.addText('hola, como estas?');
-    const response = await model.message();
-
-    return response.split('---').map(section => section.trim());
-}
-
 main();
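
The fallback demo now expresses the chain declaratively: each shorthand call registers a model, and the registration order appears to define the fallback sequence (here the Anthropic entry is forced to fail via url: 'fail' so the request falls through to gpt-4.1-nano). A sketch of that reading, keeping the broken URL only to demonstrate the fallback; treat the ordering semantics as an assumption drawn from this demo:

    const mmix = new ModelMix({ options: { max_tokens: 8192 } });

    // Register two candidates; the broken URL should push the request to the second one.
    mmix.sonnet37({ config: { url: 'fail' } }).gpt41nano();

    mmix.addText('hola, como estas?');
    console.log(await mmix.message());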
package/demo/grok.mjs
CHANGED
@@ -1,6 +1,6 @@
 import 'dotenv/config'

-import { ModelMix
+import { ModelMix } from '../index.js';

 const mmix = new ModelMix({
     options: {
@@ -8,11 +8,14 @@ const mmix = new ModelMix({
     },
     config: {
         system: 'You are ALF from Melmac.',
-        max_history: 2
+        max_history: 2,
+        debug: true
     }
 });

-mmix.attach(new MixGrok(), new MixAnthropic(), new MixOpenAI());

-const r = await mmix.
+const r = await mmix.grok2()
+    .addText('hi there!')
+    .addText('do you like cats?')
+    .message();
 console.log(r);
package/demo/groq.mjs
CHANGED
@@ -14,11 +14,11 @@ const mmix = new ModelMix({
     }
 });

-mmix.attach(new MixGroq({
+mmix.attach('deepseek-r1-distill-llama-70b', new MixGroq({
     config: {
         apiKey: env.GROQ_API_KEY,
     }
 }));

-const r = await mmix.
+const r = await mmix.addText('do you like cats?').message();
 console.log(r)
package/demo/lmstudio.mjs
CHANGED
@@ -23,7 +23,7 @@ console.log(model.config)
 mmix.attach(model);


-const LMS = mmix.
+const LMS = mmix.attach('Orenguteng/Llama-3-8B-Lexi-Uncensored-GGUF', model);
 console.log(await LMS
     .addImage('./watson.jpg')
     .addText('describir')
package/demo/parallel.mjs
CHANGED
@@ -12,9 +12,9 @@ const mix = new ModelMix({
     },
     debug: true,
     }
-})
+})

-mix.
+mix.gpt41nano();

 // Function to create a promise that resolves after a random time
 const randomDelay = () => new Promise(resolve => setTimeout(resolve, Math.random() * 2000 + 1000));
@@ -24,7 +24,7 @@ async function makeRequest(id) {
     const start = Date.now();
     console.log(`Starting request ${id}`);

-    const message = await mix
+    const message = await mix
         .addText(`Generate an interesting fact about the number ${id}.`)
         .message();

package/demo/short.mjs
ADDED
@@ -0,0 +1,34 @@
+import 'dotenv/config'
+
+import { ModelMix } from '../index.js';
+
+const setup = {
+    config: {
+        system: "You are ALF, if they ask your name, answer 'ALF'.",
+        debug: true
+    }
+};
+
+const result = await ModelMix.new(setup)
+    .scout({ config: { temperature: 0 } })
+    .addText("What's your name?")
+    .message();
+
+console.log(result);
+
+const model = await ModelMix.new({ config: { debug: true } })
+    .scout({ config: { temperature: 0 } })
+    .o4mini()
+    .sonnet37think()
+    .gpt45()
+    .gemini25flash()
+    .addText("Name and capital of 3 South American countries.")
+
+const jsonResult = await model.json({ countries: [{ name: "", capital: "" }] });
+
+console.log(jsonResult);
+
+model.addText("Name and capital of 1 South American countries.")
+
+const jsonResult2 = await model.json({ countries: [{ name: "", capital: "" }] });
+console.log(jsonResult2);
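
As the new short.mjs demo suggests, several shorthand models can be chained onto one builder and the same instance reused for successive json() calls, with fresh prompt text added between them. A trimmed sketch under that reading; the shorthand names presumably map to provider models per the added MODELS.md, which is not shown in this diff:

    const model = ModelMix.new({ config: { debug: true } })
        .scout({ config: { temperature: 0 } })
        .o4mini()
        .addText('Name and capital of 3 South American countries.');

    // The example object shapes the structured response.
    console.log(await model.json({ countries: [{ name: '', capital: '' }] }));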
package/demo/stream.mjs
CHANGED
@@ -1,77 +1,16 @@
 import 'dotenv/config'
+import { ModelMix } from '../index.js';

-
-
-const env = process.env;
-
-const mmix = new ModelMix({
-    options: {
-        max_tokens: 100,
-    },
-    config: {
-        system: 'You are ALF from Melmac.',
-        max_history: 2
-    }
-});
-
-mmix.attach(new MixOpenAI({
-    config: {
-        apiKey: env.OPENAI_API_KEY,
-    }
-}));
-
-mmix.attach(new MixAnthropic({ config: { apiKey: env.ANTHROPIC_API_KEY } }));
-
-mmix.attach(new MixPerplexity({
-    config: {
-        apiKey: env.PPLX_API_KEY
-    },
-    system: "You are my personal assistant."
-}));
-
-mmix.attach(new MixOllama({
-    config: {
-        url: 'http://localhost:11434/api/chat',
-        prefix: ['openhermes2'],
-        system: 'You are ALF, soy de Melmac.',
-    },
-    options: {
-        temperature: 0,
-    }
-}));
-
-mmix.attach(new MixOllama({
-    config: {
-        url: 'http://localhost:11434/api/chat',
-        prefix: ['llava'],
-    },
-    options: {
-        temperature: 0,
-    }
-}));
-
-
-await mmix.create('gpt-4o')
-    .addImage('./watson.jpg')
-    .addText('describe')
-    .stream((data) => { console.log(data.message); });
-
-await mmix.create('claude-3-haiku-20240307')
-    .addImage('./watson.jpg')
+await ModelMix.new().gpt41nano()
+    .addImageFromUrl('https://pbs.twimg.com/media/F6-GsjraAAADDGy?format=jpg')
     .addText('describe')
     .stream((data) => { console.log(data.message); });

-await
-.
+await ModelMix.new().haiku35()
+    .addImageFromUrl('https://pbs.twimg.com/media/F6-GsjraAAADDGy?format=jpg')
     .addText('describe')
     .stream((data) => { console.log(data.message); });

-await
+await ModelMix.new().sonar()
     .addText('Who is the president of salvador?')
     .stream((data) => { console.log(data.message); });
-
-await mmix.create('openhermes2-mistral:latest')
-    .addText('Who is the president of salvador?')
-    .stream((data) => { console.log(data.message); });
-
-console.log(r)
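
The reworked stream demo drops all manual provider attachment: a fresh ModelMix.new() per request, a shorthand model, and stream() with a callback whose data.message carries the partial output. A minimal sketch assuming that callback shape, which is taken from the demo rather than from documentation:

    await ModelMix.new().sonar()
        .addText('Who is the president of El Salvador?')
        .stream((data) => {
            // data.message carries the streamed text so far (per the demo's usage)
            process.stdout.write('\r' + data.message);
        });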
package/demo/together.mjs
CHANGED
@@ -1,25 +1,12 @@
 import 'dotenv/config'
-
 import { ModelMix, MixTogether } from '../index.js';

-const
-
-const mmix = new ModelMix({
-    options: {
-        max_tokens: 200,
-    },
-    config: {
-        system: 'You are ALF from Melmac.',
-        max_history: 2,
-        debug: true
-    }
-});
-
-mmix.attach(new MixTogether());
+const setup = { config: { system: "You are ALF from Melmac." } };

-let r =
+let r = ModelMix.new()
+    .attach('deepseek-ai/DeepSeek-R1', new MixTogether(setup))
     .addText('hi there')
     .addText('do you like cats?')
     .message();

-console.log(r);
+console.log(await r);
|