modelmix 2.6.8 → 2.7.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -2
- package/demo/custom.mjs +7 -15
- package/demo/together.mjs +22 -0
- package/index.js +35 -1
- package/package.json +8 -2
package/README.md
CHANGED

@@ -33,13 +33,14 @@ Here's a quick example to get you started:
 ANTHROPIC_API_KEY="your_anthropic_api_key"
 PPLX_API_KEY="your_perplexity_api_key"
 GROQ_API_KEY="your_groq_api_key"
+TOGETHER_API_KEY="your_together_api_key"
 ```

 2. **Create and configure your models**:

 ```javascript
 import 'dotenv/config';
-import { ModelMix, MixOpenAI, MixAnthropic, MixPerplexity, MixOllama } from 'modelmix';
+import { ModelMix, MixOpenAI, MixAnthropic, MixPerplexity, MixOllama, MixTogether } from 'modelmix';

 const env = process.env;

@@ -58,7 +59,7 @@ Here's a quick example to get you started:
 mmix.replace({ '{name}': 'ALF' });

 mmix.attach(new MixOpenAI({ config: { apiKey: env.OPENAI_API_KEY } }));
-mmix.attach(new MixAnthropic(
+mmix.attach(new MixAnthropic()); // it will use the default apiKey from process.env
 mmix.attach(new MixPerplexity({
     config: {
         apiKey: env.PPLX_API_KEY
@@ -76,6 +77,7 @@ Here's a quick example to get you started:
         temperature: 0.5,
     }
 }));
+mmix.attach(new MixTogether());
 ```

 3. **Generate responses from different models**:

@@ -121,6 +123,14 @@ Here's a quick example to get you started:
     .addText("What is the predominant color?")
     .stream((data) => { console.log(data.message); });
 ```
+
+#### Together AI (meta-llama/Llama-3.2-3B-Instruct-Turbo)
+```javascript
+const together = mmix.create('meta-llama/Llama-3.2-3B-Instruct-Turbo', { options: { temperature: 0.7 } });
+together.addText('What are the main differences between Python and JavaScript?');
+const comparison = await together.message();
+console.log(comparison);
+```
 4. Find the files for this example at: [/ModelMix/demo](https://github.com/clasen/ModelMix/tree/master/demo).

 ## 🔄 Templating Methods

@@ -346,6 +356,16 @@ new MixLMStudio(args = { config: {}, options: {} })
 - `url`: The endpoint URL to which the model sends requests.
 - **options**: Default options for Ollama model instances.

+### MixTogether Class Overview
+
+```javascript
+new MixTogether(args = { config: {}, options: {} })
+```
+
+- **args**: Configuration object with `config` and `options` properties.
+- **config**: Specific configuration settings for Together AI, including the `apiKey`.
+- **options**: Default options for Together AI model instances.
+
 ## 🤝 Contributing

 Contributions are welcome! If you find any issues or have suggestions for improvement, please open an issue or submit a pull request on the [GitHub repository](https://github.com/clasen/ModelMix).
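The README changes above introduce the new `MixTogether` provider. As a rough sanity check of how the documented pieces fit together, here is a minimal sketch (not part of the package or this diff) that combines the new environment variable, import, attach call, and class overview; the `ModelMix` constructor arguments simply mirror the demo files:

```javascript
import 'dotenv/config';
import { ModelMix, MixTogether } from 'modelmix';

const env = process.env;

// Constructor arguments mirror demo/together.mjs; adjust as needed.
const mmix = new ModelMix({
    options: { max_tokens: 200 },
    config: { system: 'You are ALF from Melmac.' }
});

// Per the MixTogether class overview, an explicit apiKey can be passed via config;
// omitting it falls back to process.env.TOGETHER_API_KEY (see index.js below).
mmix.attach(new MixTogether({ config: { apiKey: env.TOGETHER_API_KEY } }));

const together = mmix.create('meta-llama/Llama-3.2-3B-Instruct-Turbo', { options: { temperature: 0.7 } });
together.addText('What are the main differences between Python and JavaScript?');
console.log(await together.message());
```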
package/demo/custom.mjs
CHANGED

@@ -1,8 +1,6 @@
 import 'dotenv/config'

-import { ModelMix, MixCustom } from '../index.js';
-
-const env = process.env;
+import { ModelMix, MixCustom, MixTogether } from '../index.js';

 const mmix = new ModelMix({
     options: {
@@ -10,19 +8,13 @@ const mmix = new ModelMix({
     },
     config: {
         system: 'You are ALF from Melmac.',
-        max_history: 2
+        max_history: 2,
+        debug: true
     }
 });

-mmix.attach(new
-    config: {
-        url: 'https://api.perplexity.ai/chat/completions',
-        prefix: ["pplx", "llama", "mixtral"],
-    },
-    headers: {
-        'authorization': `Bearer ${env.PPLX_API_KEY}`
-    }
-}));
+mmix.attach(new MixTogether());

-
-
+let r = mmix.create('NousResearch/Hermes-3-Llama-3.1-405B-Turbo').addText('hi there');
+r = await r.addText('do you like cats?').message();
+console.log(r);
package/demo/together.mjs
ADDED

@@ -0,0 +1,22 @@
+import 'dotenv/config'
+
+import { ModelMix, MixTogether } from '../index.js';
+
+const env = process.env;
+
+const mmix = new ModelMix({
+    options: {
+        max_tokens: 200,
+    },
+    config: {
+        system: 'You are ALF from Melmac.',
+        max_history: 2,
+        debug: true
+    }
+});
+
+mmix.attach(new MixTogether());
+
+let r = mmix.create('NousResearch/Hermes-3-Llama-3.1-405B-Turbo').addText('hi there')
+r = await r.addText('do you like cats?').message()
+console.log(r)
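The new demo uses `message()` to get a single reply. The README also shows a `stream()` callback for another provider; assuming it behaves the same for models routed through `MixTogether`, a streamed variant of the demo would look roughly like this (a sketch, not a file shipped in the package):

```javascript
import 'dotenv/config';
import { ModelMix, MixTogether } from '../index.js';

const mmix = new ModelMix({
    options: { max_tokens: 200 },
    config: { system: 'You are ALF from Melmac.', max_history: 2, debug: true }
});

mmix.attach(new MixTogether());

// stream() is taken from the README example for another provider; it is assumed
// here to work the same way and to receive partial messages as they arrive.
await mmix.create('NousResearch/Hermes-3-Llama-3.1-405B-Turbo')
    .addText('do you like cats?')
    .stream((data) => { console.log(data.message); });
```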
package/index.js
CHANGED

@@ -595,4 +595,38 @@ class MixGroq extends MixCustom {
     }
 }

-
+class MixTogether extends MixCustom {
+    getDefaultConfig(customConfig) {
+        return super.getDefaultConfig({
+            url: 'https://api.together.xyz/v1/chat/completions',
+            prefix: ["meta-llama", "google", "NousResearch"],
+            apiKey: process.env.TOGETHER_API_KEY,
+            ...customConfig
+        });
+    }
+
+    getDefaultOptions(customOptions) {
+        return {
+            stop: ["<|eot_id|>", "<|eom_id|>"],
+            ...customOptions
+        };
+    }
+
+    convertMessages(messages) {
+        return messages.map(message => {
+            if (message.content instanceof Array) {
+                message.content = message.content.map(content => content.text).join("\n\n");
+            }
+            return message;
+        });
+    }
+
+    create(args = { config: {}, options: {} }) {
+        args.options.messages = [{ role: 'system', content: args.config.system }, ...args.options.messages || []];
+        args.options.messages = this.convertMessages(args.options.messages);
+
+        return super.create(args);
+    }
+}
+
+module.exports = { MixCustom, ModelMix, MixAnthropic, MixOpenAI, MixPerplexity, MixOllama, MixLMStudio, MixGroq, MixTogether };
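`MixTogether` is a thin subclass of `MixCustom`: it points `url` at the Together endpoint, routes model names by `prefix`, reads the key from `TOGETHER_API_KEY`, and flattens array message content into plain text before delegating to `MixCustom`. The same pattern the old demo/custom.mjs used for Perplexity still applies to any other OpenAI-compatible endpoint; a rough sketch follows, where the endpoint URL, prefix, and environment variable are placeholders, not part of the package:

```javascript
import 'dotenv/config';
import { ModelMix, MixCustom } from 'modelmix';

const mmix = new ModelMix({
    options: { max_tokens: 200 },
    config: { system: 'You are ALF from Melmac.' }
});

// Shape follows the MixCustom attach removed from demo/custom.mjs; the values
// below are hypothetical and only illustrate the config/headers layout.
mmix.attach(new MixCustom({
    config: {
        url: 'https://api.example.com/v1/chat/completions', // hypothetical endpoint
        prefix: ["example"],                                 // model names starting with "example" route here
    },
    headers: {
        'authorization': `Bearer ${process.env.EXAMPLE_API_KEY}` // hypothetical env var
    }
}));
```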
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "modelmix",
-  "version": "2.6.8",
+  "version": "2.7.2",
   "description": "🧬 ModelMix - Unified API for Diverse AI LLM.",
   "main": "index.js",
   "repository": {
@@ -28,7 +28,13 @@
     "4o",
     "ollama",
     "lmstudio",
-    "
+    "together",
+    "gpt-o1",
+    "gpt-o1-mini",
+    "nousresearch",
+    "hermes",
+    "bottleneck",
+    "clasen"
   ],
   "author": "Martin Clasen",
   "license": "MIT",