modelmix 3.2.2 → 3.3.2

This diff shows the contents of the two publicly released package versions as they appear in their public registry; it is provided for informational purposes only.
package/demo/lmstudio.mjs CHANGED
@@ -23,7 +23,7 @@ console.log(model.config)
  mmix.attach(model);
 
 
- const LMS = mmix.create('Orenguteng/Llama-3-8B-Lexi-Uncensored-GGUF');
+ const LMS = mmix.attach('Orenguteng/Llama-3-8B-Lexi-Uncensored-GGUF', model);
  console.log(await LMS
      .addImage('./watson.jpg')
      .addText('describir')
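The hunk cuts off mid-chain, so for context, this is roughly how the call reads after the change. It is only a sketch: `mmix` and the LM Studio model handle `model` come from earlier in lmstudio.mjs (outside this hunk), and the closing `.message()` is assumed.

// Sketch of the 3.3.x call shape; `mmix` and `model` are created earlier in the demo.
const LMS = mmix.attach('Orenguteng/Llama-3-8B-Lexi-Uncensored-GGUF', model);

console.log(await LMS
    .addImage('./watson.jpg')   // local image attached to the prompt
    .addText('describir')       // "describe" in Spanish, kept as in the demo
    .message());                // closing call assumed; it sits outside the hunk

The practical difference is that the model identifier is now bound to the LM Studio handle through attach() instead of being resolved by name through mmix.create().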
package/demo/parallel.mjs CHANGED
@@ -12,9 +12,9 @@ const mix = new ModelMix({
  },
  debug: true,
  }
- });
+ })
 
- mix.attach(new MixOpenAI());
+ mix.gpt41nano();
 
  // Function to create a promise that resolves after a random time
  const randomDelay = () => new Promise(resolve => setTimeout(resolve, Math.random() * 2000 + 1000));
@@ -24,7 +24,7 @@ async function makeRequest(id) {
  const start = Date.now();
  console.log(`Starting request ${id}`);
 
- const message = await mix.create('gpt-4o-mini')
+ const message = await mix
      .addText(`Generate an interesting fact about the number ${id}.`)
      .message();
 
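Both hunks show only fragments of parallel.mjs, so here is a sketch of how a single request reads after the change; the `mix` instance and the code that fires the requests in parallel are assumed from the unchanged parts of the demo.

// Sketch under the 3.3.x API: the model shortcut is set once on the instance,
// replacing mix.attach(new MixOpenAI()) plus mix.create('gpt-4o-mini').
mix.gpt41nano();

async function makeRequest(id) {
    const start = Date.now();
    console.log(`Starting request ${id}`);

    const message = await mix
        .addText(`Generate an interesting fact about the number ${id}.`)
        .message();

    console.log(`Request ${id} took ${Date.now() - start} ms`); // timing use of `start` assumed
    return message;
}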
package/demo/short.mjs CHANGED
@@ -9,20 +9,26 @@ const setup = {
  }
  };
 
- const result = await ModelMix.create(setup)
-     .sonnet37think()
-     .o4mini({ config: { temperature: 0 } })
-     .gpt41nano()
-     .grok3mini()
-     .gemini25flash()
+ const result = await ModelMix.new(setup)
+     .scout({ config: { temperature: 0 } })
      .addText("What's your name?")
      .message();
 
  console.log(result);
 
- const jsonResult = await ModelMix.create({ config: { debug: false } })
-     .sonnet37()
+ const model = await ModelMix.new({ config: { debug: true } })
+     .scout({ config: { temperature: 0 } })
+     .o4mini()
+     .sonnet37think()
+     .gpt45()
+     .gemini25flash()
      .addText("Name and capital of 3 South American countries.")
-     .json({ countries: [{ name: "", capital: "" }] });
 
- console.log(jsonResult);
+ const jsonResult = await model.json({ countries: [{ name: "", capital: "" }] });
+
+ console.log(jsonResult);
+
+ model.addText("Name and capital of 1 South American countries.")
+
+ const jsonResult2 = await model.json({ countries: [{ name: "", capital: "" }] });
+ console.log(jsonResult2);
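Two things change here: the entry point is ModelMix.new() rather than ModelMix.create(), and the awaited builder can be held in a variable, fed more input, and asked for another structured answer. Chaining several model shortcuts (scout, o4mini, sonnet37think, gpt45, gemini25flash) appears to register an ordered list of candidates; that reading is inferred from the demo, not stated in this diff. A compressed sketch, assuming the relevant provider keys are loaded via dotenv:

// Compressed sketch of the reusable builder; provider keys are assumed to be in the environment.
const model = await ModelMix.new({ config: { debug: true } })
    .scout({ config: { temperature: 0 } })   // first candidate
    .o4mini()                                // further shortcuts appear to act as alternatives (assumed)
    .addText("Name and capital of 3 South American countries.");

console.log(await model.json({ countries: [{ name: "", capital: "" }] }));

// The same builder keeps its state and can answer a follow-up.
model.addText("Name and capital of 1 South American country.");
console.log(await model.json({ countries: [{ name: "", capital: "" }] }));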
package/demo/stream.mjs CHANGED
@@ -1,77 +1,16 @@
  import 'dotenv/config'
+ import { ModelMix } from '../index.js';
 
- import { ModelMix, MixOpenAI, MixAnthropic, MixPerplexity, MixOllama } from '../index.js';
-
- const env = process.env;
-
- const mmix = new ModelMix({
-     options: {
-         max_tokens: 100,
-     },
-     config: {
-         system: 'You are ALF from Melmac.',
-         max_history: 2
-     }
- });
-
- mmix.attach(new MixOpenAI({
-     config: {
-         apiKey: env.OPENAI_API_KEY,
-     }
- }));
-
- mmix.attach(new MixAnthropic({ config: { apiKey: env.ANTHROPIC_API_KEY } }));
-
- mmix.attach(new MixPerplexity({
-     config: {
-         apiKey: env.PPLX_API_KEY
-     },
-     system: "You are my personal assistant."
- }));
-
- mmix.attach(new MixOllama({
-     config: {
-         url: 'http://localhost:11434/api/chat',
-         prefix: ['openhermes2'],
-         system: 'You are ALF, soy de Melmac.',
-     },
-     options: {
-         temperature: 0,
-     }
- }));
-
- mmix.attach(new MixOllama({
-     config: {
-         url: 'http://localhost:11434/api/chat',
-         prefix: ['llava'],
-     },
-     options: {
-         temperature: 0,
-     }
- }));
-
-
- await mmix.create('gpt-4o')
+ await ModelMix.new().gpt41nano()
      .addImageFromUrl('https://pbs.twimg.com/media/F6-GsjraAAADDGy?format=jpg')
      .addText('describe')
      .stream((data) => { console.log(data.message); });
 
- await mmix.create('claude-3-haiku-20240307')
+ await ModelMix.new().haiku35()
      .addImageFromUrl('https://pbs.twimg.com/media/F6-GsjraAAADDGy?format=jpg')
      .addText('describe')
      .stream((data) => { console.log(data.message); });
 
- await mmix.create('llava:latest')
-     .addImageFromUrl('https://pbs.twimg.com/media/F6-GsjraAAADDGy?format=jpg')
-     .addText('describe')
-     .stream((data) => { console.log(data.message); });
-
- await mmix.create('pplx-70b-online')
+ await ModelMix.new().sonar()
      .addText('Who is the president of salvador?')
      .stream((data) => { console.log(data.message); });
-
- await mmix.create('openhermes2-mistral:latest')
-     .addText('Who is the president of salvador?')
-     .stream((data) => { console.log(data.message); });
-
- console.log(r)
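After the rewrite each streaming call builds its own one-off chain, so no shared instance or explicit provider attachment is needed. A self-contained sketch of one such call; the import path and the presence of the provider key (an Anthropic key for haiku35() here) in the environment are assumptions.

// Self-contained sketch of a 3.3.x streaming call.
import 'dotenv/config'
import { ModelMix } from 'modelmix'; // the demos import '../index.js' inside the repo

await ModelMix.new().haiku35()
    .addImageFromUrl('https://pbs.twimg.com/media/F6-GsjraAAADDGy?format=jpg')
    .addText('describe')
    .stream((data) => {
        console.log(data.message); // each chunk exposes a `message` field, as used in the demo
    });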
package/demo/together.mjs CHANGED
@@ -1,25 +1,12 @@
  import 'dotenv/config'
-
  import { ModelMix, MixTogether } from '../index.js';
 
- const env = process.env;
-
- const mmix = new ModelMix({
-     options: {
-         max_tokens: 200,
-     },
-     config: {
-         system: 'You are ALF from Melmac.',
-         max_history: 2,
-         debug: true
-     }
- });
-
- mmix.attach(new MixTogether());
+ const setup = { config: { system: "You are ALF from Melmac." } };
 
- let r = mmix.create('NousResearch/Hermes-3-Llama-3.1-405B-Turbo')
+ let r = ModelMix.new()
+     .attach('deepseek-ai/DeepSeek-R1', new MixTogether(setup))
      .addText('hi there')
      .addText('do you like cats?')
      .message();
 
- console.log(r);
+ console.log(await r);
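Two details of the new form are easy to miss: a provider can be attached inline through .attach(modelId, provider) on the chain, and `r` now holds the promise returned by .message(), which is why the await moves into console.log. A sketch of the equivalent call with the await on the chain itself; it assumes MixTogether picks up the Together API key from the environment loaded by dotenv.

// Equivalent sketch with the await placed on the chain.
const setup = { config: { system: "You are ALF from Melmac." } };

const reply = await ModelMix.new()
    .attach('deepseek-ai/DeepSeek-R1', new MixTogether(setup))
    .addText('hi there')
    .addText('do you like cats?')
    .message();

console.log(reply);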