modelmix 4.2.6 → 4.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -1
- package/demo/package-lock.json +0 -221
- package/demo/package.json +4 -2
- package/demo/parallel-strategy.js +479 -0
- package/demo/rlm-basic.js +183 -0
- package/demo/rlm-fast.js +239 -0
- package/demo/rlm-simple.js +271 -0
- package/demo/round-robin.js +26 -0
- package/index.js +112 -71
- package/package.json +3 -3
package/demo/rlm-fast.js
ADDED

@@ -0,0 +1,239 @@
process.loadEnvFile();
import { ModelMix } from '../index.js';
import ivm from 'isolated-vm';

console.log('🧬 ModelMix - IVM + mmix Callback Demo');

// ───────────────────────────────────────────────────────────────────────────
// UTILITIES: Describe data for the model (without exposing raw content)
// ───────────────────────────────────────────────────────────────────────────

function describeData(data, name = 'data') {
  const type = Array.isArray(data) ? 'array' : typeof data;

  if (Array.isArray(data)) {
    const itemDescriptions = data.map((item, i) => {
      if (typeof item === 'string') return `[${i}]: string, ${item.length} chars`;
      if (typeof item === 'object') return `[${i}]: object with keys [${Object.keys(item).join(', ')}]`;
      return `[${i}]: ${typeof item}`;
    });
    return `Variable '${name}' is an array with ${data.length} elements:\n ${itemDescriptions.join('\n ')}`;
  }

  if (typeof data === 'object' && data !== null) {
    const keys = Object.keys(data);
    return `Variable '${name}' is an object with keys: [${keys.join(', ')}]`;
  }

  if (typeof data === 'string') {
    return `Variable '${name}' is a string with ${data.length} characters`;
  }

  return `Variable '${name}' is of type ${type}`;
}

// ───────────────────────────────────────────────────────────────────────────
// IVM CONTEXT: Setup isolated environment with mmix callback
// ───────────────────────────────────────────────────────────────────────────

async function createIvmContext(isolate, contextData, mmixInstance) {
  const context = await isolate.createContext();
  const jail = context.global;

  // Expose context data as variables
  for (const [key, value] of Object.entries(contextData)) {
    await jail.set(key, new ivm.ExternalCopy(value).copyInto());
  }

  // Expose mmix as async callback
  // The callback receives: system prompt, user message, and output schema (as JSON string)
  await jail.set('__mmixCallback', new ivm.Reference(async (system, message, outputJson) => {
    const output = JSON.parse(outputJson);
    const result = await mmixInstance.new()
      .gpt41nano()
      .setSystem(system)
      .addText(message)
      .json(output, output);
    return new ivm.ExternalCopy(JSON.stringify(result)).copyInto();
  }));

  // Create async wrapper for mmix inside the isolate
  await context.eval(`
    const mmix = {
      async query(system, message, output) {
        const outputJson = JSON.stringify(output);
        const resultJson = await __mmixCallback.apply(undefined, [system, message, outputJson], { result: { promise: true } });
        return JSON.parse(resultJson);
      }
    };
  `);

  return context;
}

// ───────────────────────────────────────────────────────────────────────────
// MAIN: Execute model-generated code in IVM with mmix access
// ───────────────────────────────────────────────────────────────────────────

async function runIvmWithMmix({ task, contextData, model, maxChunkSize = 1500 }) {
  const isolate = new ivm.Isolate({ memoryLimit: 128 });

  try {
    // Build data description for the model
    const dataDescriptions = Object.entries(contextData)
      .map(([key, value]) => describeData(value, key))
      .join('\n');

    // System prompt explaining the environment
    const systemPrompt = `You are a code generator. You have access to an isolated JavaScript environment with:

AVAILABLE DATA:
${dataDescriptions}

AVAILABLE API:
- mmix.query(system, message, outputSchema): async function that calls an LLM. Returns a JSON object.
  - system: the system prompt for the LLM
  - message: the user message/query
  - outputSchema: object defining expected output structure (keys are variable names, values are descriptions)

EXAMPLES:
// Single query - translation
const result = await mmix.query(
  'You are a professional translator',
  'Translate to Spanish: Hello world',
  { translation: 'text translated to spanish' }
);
// Returns: { translation: 'Hola mundo' }

// Single query - summary with metadata
const summary = await mmix.query(
  'You are a summarizer',
  'Summarize: ' + text,
  { summary: 'brief summary of the text', wordCount: 'number of words in summary' }
);
// Returns: { summary: '...', wordCount: 42 }

// Parallel queries (recommended for multiple independent operations)
const results = await Promise.all([
  mmix.query('Translator', 'Translate to Spanish: ' + paragraphs[0], { translation: 'text translated to spanish' }),
  mmix.query('Translator', 'Translate to Spanish: ' + paragraphs[1], { translation: 'text translated to spanish' })
]);

// Combine results
return results.map(r => r.translation).join('\\n\\n');

PARALLEL PROCESSING RULES:
- When processing large texts in parallel, split into chunks of maximum ${maxChunkSize} characters
- Use Promise.all() to process all chunks simultaneously
- Example splitting a text:
const chunkSize = ${maxChunkSize};
const chunks = [];
for (let i = 0; i < input.length; i += chunkSize) {
  chunks.push(input.substring(i, i + chunkSize));
}
const results = await Promise.all(
  chunks.map(chunk => mmix.query('System prompt', chunk, outputSchema))
);
return results.map(r => r.translation).join('');

IMPORTANT:
- Write an async IIFE that returns the final result directly (string, array, or simple object)
- mmix.query() returns JSON objects, extract the fields you need (e.g., result.translation)
- Use Promise.all for parallel operations when possible
- When splitting text, prefer to split at natural boundaries (paragraphs, sentences) when near the ${maxChunkSize} limit
- Return ONLY the code, no explanations or markdown`;

    model.setSystem(systemPrompt);

    // Request code generation
    const code = await model
      .addText(`Task: ${task}\n\nGenerate the JavaScript code. Return ONLY the async IIFE code, nothing else.`)
      .message();

    console.log('\nGenerated code:');
    console.log('─'.repeat(60));
    console.log(code);
    console.log('─'.repeat(60));

    // Create IVM context with data and mmix callback
    const context = await createIvmContext(isolate, contextData, model);

    // Execute the generated code (wrap in JSON.stringify for safe transfer)
    console.log('\n⚡ Executing in IVM...');
    const wrappedCode = `(async () => {
      const __result = await ${code};
      return JSON.stringify(__result);
    })()`;

    const resultJson = await context.eval(wrappedCode, {
      timeout: 60000, // 60s timeout for LLM calls
      promise: true
    });

    // Parse result (handle both primitives and objects)
    try {
      return JSON.parse(resultJson);
    } catch {
      return resultJson;
    }

  } finally {
    isolate.dispose();
  }
}

// ───────────────────────────────────────────────────────────────────────────
// DEMO: Summarize paragraphs example
// ───────────────────────────────────────────────────────────────────────────

async function demo() {
  console.log('\n=== Demo: Summarize Paragraphs via IVM + mmix ===\n');

  // Sample data: 6 paragraphs (the model won't see the content, only metadata)
  const paragraphs = [
    `Artificial intelligence has transformed numerous industries over the past decade. From healthcare diagnostics to autonomous vehicles, AI systems now perform tasks that were once thought to require human intelligence. Machine learning algorithms can analyze vast amounts of data, identify patterns, and make predictions with remarkable accuracy.`,

    `Climate change represents one of the most pressing challenges facing humanity today. Rising global temperatures are causing more frequent extreme weather events, melting polar ice caps, and threatening biodiversity across the planet. Scientists warn that immediate action is needed to reduce greenhouse gas emissions and transition to renewable energy sources.`,

    `The evolution of remote work has fundamentally changed how businesses operate. Companies have discovered that distributed teams can be highly productive while offering employees better work-life balance. However, this shift also presents challenges in maintaining company culture, ensuring effective communication, and managing across different time zones.`,

    `Artificial intelligence has transformed numerous industries over the past decade. From healthcare diagnostics to autonomous vehicles, AI systems now perform tasks that were once thought to require human intelligence. Machine learning algorithms can analyze vast amounts of data, identify patterns, and make predictions with remarkable accuracy.`,

    `Climate change represents one of the most pressing challenges facing humanity today. Rising global temperatures are causing more frequent extreme weather events, melting polar ice caps, and threatening biodiversity across the planet. Scientists warn that immediate action is needed to reduce greenhouse gas emissions and transition to renewable energy sources.`,

    `The evolution of remote work has fundamentally changed how businesses operate. Companies have discovered that distributed teams can be highly productive while offering employees better work-life balance. However, this shift also presents challenges in maintaining company culture, ensuring effective communication, and managing across different time zones.`
  ];

  // Create base mmix instance for the callbacks
  const model = ModelMix.new({ config: { debug: 2, bottleneck: {} } })
    .gpt52({ options: { reasoning_effort: 'none', verbosity: null } })
    .gpt41nano()
    .gemini3flash();

  // Run the IVM task
  const result = await runIvmWithMmix({
    task: 'Translate each paragraph to latin spanish, then return all joined by double newlines.',
    contextData: { paragraphs },
    model
  });

  console.log('\n✅ Final result from IVM:');
  console.log('─'.repeat(60));
  console.log(result);
  console.log('─'.repeat(60));

  return result;
}

// ───────────────────────────────────────────────────────────────────────────
// RUN
// ───────────────────────────────────────────────────────────────────────────

try {
  await demo();
  console.log('\n✅ Demo completed successfully');
} catch (error) {
  console.error('❌ Error:', error);
}

package/demo/rlm-simple.js
ADDED

@@ -0,0 +1,271 @@
process.loadEnvFile();
import { ModelMix } from '../index.js';
import ivm from 'isolated-vm';

console.log('🧬 ModelMix - RLM (Recursive Language Model) Demo');
console.log('Based on: https://arxiv.org/html/2512.24601v1\n');

// Simulate a long document that would be too large to pass in directly
const LONG_DOCUMENT = `
USERS DATABASE - System Report 2025
====================================

USER_001: Alice Johnson
Role: Senior Developer
Department: Engineering
Location: San Francisco, CA
Skills: JavaScript, Python, React, Node.js
Projects: 15 completed, 3 in progress
Performance Score: 9.2/10
Last Active: 2025-01-09
Notes: Excellent team leader, mentors junior developers

USER_002: Bob Smith
Role: Data Scientist
Department: Analytics
Location: New York, NY
Skills: Python, R, TensorFlow, SQL
Projects: 8 completed, 2 in progress
Performance Score: 8.7/10
Last Active: 2025-01-08
Notes: Strong statistical background, innovative approaches

USER_003: Carol White
Role: Product Manager
Department: Product
Location: Austin, TX
Skills: Agile, Jira, User Research, SQL
Projects: 22 completed, 5 in progress
Performance Score: 9.5/10
Last Active: 2025-01-09
Notes: Exceptional stakeholder management

USER_004: David Chen
Role: Junior Developer
Department: Engineering
Location: San Francisco, CA
Skills: JavaScript, HTML, CSS, Git
Projects: 3 completed, 1 in progress
Performance Score: 7.8/10
Last Active: 2025-01-07
Notes: Fast learner, needs more experience with backend

USER_005: Emma Davis
Role: Senior Data Scientist
Department: Analytics
Location: Boston, MA
Skills: Python, Machine Learning, PyTorch, AWS
Projects: 12 completed, 4 in progress
Performance Score: 9.0/10
Last Active: 2025-01-09
Notes: Published 5 research papers, conference speaker

USER_006: Frank Martinez
Role: DevOps Engineer
Department: Engineering
Location: Seattle, WA
Skills: Docker, Kubernetes, AWS, Terraform
Projects: 18 completed, 2 in progress
Performance Score: 8.9/10
Last Active: 2025-01-09
Notes: Reduced deployment time by 60%

USER_007: Grace Lee
Role: UX Designer
Department: Design
Location: Los Angeles, CA
Skills: Figma, User Research, Prototyping, CSS
Projects: 25 completed, 6 in progress
Performance Score: 9.3/10
Last Active: 2025-01-08
Notes: Award-winning designer, excellent user empathy

USER_008: Henry Taylor
Role: Junior Data Analyst
Department: Analytics
Location: Chicago, IL
Skills: SQL, Excel, Tableau, Python
Projects: 5 completed, 2 in progress
Performance Score: 7.5/10
Last Active: 2025-01-06
Notes: Good attention to detail, improving Python skills
`;

// Create the isolate for the REPL
const isolate = new ivm.Isolate({ memoryLimit: 256 });

// Recursive-call counter (to demonstrate the recursion)
let recursionDepth = 0;
const maxDepth = 3;

async function rlmExample() {
  console.log('=== RLM: Long Document Analysis ===\n');
  console.log(`Document: ${LONG_DOCUMENT.length} characters`);
  console.log(`🎯 Task: Find Engineering users with a score > 8.5\n`);

  const gptArgs = { options: { reasoning_effort: "none", verbosity: null } };
  const mmix = ModelMix.new({ config: { debug: false, max_history: 15 } })
    .gpt41nano()
    .gpt52(gptArgs)
    .gemini3flash()
    .setSystem(`You are an RLM (Recursive Language Model) agent.

KEY PRINCIPLE: Instead of processing the entire document directly, you can:
1. Inspect the document structure using code
2. Break it into smaller chunks programmatically
3. Process each chunk recursively if needed

You have access to:
- inspect_document: Examine parts of the document via JavaScript. The DOCUMENT variable is available. Write code that returns a value (e.g., "return DOCUMENT.length" or just "DOCUMENT.length").
- recursive_call: Make a recursive call to yourself with a focused sub-task

Current recursion depth: ${recursionDepth}/${maxDepth}`);

  // Shared variables
  let inspectionResults = [];

  // Tool 1: Inspect the document programmatically
  mmix.addTool({
    name: "inspect_document",
    description: "Execute JavaScript code to inspect and analyze the document. The document is available as 'DOCUMENT' variable. You can use string methods, regex, parsing, etc. The code should return a value (not just log it).",
    inputSchema: {
      type: "object",
      properties: {
        code: {
          type: "string",
          description: "JavaScript code to execute. The DOCUMENT variable contains the full text. Your code should return a value. Example: 'return DOCUMENT.split(\"\\n\").length' or just 'DOCUMENT.length'"
        },
        explanation: {
          type: "string",
          description: "Brief explanation of what this code does"
        }
      },
      required: ["code", "explanation"]
    }
  }, async ({ code, explanation }) => {
    console.log(`\n[Depth ${recursionDepth}] Inspecting document: ${explanation}`);
    console.log('─'.repeat(60));
    console.log(code);
    console.log('─'.repeat(60));

    try {
      const context = await isolate.createContext();

      // Inject the document into the context as a global variable
      const jail = context.global;
      await jail.set('DOCUMENT', LONG_DOCUMENT);

      // Run the code and serialize the result to JSON inside the context
      const wrappedCode = `
        (function() {
          const result = (function() {
            ${code}
          })();
          return JSON.stringify(result);
        })()
      `;

      const script = await isolate.compileScript(wrappedCode);
      const jsonResult = await script.run(context, { timeout: 10000 });

      // Parse the JSON result
      const parsedResult = JSON.parse(jsonResult);

      inspectionResults.push({ explanation, result: parsedResult });

      console.log('✅ Result:', JSON.stringify(parsedResult, null, 2));
      return JSON.stringify(parsedResult);
    } catch (error) {
      console.log('❌ Error:', error.message);
      return `Error: ${error.message}`;
    }
  });

  // Tool 2: Recursive call (simplified for this example)
  mmix.addTool({
    name: "recursive_call",
    description: "Make a recursive call to the RLM with a focused sub-task and optional document chunk. Use this to decompose complex queries into simpler ones.",
    inputSchema: {
      type: "object",
      properties: {
        sub_task: {
          type: "string",
          description: "The focused sub-task to solve recursively"
        },
        document_chunk: {
          type: "string",
          description: "Optional: A smaller chunk of the document to focus on"
        }
      },
      required: ["sub_task"]
    }
  }, async ({ sub_task, document_chunk }) => {
    recursionDepth++;

    if (recursionDepth > maxDepth) {
      recursionDepth--;
      return `Maximum recursion depth reached (${maxDepth}). Please solve this sub-task directly.`;
    }

    console.log(`\n[Depth ${recursionDepth}] Recursive call:`);
    console.log(`Sub-task: ${sub_task}`);
    if (document_chunk) {
      console.log(`Chunk size: ${document_chunk.length} chars`);
    }
    console.log('─'.repeat(60));

    // Create a fresh instance for the recursive call
    const recursiveMmix = ModelMix.new({ config: { debug: false } })
      .gpt41nano()
      .setSystem(`You are processing a sub-task. Be concise and direct.
Recursion depth: ${recursionDepth}/${maxDepth}
${document_chunk ? 'Document chunk provided.' : 'No document chunk provided.'}`);

    if (document_chunk) {
      recursiveMmix.addText(`Document chunk:\n${document_chunk}\n\nTask: ${sub_task}`);
    } else {
      recursiveMmix.addText(sub_task);
    }

    const result = await recursiveMmix.message();

    recursionDepth--;

    console.log(`✅ [Depth ${recursionDepth + 1}] Result: ${result.substring(0, 100)}...`);
    return result;
  });

  // The main task
  mmix.addText(`
Using the RLM approach, find all users in the Engineering department with a Performance Score greater than 8.5.

APPROACH:
1. First, use inspect_document to understand the document structure
2. Use inspect_document to extract all user entries
3. Use inspect_document to filter users by department and score
4. Optionally use recursive_call if you need to process sub-tasks

Finally, provide a clear summary with the user names and their scores.
`);

  const result = await mmix.message();

  console.log('\n' + '='.repeat(60));
  console.log('FINAL ANSWER:');
  console.log('='.repeat(60));
  console.log(result);

  console.log('\nINSPECTION SUMMARY:');
  console.log(`Total inspections: ${inspectionResults.length}`);
  inspectionResults.forEach((r, i) => {
    console.log(`${i + 1}. ${r.explanation}`);
  });
}

try {
  await rlmExample();
  console.log('\n✅ RLM example completed');
} catch (error) {
  console.error('❌ Error:', error);
  console.error(error.stack);
}

package/demo/round-robin.js
ADDED

@@ -0,0 +1,26 @@
process.loadEnvFile();
import { ModelMix } from '../index.js';

console.log('\n=== Round Robin Simple Demo ===\n');

// Create instance with round robin enabled
const ai = ModelMix.new({
  config: {
    debug: 2, // Show which model is being used
    roundRobin: true
  },
  mix: { openrouter: false } // Exclude OpenRouter (free tier often rate-limited)
})
  .gptOss();

console.log('Making 6 requests with round robin enabled...\n');

// Make 6 requests to see rotation through all models (cerebras + groq)
for (let i = 1; i <= 6; i++) {
  const result = await ai.new()
    .addText(`Calculate ${i} * 2`)
    .message();
  console.log(`  Result: ${result.trim()}\n`);
}

console.log('✅ Demo completed!\n');