modelmix 3.8.2 → 3.8.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +5 -1
- package/README.md +8 -7
- package/demo/demo.mjs +8 -8
- package/demo/images.mjs +9 -0
- package/demo/img.png +0 -0
- package/demo/mcp-simple.mjs +166 -0
- package/demo/mcp-tools.mjs +344 -0
- package/index.js +284 -50
- package/mcp-tools.js +96 -0
- package/package.json +22 -7
- package/test/README.md +158 -0
- package/test/bottleneck.test.js +483 -0
- package/test/fallback.test.js +387 -0
- package/test/fixtures/data.json +36 -0
- package/test/fixtures/img.png +0 -0
- package/test/fixtures/template.txt +15 -0
- package/test/images.test.js +87 -0
- package/test/json.test.js +295 -0
- package/test/live.mcp.js +555 -0
- package/test/live.test.js +356 -0
- package/test/mocha.opts +5 -0
- package/test/setup.js +176 -0
- package/test/templates.test.js +473 -0
- package/test/test-runner.js +75 -0
|
@@ -0,0 +1,473 @@
|
|
|
1
|
+
const { expect } = require('chai');
|
|
2
|
+
const sinon = require('sinon');
|
|
3
|
+
const nock = require('nock');
|
|
4
|
+
const fs = require('fs');
|
|
5
|
+
const path = require('path');
|
|
6
|
+
const { ModelMix } = require('../index.js');
|
|
7
|
+
|
|
8
|
+
describe('Template and File Operations Tests', () => {
|
|
9
|
+
|
|
10
|
+
afterEach(() => {
|
|
11
|
+
nock.cleanAll();
|
|
12
|
+
sinon.restore();
|
|
13
|
+
});
|
|
14
|
+
|
|
15
|
+
describe('Template Replacement', () => {
|
|
16
|
+
let model;
|
|
17
|
+
|
|
18
|
+
beforeEach(() => {
|
|
19
|
+
model = ModelMix.new({
|
|
20
|
+
config: { debug: false }
|
|
21
|
+
});
|
|
22
|
+
});
|
|
23
|
+
|
|
24
|
+
it('should replace simple template variables', async () => {
|
|
25
|
+
model.gpt4o()
|
|
26
|
+
.replace({
|
|
27
|
+
'{{name}}': 'Alice',
|
|
28
|
+
'{{age}}': '30',
|
|
29
|
+
'{{city}}': 'New York'
|
|
30
|
+
})
|
|
31
|
+
.addText('Hello {{name}}, you are {{age}} years old and live in {{city}}.');
|
|
32
|
+
|
|
33
|
+
nock('https://api.openai.com')
|
|
34
|
+
.post('/v1/chat/completions')
|
|
35
|
+
.reply(function (uri, body) {
|
|
36
|
+
|
|
37
|
+
expect(body.messages[1].content[0].text).to.equal('Hello Alice, you are 30 years old and live in New York.');
|
|
38
|
+
|
|
39
|
+
return [200, {
|
|
40
|
+
choices: [{
|
|
41
|
+
message: {
|
|
42
|
+
role: 'assistant',
|
|
43
|
+
content: 'Template processed successfully'
|
|
44
|
+
}
|
|
45
|
+
}]
|
|
46
|
+
}];
|
|
47
|
+
});
|
|
48
|
+
|
|
49
|
+
const response = await model.message();
|
|
50
|
+
expect(response).to.include('Template processed successfully');
|
|
51
|
+
});
|
|
52
|
+
|
|
53
|
+
it('should handle multiple template replacements', async () => {
|
|
54
|
+
model.gpt4o()
|
|
55
|
+
.replace({ '{{greeting}}': 'Hello' })
|
|
56
|
+
.replace({ '{{name}}': 'Bob' })
|
|
57
|
+
.replace({ '{{action}}': 'welcome' })
|
|
58
|
+
.addText('{{greeting}} {{name}}, {{action}} to our platform!');
|
|
59
|
+
|
|
60
|
+
nock('https://api.openai.com')
|
|
61
|
+
.post('/v1/chat/completions')
|
|
62
|
+
.reply(function (uri, body) {
|
|
63
|
+
expect(body.messages[1].content[0].text).to.equal('Hello Bob, welcome to our platform!');
|
|
64
|
+
|
|
65
|
+
return [200, {
|
|
66
|
+
choices: [{
|
|
67
|
+
message: {
|
|
68
|
+
role: 'assistant',
|
|
69
|
+
content: 'Multiple templates replaced'
|
|
70
|
+
}
|
|
71
|
+
}]
|
|
72
|
+
}];
|
|
73
|
+
});
|
|
74
|
+
|
|
75
|
+
const response = await model.message();
|
|
76
|
+
expect(response).to.include('Multiple templates replaced');
|
|
77
|
+
});
|
|
78
|
+
|
|
79
|
+
it('should handle nested template objects', async () => {
|
|
80
|
+
model.gpt4o()
|
|
81
|
+
.replace({
|
|
82
|
+
'{{user_name}}': 'Charlie',
|
|
83
|
+
'{{user_role}}': 'admin',
|
|
84
|
+
'{{company_name}}': 'TechCorp',
|
|
85
|
+
'{{company_domain}}': 'techcorp.com'
|
|
86
|
+
})
|
|
87
|
+
.addText('User {{user_name}} with role {{user_role}} works at {{company_name}} ({{company_domain}})');
|
|
88
|
+
|
|
89
|
+
nock('https://api.openai.com')
|
|
90
|
+
.post('/v1/chat/completions')
|
|
91
|
+
.reply(function (uri, body) {
|
|
92
|
+
expect(body.messages[1].content[0].text).to.equal('User Charlie with role admin works at TechCorp (techcorp.com)');
|
|
93
|
+
|
|
94
|
+
return [200, {
|
|
95
|
+
choices: [{
|
|
96
|
+
message: {
|
|
97
|
+
role: 'assistant',
|
|
98
|
+
content: 'Nested templates working'
|
|
99
|
+
}
|
|
100
|
+
}]
|
|
101
|
+
}];
|
|
102
|
+
});
|
|
103
|
+
|
|
104
|
+
const response = await model.message();
|
|
105
|
+
expect(response).to.include('Nested templates working');
|
|
106
|
+
});
|
|
107
|
+
|
|
108
|
+
it('should preserve unreplaced templates', async () => {
|
|
109
|
+
model.gpt4o()
|
|
110
|
+
.replace({ '{{name}}': 'David' })
|
|
111
|
+
.addText('Hello {{name}}, your ID is {{user_id}} and status is {{status}}');
|
|
112
|
+
|
|
113
|
+
nock('https://api.openai.com')
|
|
114
|
+
.post('/v1/chat/completions')
|
|
115
|
+
.reply(function (uri, body) {
|
|
116
|
+
expect(body.messages[1].content[0].text).to.equal('Hello David, your ID is {{user_id}} and status is {{status}}');
|
|
117
|
+
|
|
118
|
+
return [200, {
|
|
119
|
+
choices: [{
|
|
120
|
+
message: {
|
|
121
|
+
role: 'assistant',
|
|
122
|
+
content: 'Partial template replacement'
|
|
123
|
+
}
|
|
124
|
+
}]
|
|
125
|
+
}];
|
|
126
|
+
});
|
|
127
|
+
|
|
128
|
+
const response = await model.message();
|
|
129
|
+
expect(response).to.include('Partial template replacement');
|
|
130
|
+
});
|
|
131
|
+
|
|
132
|
+
it('should handle empty and special character replacements', async () => {
|
|
133
|
+
model.gpt4o()
|
|
134
|
+
.replace({
|
|
135
|
+
'{{empty}}': '',
|
|
136
|
+
'{{special}}': 'Hello & "World" <test>',
|
|
137
|
+
'{{number}}': '42',
|
|
138
|
+
'{{boolean}}': 'true'
|
|
139
|
+
})
|
|
140
|
+
.addText('Empty: {{empty}}, Special: {{special}}, Number: {{number}}, Boolean: {{boolean}}');
|
|
141
|
+
|
|
142
|
+
nock('https://api.openai.com')
|
|
143
|
+
.post('/v1/chat/completions')
|
|
144
|
+
.reply(function (uri, body) {
|
|
145
|
+
expect(body.messages[1].content[0].text).to.equal('Empty: , Special: Hello & "World" <test>, Number: 42, Boolean: true');
|
|
146
|
+
|
|
147
|
+
return [200, {
|
|
148
|
+
choices: [{
|
|
149
|
+
message: {
|
|
150
|
+
role: 'assistant',
|
|
151
|
+
content: 'Special characters handled'
|
|
152
|
+
}
|
|
153
|
+
}]
|
|
154
|
+
}];
|
|
155
|
+
});
|
|
156
|
+
|
|
157
|
+
const response = await model.message();
|
|
158
|
+
expect(response).to.include('Special characters handled');
|
|
159
|
+
});
|
|
160
|
+
});
|
|
161
|
+
|
|
162
|
+
describe('File Operations', () => {
|
|
163
|
+
let model;
|
|
164
|
+
const fixturesPath = path.join(__dirname, 'fixtures');
|
|
165
|
+
|
|
166
|
+
beforeEach(() => {
|
|
167
|
+
model = ModelMix.new({
|
|
168
|
+
config: { debug: false }
|
|
169
|
+
});
|
|
170
|
+
});
|
|
171
|
+
|
|
172
|
+
it('should load and replace from template file', async () => {
|
|
173
|
+
model.gpt4o()
|
|
174
|
+
.replaceKeyFromFile('{{template}}', path.join(fixturesPath, 'template.txt'))
|
|
175
|
+
.replace({
|
|
176
|
+
'{{name}}': 'Eve',
|
|
177
|
+
'{{platform}}': 'ModelMix',
|
|
178
|
+
'{{username}}': 'eve_user',
|
|
179
|
+
'{{role}}': 'developer',
|
|
180
|
+
'{{created_date}}': '2023-12-01',
|
|
181
|
+
'{{website}}': 'https://modelmix.dev',
|
|
182
|
+
'{{company}}': 'AI Solutions'
|
|
183
|
+
})
|
|
184
|
+
.addText('Process this template: {{template}}');
|
|
185
|
+
|
|
186
|
+
nock('https://api.openai.com')
|
|
187
|
+
.post('/v1/chat/completions')
|
|
188
|
+
.reply(function (uri, body) {
|
|
189
|
+
const content = body.messages[1].content[0].text;
|
|
190
|
+
|
|
191
|
+
expect(content).to.include('Hello Eve, welcome to ModelMix!');
|
|
192
|
+
expect(content).to.include('Username: eve_user');
|
|
193
|
+
expect(content).to.include('Role: developer');
|
|
194
|
+
expect(content).to.include('Created: 2023-12-01');
|
|
195
|
+
expect(content).to.include('The AI Solutions Team');
|
|
196
|
+
|
|
197
|
+
return [200, {
|
|
198
|
+
choices: [{
|
|
199
|
+
message: {
|
|
200
|
+
role: 'assistant',
|
|
201
|
+
content: 'Template file processed'
|
|
202
|
+
}
|
|
203
|
+
}]
|
|
204
|
+
}];
|
|
205
|
+
});
|
|
206
|
+
|
|
207
|
+
const response = await model.message();
|
|
208
|
+
expect(response).to.include('Template file processed');
|
|
209
|
+
});
|
|
210
|
+
|
|
211
|
+
it('should load and process JSON data file', async () => {
|
|
212
|
+
model.gpt4o()
|
|
213
|
+
.replaceKeyFromFile('{{data}}', path.join(fixturesPath, 'data.json'))
|
|
214
|
+
.addText('Process this data: {{data}}');
|
|
215
|
+
|
|
216
|
+
nock('https://api.openai.com')
|
|
217
|
+
.post('/v1/chat/completions')
|
|
218
|
+
.reply(function (uri, body) {
|
|
219
|
+
const content = body.messages[1].content[0].text;
|
|
220
|
+
|
|
221
|
+
expect(content).to.include('Alice Smith');
|
|
222
|
+
expect(content).to.include('alice@example.com');
|
|
223
|
+
expect(content).to.include('admin');
|
|
224
|
+
expect(content).to.include('Bob Johnson');
|
|
225
|
+
expect(content).to.include('Carol Davis');
|
|
226
|
+
expect(content).to.include('"theme": "dark"');
|
|
227
|
+
expect(content).to.include('"version": "1.0.0"');
|
|
228
|
+
|
|
229
|
+
return [200, {
|
|
230
|
+
choices: [{
|
|
231
|
+
message: {
|
|
232
|
+
role: 'assistant',
|
|
233
|
+
content: 'JSON data processed'
|
|
234
|
+
}
|
|
235
|
+
}]
|
|
236
|
+
}];
|
|
237
|
+
});
|
|
238
|
+
|
|
239
|
+
const response = await model.message();
|
|
240
|
+
expect(response).to.include('JSON data processed');
|
|
241
|
+
});
|
|
242
|
+
|
|
243
|
+
it('should handle file loading errors gracefully', async () => {
|
|
244
|
+
model.gpt4o()
|
|
245
|
+
.replaceKeyFromFile('{{missing}}', path.join(fixturesPath, 'nonexistent.txt'))
|
|
246
|
+
.addText('This should contain: {{missing}}');
|
|
247
|
+
|
|
248
|
+
nock('https://api.openai.com')
|
|
249
|
+
.post('/v1/chat/completions')
|
|
250
|
+
.reply(function (uri, body) {
|
|
251
|
+
// The template should remain unreplaced if file doesn't exist
|
|
252
|
+
expect(body.messages[1].content[0].text).to.equal('This should contain: {{missing}}');
|
|
253
|
+
|
|
254
|
+
return [200, {
|
|
255
|
+
choices: [{
|
|
256
|
+
message: {
|
|
257
|
+
role: 'assistant',
|
|
258
|
+
content: 'File not found handled'
|
|
259
|
+
}
|
|
260
|
+
}]
|
|
261
|
+
}];
|
|
262
|
+
});
|
|
263
|
+
|
|
264
|
+
const response = await model.message();
|
|
265
|
+
expect(response).to.include('File not found handled');
|
|
266
|
+
});
|
|
267
|
+
|
|
268
|
+
it('should handle multiple file replacements', async () => {
|
|
269
|
+
model.gpt4o()
|
|
270
|
+
.replaceKeyFromFile('{{template}}', path.join(fixturesPath, 'template.txt'))
|
|
271
|
+
.replaceKeyFromFile('{{data}}', path.join(fixturesPath, 'data.json'))
|
|
272
|
+
.replace({
|
|
273
|
+
'{{name}}': 'Frank',
|
|
274
|
+
'{{platform}}': 'TestPlatform',
|
|
275
|
+
'{{username}}': 'frank_test',
|
|
276
|
+
'{{role}}': 'tester',
|
|
277
|
+
'{{created_date}}': '2023-12-15',
|
|
278
|
+
'{{website}}': 'https://test.com',
|
|
279
|
+
'{{company}}': 'Test Corp'
|
|
280
|
+
})
|
|
281
|
+
.addText('Template: {{template}}\n\nData: {{data}}');
|
|
282
|
+
|
|
283
|
+
nock('https://api.openai.com')
|
|
284
|
+
.post('/v1/chat/completions')
|
|
285
|
+
.reply(function (uri, body) {
|
|
286
|
+
const content = body.messages[1].content[0].text;
|
|
287
|
+
|
|
288
|
+
// Should contain processed template
|
|
289
|
+
expect(content).to.include('Hello Frank, welcome to TestPlatform!');
|
|
290
|
+
expect(content).to.include('Username: frank_test');
|
|
291
|
+
|
|
292
|
+
// Should contain JSON data
|
|
293
|
+
expect(content).to.include('Alice Smith');
|
|
294
|
+
expect(content).to.include('"theme": "dark"');
|
|
295
|
+
|
|
296
|
+
return [200, {
|
|
297
|
+
choices: [{
|
|
298
|
+
message: {
|
|
299
|
+
role: 'assistant',
|
|
300
|
+
content: 'Multiple files processed'
|
|
301
|
+
}
|
|
302
|
+
}]
|
|
303
|
+
}];
|
|
304
|
+
});
|
|
305
|
+
|
|
306
|
+
const response = await model.message();
|
|
307
|
+
expect(response).to.include('Multiple files processed');
|
|
308
|
+
});
|
|
309
|
+
|
|
310
|
+
it('should handle relative and absolute paths', async () => {
|
|
311
|
+
const absolutePath = path.resolve(fixturesPath, 'template.txt');
|
|
312
|
+
|
|
313
|
+
model.gpt4o()
|
|
314
|
+
.replaceKeyFromFile('{{absolute}}', absolutePath)
|
|
315
|
+
.replace({
|
|
316
|
+
'{{name}}': 'Grace',
|
|
317
|
+
'{{platform}}': 'AbsolutePath',
|
|
318
|
+
'{{username}}': 'grace_abs',
|
|
319
|
+
'{{role}}': 'admin',
|
|
320
|
+
'{{created_date}}': '2023-12-20',
|
|
321
|
+
'{{website}}': 'https://absolute.com',
|
|
322
|
+
'{{company}}': 'Absolute Corp'
|
|
323
|
+
})
|
|
324
|
+
.addText('Absolute path content: {{absolute}}');
|
|
325
|
+
|
|
326
|
+
nock('https://api.openai.com')
|
|
327
|
+
.post('/v1/chat/completions')
|
|
328
|
+
.reply(function (uri, body) {
|
|
329
|
+
const content = body.messages[1].content[0].text;
|
|
330
|
+
|
|
331
|
+
expect(content).to.include('Hello Grace, welcome to AbsolutePath!');
|
|
332
|
+
expect(content).to.include('The Absolute Corp Team');
|
|
333
|
+
|
|
334
|
+
return [200, {
|
|
335
|
+
choices: [{
|
|
336
|
+
message: {
|
|
337
|
+
role: 'assistant',
|
|
338
|
+
content: 'Absolute path works'
|
|
339
|
+
}
|
|
340
|
+
}]
|
|
341
|
+
}];
|
|
342
|
+
});
|
|
343
|
+
|
|
344
|
+
const response = await model.message();
|
|
345
|
+
expect(response).to.include('Absolute path works');
|
|
346
|
+
});
|
|
347
|
+
});
|
|
348
|
+
|
|
349
|
+
describe('Template and File Integration', () => {
|
|
350
|
+
let model;
|
|
351
|
+
const fixturesPath = path.join(__dirname, 'fixtures');
|
|
352
|
+
|
|
353
|
+
beforeEach(() => {
|
|
354
|
+
model = ModelMix.new({
|
|
355
|
+
config: { debug: false }
|
|
356
|
+
});
|
|
357
|
+
});
|
|
358
|
+
|
|
359
|
+
it('should combine file loading with template replacement in complex scenarios', async () => {
|
|
360
|
+
model.gpt4o()
|
|
361
|
+
.replaceKeyFromFile('{{user_data}}', path.join(fixturesPath, 'data.json'))
|
|
362
|
+
.replace({
|
|
363
|
+
'{{action}}': 'analyze',
|
|
364
|
+
'{{target}}': 'user behavior patterns',
|
|
365
|
+
'{{format}}': 'detailed report'
|
|
366
|
+
})
|
|
367
|
+
.addText('Please {{action}} the following {{target}} and generate a {{format}}:\n\n{{user_data}}');
|
|
368
|
+
|
|
369
|
+
nock('https://api.openai.com')
|
|
370
|
+
.post('/v1/chat/completions')
|
|
371
|
+
.reply(function (uri, body) {
|
|
372
|
+
const content = body.messages[1].content[0].text;
|
|
373
|
+
|
|
374
|
+
expect(content).to.include('Please analyze the following user behavior patterns and generate a detailed report:');
|
|
375
|
+
expect(content).to.include('Alice Smith');
|
|
376
|
+
expect(content).to.include('total_users');
|
|
377
|
+
|
|
378
|
+
return [200, {
|
|
379
|
+
choices: [{
|
|
380
|
+
message: {
|
|
381
|
+
role: 'assistant',
|
|
382
|
+
content: 'Complex template integration successful'
|
|
383
|
+
}
|
|
384
|
+
}]
|
|
385
|
+
}];
|
|
386
|
+
});
|
|
387
|
+
|
|
388
|
+
const response = await model.message();
|
|
389
|
+
expect(response).to.include('Complex template integration successful');
|
|
390
|
+
});
|
|
391
|
+
|
|
392
|
+
it('should handle template chains with JSON output', async () => {
|
|
393
|
+
const schema = {
|
|
394
|
+
summary: 'Analysis summary',
|
|
395
|
+
user_count: 0,
|
|
396
|
+
active_users: 0,
|
|
397
|
+
roles: ['admin', 'user']
|
|
398
|
+
};
|
|
399
|
+
|
|
400
|
+
model.gpt4o()
|
|
401
|
+
.replaceKeyFromFile('{{data}}', path.join(fixturesPath, 'data.json'))
|
|
402
|
+
.replace({ '{{instruction}}': 'Count active users by role' })
|
|
403
|
+
.addText('{{instruction}} from this data: {{data}}');
|
|
404
|
+
|
|
405
|
+
nock('https://api.openai.com')
|
|
406
|
+
.post('/v1/chat/completions')
|
|
407
|
+
.reply(function (uri, body) {
|
|
408
|
+
expect(body.messages[1].content[0].text).to.include('Count active users by role');
|
|
409
|
+
expect(body.messages[1].content[0].text).to.include('Alice Smith');
|
|
410
|
+
|
|
411
|
+
return [200, {
|
|
412
|
+
choices: [{
|
|
413
|
+
message: {
|
|
414
|
+
role: 'assistant',
|
|
415
|
+
content: JSON.stringify({
|
|
416
|
+
summary: 'User analysis completed',
|
|
417
|
+
user_count: 3,
|
|
418
|
+
active_users: 2,
|
|
419
|
+
roles: ['admin', 'user', 'moderator']
|
|
420
|
+
})
|
|
421
|
+
}
|
|
422
|
+
}]
|
|
423
|
+
}];
|
|
424
|
+
});
|
|
425
|
+
|
|
426
|
+
const result = await model.json(schema);
|
|
427
|
+
expect(result.summary).to.equal('User analysis completed');
|
|
428
|
+
expect(result.user_count).to.equal(3);
|
|
429
|
+
expect(result.active_users).to.equal(2);
|
|
430
|
+
expect(result.roles).to.deep.equal(['admin', 'user', 'moderator']);
|
|
431
|
+
});
|
|
432
|
+
});
|
|
433
|
+
|
|
434
|
+
describe('Error Handling', () => {
|
|
435
|
+
let model;
|
|
436
|
+
|
|
437
|
+
beforeEach(() => {
|
|
438
|
+
model = ModelMix.new({
|
|
439
|
+
config: { debug: false }
|
|
440
|
+
});
|
|
441
|
+
});
|
|
442
|
+
|
|
443
|
+
it('should handle template replacement errors gracefully', () => {
|
|
444
|
+
expect(() => {
|
|
445
|
+
model.gpt4o().replace(null);
|
|
446
|
+
}).to.not.throw();
|
|
447
|
+
|
|
448
|
+
expect(() => {
|
|
449
|
+
model.gpt4o().replace(undefined);
|
|
450
|
+
}).to.not.throw();
|
|
451
|
+
});
|
|
452
|
+
|
|
453
|
+
it('should handle file reading errors without crashing', async () => {
|
|
454
|
+
model.gpt4o()
|
|
455
|
+
.replaceKeyFromFile('{{bad_file}}', '/path/that/does/not/exist.txt')
|
|
456
|
+
.addText('Content: {{bad_file}}');
|
|
457
|
+
|
|
458
|
+
nock('https://api.openai.com')
|
|
459
|
+
.post('/v1/chat/completions')
|
|
460
|
+
.reply(200, {
|
|
461
|
+
choices: [{
|
|
462
|
+
message: {
|
|
463
|
+
role: 'assistant',
|
|
464
|
+
content: 'Error handled gracefully'
|
|
465
|
+
}
|
|
466
|
+
}]
|
|
467
|
+
});
|
|
468
|
+
|
|
469
|
+
const response = await model.message();
|
|
470
|
+
expect(response).to.include('Error handled gracefully');
|
|
471
|
+
});
|
|
472
|
+
});
|
|
473
|
+
});
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* ModelMix Test Runner
|
|
5
|
+
*
|
|
6
|
+
* This is the main entry point for running the comprehensive test suite.
|
|
7
|
+
* It includes tests for:
|
|
8
|
+
* - JSON schema generation and structured outputs
|
|
9
|
+
* - Provider fallback chains
|
|
10
|
+
* - Different providers (OpenAI, Anthropic, Google, etc.)
|
|
11
|
+
* - File operations and template system
|
|
12
|
+
* - Image processing and multimodal support
|
|
13
|
+
* - MCP (Model Context Protocol) integration
|
|
14
|
+
* - Rate limiting with Bottleneck
|
|
15
|
+
* - Integration tests and edge cases
|
|
16
|
+
*/
|
|
17
|
+
|
|
18
|
+
const { spawn } = require('child_process');
|
|
19
|
+
const path = require('path');
|
|
20
|
+
|
|
21
|
+
const testFiles = [
|
|
22
|
+
'json.test.js',
|
|
23
|
+
'fallback.test.js',
|
|
24
|
+
'templates.test.js',
|
|
25
|
+
'images.test.js',
|
|
26
|
+
'live.mcp.js',
|
|
27
|
+
];
|
|
28
|
+
|
|
29
|
+
console.log('𧬠ModelMix Test Suite Runner');
|
|
30
|
+
console.log('==============================');
|
|
31
|
+
console.log(`Running ${testFiles.length} test suites...\n`);
|
|
32
|
+
|
|
33
|
+
function runTests() {
|
|
34
|
+
const args = [
|
|
35
|
+
'--timeout', '10000',
|
|
36
|
+
'--recursive',
|
|
37
|
+
'test/**/*.test.js'
|
|
38
|
+
];
|
|
39
|
+
|
|
40
|
+
const child = spawn('npx', ['mocha', ...args], {
|
|
41
|
+
cwd: process.cwd(),
|
|
42
|
+
stdio: 'inherit'
|
|
43
|
+
});
|
|
44
|
+
|
|
45
|
+
child.on('close', (code) => {
|
|
46
|
+
if (code === 0) {
|
|
47
|
+
console.log('\nā
All tests passed!');
|
|
48
|
+
console.log('\nTest Coverage:');
|
|
49
|
+
console.log('- ā
JSON Schema Generation');
|
|
50
|
+
console.log('- ā
Provider Fallback Chains');
|
|
51
|
+
console.log('- ā
Multiple Providers (OpenAI, Anthropic, Google, etc.)');
|
|
52
|
+
console.log('- ā
File Operations & Templates');
|
|
53
|
+
console.log('- ā
Image Processing & Multimodal');
|
|
54
|
+
console.log('- ā
MCP Integration');
|
|
55
|
+
console.log('- ā
Live MCP Tools Testing');
|
|
56
|
+
console.log('- ā
Rate Limiting with Bottleneck');
|
|
57
|
+
console.log('- ā
Integration & Edge Cases');
|
|
58
|
+
} else {
|
|
59
|
+
console.error(`\nā Tests failed with exit code ${code}`);
|
|
60
|
+
process.exit(code);
|
|
61
|
+
}
|
|
62
|
+
});
|
|
63
|
+
|
|
64
|
+
child.on('error', (err) => {
|
|
65
|
+
console.error('ā Failed to start test runner:', err);
|
|
66
|
+
process.exit(1);
|
|
67
|
+
});
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
// Run the tests
|
|
71
|
+
if (require.main === module) {
|
|
72
|
+
runTests();
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
module.exports = { runTests, testFiles };
|