modelmix 4.4.22 → 4.4.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/demo/json.js +1 -1
- package/index.js +33 -7
- package/package.json +1 -1
- package/test/images.test.js +34 -0
- package/test/json.test.js +36 -0
package/demo/json.js
CHANGED
@@ -2,7 +2,7 @@ import { ModelMix } from '../index.js';
 try { process.loadEnvFile(); } catch {}
 
 const model = await ModelMix.new({ options: { max_tokens: 10000 }, config: { debug: 3 } })
-    .
+    .gpt51()
     // .gptOss()
     // .scout({ config: { temperature: 0 } })
     // .o4mini()
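The demo's only change is selecting `.gpt51()`, the model that the new Responses API handling below targets (the JSON test later in this diff mocks POST /v1/responses for it). A hypothetical continuation, not part of the diff, using only calls that appear elsewhere in the package's tests:

// Hypothetical follow-up (not in the diff): send a prompt with the selected model.
model.addText('Hello, which model are you?');
const reply = await model.message();
console.log(reply);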
package/index.js
CHANGED
@@ -1,6 +1,7 @@
 const axios = require('axios');
 const fs = require('fs');
-const
+const fileType = require('file-type');
+const detectFileTypeFromBuffer = fileType.fileTypeFromBuffer || fileType.fromBuffer;
 const { inspect } = require('util');
 const log = require('lemonlog')('ModelMix');
 const Bottleneck = require('bottleneck');
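The fallback between the two export names is presumably there because file-type renamed its buffer detector across major versions: the CommonJS releases (v16 and earlier) export fromBuffer, while the later ESM-first releases export fileTypeFromBuffer. A minimal sketch of what the resolved helper returns, reusing the tiny PNG fixture from the test below and assuming a require()-able file-type build:

// Sketch (assumes a require()-able file-type build). The detector reads
// magic bytes, so it works even when no file name or header is available.
const fileType = require('file-type');
const detect = fileType.fileTypeFromBuffer || fileType.fromBuffer;

const pngBuffer = Buffer.from(
    'iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAFUlEQVR42mP8z8BQz0AEYBxVSF+FABJADveWkH6oAAAAAElFTkSuQmCC',
    'base64'
);
detect(pngBuffer).then(result => {
    console.log(result); // e.g. { ext: 'png', mime: 'image/png' }; undefined if unrecognized
});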
@@ -633,11 +634,14 @@ class ModelMix {
 
         // Detect mimeType if not provided
         if (!mimeType) {
-
-
+            if (typeof detectFileTypeFromBuffer !== 'function') {
+                throw new Error('file-type module does not expose a buffer detector');
+            }
+            const detectedType = await detectFileTypeFromBuffer(buffer);
+            if (!detectedType || !detectedType.mime.startsWith('image/')) {
                 throw new Error(`Invalid image - unable to detect valid image format`);
             }
-            mimeType =
+            mimeType = detectedType.mime;
         }
 
         // Update the content with processed image
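Read as a unit, the new branch is equivalent to this standalone helper (a sketch; resolveImageMime is a name invented here, not in the package):

// Self-contained version of the detection added above.
const fileType = require('file-type');
const detectFileTypeFromBuffer = fileType.fileTypeFromBuffer || fileType.fromBuffer;

async function resolveImageMime(buffer) {
    if (typeof detectFileTypeFromBuffer !== 'function') {
        throw new Error('file-type module does not expose a buffer detector');
    }
    const detected = await detectFileTypeFromBuffer(buffer);
    // Reject both unrecognized payloads and recognized non-image formats.
    if (!detected || !detected.mime.startsWith('image/')) {
        throw new Error('Invalid image - unable to detect valid image format');
    }
    return detected.mime; // e.g. 'image/png'
}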
@@ -1562,7 +1566,7 @@ class MixOpenAIResponses extends MixOpenAI {
         }
 
         const responsesUrl = this.config.url.replace('/chat/completions', '/responses');
-        const request = MixOpenAIResponses.buildResponsesRequest(options);
+        const request = MixOpenAIResponses.buildResponsesRequest(options, config);
         const response = await axios.post(responsesUrl, request, {
             headers: this.headers
         });
@@ -1570,16 +1574,38 @@ class MixOpenAIResponses extends MixOpenAI {
         return MixOpenAIResponses.processResponsesResponse(response);
     }
 
-    static buildResponsesRequest(options = {}) {
+    static buildResponsesRequest(options = {}, config = {}) {
+        const input = MixOpenAIResponses.messagesToResponsesInput(options.messages);
+        if (config.system) {
+            input.unshift({ role: 'developer', content: [{ type: 'input_text', text: config.system }] });
+        }
         const request = {
             model: options.model,
-            input
+            input,
             stream: false
         };
 
         if (options.reasoning_effort) request.reasoning = { effort: options.reasoning_effort };
         if (options.verbosity) request.text = { verbosity: options.verbosity };
 
+        if (options.response_format) {
+            const rf = options.response_format;
+            let format;
+            if (rf.type === 'json_schema' && rf.json_schema) {
+                format = {
+                    type: 'json_schema',
+                    name: rf.json_schema.name || 'response',
+                    strict: true,
+                    schema: rf.json_schema.schema
+                };
+            } else if (rf.type) {
+                format = { type: rf.type };
+            }
+            if (format) {
+                request.text = { ...request.text, format };
+            }
+        }
+
         if (typeof options.max_completion_tokens === 'number') {
             request.max_output_tokens = options.max_completion_tokens;
         } else if (typeof options.max_tokens === 'number') {
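The net effect: a Chat Completions-style response_format on options is translated into the Responses API's text.format, and a config.system prompt becomes a leading developer input item. A before/after sketch of the mapping (field values are illustrative; only fields the code above reads are used):

// Illustrative input, Chat Completions style (values are made up):
const options = {
    model: 'gpt-5.1',
    response_format: {
        type: 'json_schema',
        json_schema: { name: 'countries', schema: { type: 'object' } }
    }
};
const config = { system: 'Answer in JSON.' };

// Resulting Responses API body from buildResponsesRequest(options, config):
// {
//   model: 'gpt-5.1',
//   input: [
//     { role: 'developer', content: [{ type: 'input_text', text: 'Answer in JSON.' }] },
//     ...converted messages
//   ],
//   stream: false,
//   text: { format: { type: 'json_schema', name: 'countries', strict: true, schema: { type: 'object' } } }
// }
// A bare { type: 'json_object' } response_format maps to text.format = { type: 'json_object' }.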
package/package.json
CHANGED
package/test/images.test.js
CHANGED
@@ -82,5 +82,39 @@ describe('Image Processing and Multimodal Support Tests', () => {
         expect(response).to.include('small PNG test image');
     });
 
+    it('should detect image mime type from buffer when content-type header is missing', async () => {
+        const imageUrl = 'https://assets.example.com/test-image';
+        const pngBase64 = 'iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAFUlEQVR42mP8z8BQz0AEYBxVSF+FABJADveWkH6oAAAAAElFTkSuQmCC';
+        const pngBuffer = Buffer.from(pngBase64, 'base64');
+
+        model.sonnet46()
+            .addText('Describe this image')
+            .addImageFromUrl(imageUrl);
+
+        // No content-type header on purpose: this forces buffer-based detection.
+        nock('https://assets.example.com')
+            .get('/test-image')
+            .reply(200, pngBuffer);
+
+        nock('https://api.anthropic.com')
+            .post('/v1/messages')
+            .reply(function (uri, body) {
+                const userMsg = body.messages.find(m => m.role === 'user');
+                expect(userMsg).to.exist;
+                const imageContent = userMsg.content.find(c => c.type === 'image');
+                expect(imageContent).to.exist;
+                expect(imageContent.source.type).to.equal('base64');
+                expect(imageContent.source.media_type).to.equal('image/png');
+                expect(imageContent.source.data).to.equal(pngBase64);
+                return [200, {
+                    content: [{ type: "text", text: "Image received." }],
+                    role: "assistant"
+                }];
+            });
+
+        const response = await model.message();
+        expect(response).to.include('Image received.');
+    });
+
     });
 });
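From the caller's side, the scenario this test pins down looks like the following (a usage sketch built from the test's own calls; the URL is the test fixture, not a real asset):

// Usage sketch: an image URL served without a Content-Type header now works,
// because the mime type is sniffed from the downloaded bytes.
model.sonnet46()
    .addText('Describe this image')
    .addImageFromUrl('https://assets.example.com/test-image');
const reply = await model.message();
console.log(reply); // the provider receives a base64 image with media_type 'image/png'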
package/test/json.test.js
CHANGED
@@ -376,6 +376,42 @@ describe('JSON Schema and Structured Output Tests', () => {
         expect(result.countries[0]).to.have.property('capital');
     });
 
+    it('should send JSON mode and system instructions in Responses API request', async () => {
+        const example = {
+            countries: [{
+                name: 'France',
+                capital: 'Paris'
+            }]
+        };
+
+        model.gpt51().addText('Name and capital of 3 South American countries.');
+
+        let capturedBody;
+        nock('https://api.openai.com')
+            .post('/v1/responses', (body) => {
+                capturedBody = body;
+                return true;
+            })
+            .reply(200, testUtils.createMockResponse('openai-responses', JSON.stringify({
+                countries: [
+                    { name: 'Argentina', capital: 'BUENOS AIRES' },
+                    { name: 'Brazil', capital: 'BRASILIA' },
+                    { name: 'Colombia', capital: 'BOGOTA' }
+                ]
+            })));
+
+        const result = await model.json(example);
+
+        expect(result.countries).to.be.an('array');
+        expect(result.countries).to.have.length(3);
+        expect(capturedBody).to.be.an('object');
+        expect(capturedBody.text).to.be.an('object');
+        expect(capturedBody.text.format).to.deep.equal({ type: 'json_object' });
+        expect(capturedBody.input).to.be.an('array').that.is.not.empty;
+        expect(capturedBody.input[0].role).to.equal('developer');
+        expect(capturedBody.input[0].content[0].text).to.include('JSON');
+    });
+
     it('should handle complex nested JSON schema', async () => {
         const example = {
             user: {
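And the consumer-facing behavior this test asserts: json(example) puts the Responses request into JSON mode (text.format = { type: 'json_object' }) and prepends its JSON instructions as a developer input item. A usage sketch mirroring the test's own calls:

// Usage sketch: the example object shapes the instructions; the reply comes back parsed.
model.gpt51().addText('Name and capital of 3 South American countries.');
const result = await model.json({ countries: [{ name: 'France', capital: 'Paris' }] });
console.log(result.countries); // e.g. [{ name: 'Argentina', capital: 'BUENOS AIRES' }, ...]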