utilitas 2001.1.117 → 2001.1.118
This diff shows the content changes between publicly released versions of the package, as published to its public registry, and is provided for informational purposes only.
- package/README.md +2 -0
- package/dist/utilitas.lite.mjs +1 -1
- package/dist/utilitas.lite.mjs.map +1 -1
- package/lib/alan.mjs +140 -51
- package/lib/manifest.mjs +1 -1
- package/lib/web.mjs +6 -0
- package/package.json +1 -1
package/lib/alan.mjs
CHANGED
@@ -1,4 +1,4 @@
-import { checkSearch, distill, search } from './web.mjs';
+import { checkSearch, distill, getOpenRouterModels, search } from './web.mjs';
 import { create as createUoid } from './uoid.mjs';
 import { packPcmToWav } from './media.mjs';
 import { v4 as uuidv4 } from 'uuid';
@@ -47,7 +47,7 @@ const _NEED = ['OpenAI', '@google/genai'];
 const [
     OPENAI, GOOGLE, OLLAMA, NOVA, DEEPSEEK_32, MD_CODE, CLOUD_OPUS_45, AUDIO,
     WAV, OPENAI_VOICE, GPT_REASONING_EFFORT, THINK, THINK_STR, THINK_END,
-    TOOLS_STR, TOOLS_END, TOOLS, TEXT, OK, FUNC, GPT_52,
+    TOOLS_STR, TOOLS_END, TOOLS, TEXT, OK, FUNC, GPT_52, GPT_52_CODEX,
     GPT_IMAGE_15, GEMMA_3_27B, ANTHROPIC, ais, MAX_TOOL_RECURSION, LOG, name,
     user, system, assistant, JSON_OBJECT, PROMPT_IS_REQUIRED, k, trimTailing,
     trimBeginning, GEMINI_30_PRO_IMAGE, IMAGE, JINA, SILICONFLOW,
@@ -61,7 +61,7 @@ const [
     'OpenAI', 'Google', 'Ollama', 'nova', 'deepseek-3.2-speciale', '```',
     'claude-opus-4.5', 'audio', 'wav', 'OPENAI_VOICE', 'medium', 'think',
     '<think>', '</think>', '<tools>', '</tools>', 'tools', 'text', 'OK',
-    'function', 'gpt-5.2', 'gpt-5.
+    'function', 'gpt-5.2', 'gpt-5.2-codex', 'gpt-image-1.5', 'gemma3:27b',
     'Anthropic', [], 30, { log: true }, 'Alan', 'user', { role: 'system' },
     { role: 'assistant' }, 'json_object', 'Prompt is required.',
     x => 1000 * x, x => x.replace(/[\.\s]*$/, ''),
@@ -108,47 +108,13 @@ const FEATURE_ICONS = {
     vision: '👁️', // finetune: '🔧',
 };

-const GEMINI_RULES = {
-    source: GOOGLE, contextWindow: m(1.05), maxOutputTokens: k(65.5),
-    hearing: true, reasoning: true, structured: true, tools: true, vision: true,
-    supportedMimeTypes: [
-        MIME_PNG, MIME_JPEG, MIME_MOV, MIME_MPEG, MIME_MP4, MIME_MPG, MIME_AVI,
-        MIME_WMV, MIME_MPEGPS, MIME_FLV, MIME_PDF, MIME_AAC, MIME_FLAC,
-        MIME_MP3, MIME_MPEGA, MIME_M4A, MIME_MPGA, MIME_OPUS, MIME_PCM,
-        MIME_WAV, MIME_WEBM, MIME_TGPP, MIME_OGG,
-    ], defaultProvider: OPENROUTER,
-};
-
-const OPENAI_RULES = {
-    source: OPENAI, contextWindow: k(400), maxOutputTokens: k(128),
-    hearing: true, reasoning: true, structured: true, tools: true, vision: true,
-    supportedMimeTypes: [
-        MIME_PNG, MIME_JPEG, MIME_GIF, MIME_WEBP, MIME_PDF, MIME_WAV
-    ], defaultProvider: OPENROUTER,
-};
-
 const DEEPSEEK_32_RULES = {
     source: DEEPSEEK, contextWindow: k(163.8), maxOutputTokens: k(65.5),
     structured: true, tools: true, reasoning: true,
 };

-// https://platform.openai.com/docs/models
-// https://cloud.google.com/vertex-ai/docs/generative-ai/learn/models
-// https://openrouter.ai/docs/features/multimodal/audio (only support input audio)
 const MODELS = {
-    // fast and balanced models
-    [GEMINI_30_FLASH]: { // https://gemini.google.com/app/c680748b3307790b
-        ...GEMINI_RULES, fast: true, structured: false, // issue with json output via OpenRouter
-    },
-    // strong and fast
-    [GPT_52]: { ...OPENAI_RULES, fast: true },
-    // stronger but slow
-    [GEMINI_30_PRO]: { ...GEMINI_RULES },
     // models with generation capabilities
-    [GEMINI_30_PRO_IMAGE]: {
-        ...GEMINI_RULES, icon: '🍌', label: 'Nano Banana Pro',
-        contextWindow: k(64), maxOutputTokens: k(32), image: true, tools: false,
-    },
     [IMAGEN_4_ULTRA]: {
         source: GOOGLE, maxInputTokens: 480,
         image: true, defaultProvider: GOOGLE,
@@ -163,18 +129,11 @@ const MODELS = {
         supportedMimeTypes: [MIME_PNG, MIME_JPEG], defaultProvider: GOOGLE,
     },
     [GPT_IMAGE_15]: {
-
+        source: OPENAI, contextWindow: k(400), maxOutputTokens: k(128),
+        hearing: true, image: true, reasoning: true, structured: true,
+        tools: true, vision: true, defaultProvider: OPENAI,
         supportedMimeTypes: [MIME_PNG, MIME_JPEG, MIME_GIF, MIME_WEBP],
     },
-    // models with code capabilities
-    [GPT_51_CODEX]: { ...OPENAI_RULES },
-    [CLOUD_OPUS_45]: {
-        source: ANTHROPIC, contextWindow: k(200), maxOutputTokens: k(64),
-        reasoning: true, structured: true, tools: true, vision: true,
-        supportedMimeTypes: [
-            MIME_TEXT, MIME_PNG, MIME_JPEG, MIME_GIF, MIME_WEBP, MIME_PDF,
-        ], defaultProvider: OPENROUTER,
-    },
     // tts/stt models
     [GEMINI_25_FLASH_TTS]: {
         source: GOOGLE, maxInputTokens: k(32),
@@ -226,7 +185,7 @@ for (const n in MODELS) {
 }
 // Auto model have some issues with tools and reasoning, so we disable them here
 // MODELS[AUTO] = { name: AUTO, defaultProvider: OPENROUTER, };
-// for (const n of [GPT_52,
+// for (const n of [GPT_52, GPT_52_CODEX, GEMINI_30_PRO, GEMINI_30_FLASH]) {
 // // get the most restrictive limits
 // for (const key of [
 // 'contextWindow', 'maxInputTokens', 'maxOutputTokens',
@@ -263,13 +222,138 @@ for (const n in MODELS) {

 // Default models for each provider
 const DEFAULT_MODELS = {
-    [OPENROUTER]:
+    [OPENROUTER]: [
+        // fast and balanced models
+        GEMINI_30_FLASH,
+        // strong and fast
+        GPT_52,
+        // stronger but slow
+        GEMINI_30_PRO,
+        // models with generation capabilities
+        GEMINI_30_PRO_IMAGE,
+        // models with code capabilities
+        GPT_52_CODEX,
+        CLOUD_OPUS_45,
+    ],
     [SILICONFLOW]: SF_DEEPSEEK_32,
     [OLLAMA]: GEMMA_3_27B,
     [OPENAI_VOICE]: NOVA,
 };

-let _tools;
+let _tools, openrouterUpdated;
+
+const adaptOpenRouterModels = async () => {
+    if (openrouterUpdated) { return; }
+    openrouterUpdated = true;
+    for (const model of await getOpenRouterModels()) {
+        model.label = model.name;
+        model.name = model.id.split('/').pop().trim();
+        model.source = OPENROUTER;
+        model.contextWindow = Math.min(
+            model.context_length || (1000 * 1000),
+            model.top_provider?.context_length || Infinity
+        );
+        model.maxOutputTokens = Math.min(
+            ~~model.top_provider?.max_completion_tokens || model.contextWindow,
+            parseInt(model.contextWindow * 0.4)
+        );
+        model.attachmentTokenCost = ATTACHMENT_TOKEN_COST;
+        model.maxInputTokens = model.contextWindow - model.maxOutputTokens;
+        model.inputModalities = model.architecture.input_modalities;
+        model.source = model.label.split(':')[0] || model.id.split('/')[0];
+        model.outputModalities = model.architecture.output_modalities;
+        model.defaultProvider = OPENROUTER;
+        [
+            'architecture', 'canonical_slug', 'context_length', 'created',
+            'default_parameters', 'expiration_date', 'hugging_face_id',
+            'per_request_limits', 'pricing', 'supported_parameters',
+            'top_provider',
+        ].map(key => delete model[key]);
+        // Backward compatibility: {
+        if (model.inputModalities.includes('image')
+            || model.inputModalities.includes('video')
+            || model.inputModalities.includes('file')) {
+            model.vision = true;
+        }
+        if (model.inputModalities.includes('audio')) {
+            model.hearing = true;
+        }
+        if (/reasoning|thinking/i.test(model.description)) {
+            model.reasoning = true;
+        }
+        if (model.outputModalities.includes('image')) {
+            model.image = true;
+        }
+        // if (model.outputModalities.includes('audio')) {
+        //     model.audio = true;
+        // }
+        // https://gemini.google.com/app/c680748b3307790b
+        // issue with json output via OpenRouter
+        if (/structured/i.test(model.description)
+            && model.name !== 'gemini-3-flash') {
+            model.structured = true;
+        }
+        if (/tool/i.test(model.description)) {
+            model.tools = true;
+        }
+        if (/flash|lite|mini|nano|tiny|small/i.test(model.name)) {
+            model.fast = true;
+        }
+        model.supportedMimeTypes = [];
+        switch (model.source) {
+            case GOOGLE:
+                if (model.vision) {
+                    model.supportedMimeTypes.push(
+                        MIME_PNG, MIME_JPEG, MIME_MOV, MIME_MPEG, MIME_MP4,
+                        MIME_MPG, MIME_AVI, MIME_WMV, MIME_MPEGPS, MIME_FLV,
+                        MIME_WEBM, MIME_TGPP, MIME_PDF,
+                    );
+                }
+                if (model.hearing) {
+                    model.supportedMimeTypes.push(
+                        MIME_AAC, MIME_FLAC, MIME_MP3, MIME_MPEGA, MIME_M4A,
+                        MIME_MPGA, MIME_OPUS, MIME_PCM, MIME_WAV, MIME_OGG,
+                    );
+                }
+                if (model.name === GEMINI_30_PRO_IMAGE) {
+                    model.icon = '🍌';
+                    model.label = 'Nano Banana Pro';
+                } else if (model.name === 'gemini-2.5-flash-image') {
+                    model.icon = '🍌';
+                    model.label = 'Nano Banana';
+                }
+                break;
+            case OPENAI:
+                // Notes:
+                // https://platform.openai.com/docs/models
+                // https://cloud.google.com/vertex-ai/docs/generative-ai/learn/models
+                // https://openrouter.ai/docs/features/multimodal/audio (only support input audio)
+                if (model.vision) {
+                    model.supportedMimeTypes.push(
+                        MIME_PNG, MIME_JPEG, MIME_GIF, MIME_WEBP, MIME_PDF,
+                    );
+                }
+                if (model.hearing) {
+                    model.supportedMimeTypes.push(MIME_WAV);
+                }
+                if (model.name === GPT_52) {
+                    model.fast = true;
+                }
+                break;
+            case ANTHROPIC:
+                if (model.vision) {
+                    model.supportedMimeTypes.push(
+                        MIME_TEXT, MIME_PNG, MIME_JPEG, MIME_GIF, MIME_WEBP,
+                        MIME_PDF,
+                    );
+                }
+                break;
+        }
+        // }
+        model.label = model.label.split(':').pop().trim();
+        MODELS[model.name] = model;
+    }
+};

 const unifyProvider = provider => {
     assert(provider = (provider || '').trim(), 'AI provider is required.');
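Note on the hunk above: the hand-maintained GEMINI_RULES / OPENAI_RULES tables are removed, and model capabilities are now derived at runtime from the OpenRouter catalog by adaptOpenRouterModels(). The standalone sketch below restates that per-entry mapping in simplified form; the helper name normalizeEntry and the sample values are illustrative only, and the real adapter additionally assigns per-vendor MIME types, icons, and labels as shown in the diff.

// Hypothetical, simplified sketch of the per-entry normalization above.
// Raw field names (id, name, description, context_length, architecture,
// top_provider) follow the OpenRouter /api/v1/models schema used by the diff.
const normalizeEntry = (raw) => {
    const contextWindow = Math.min(
        raw.context_length || 1000 * 1000,
        raw.top_provider?.context_length || Infinity
    );
    const maxOutputTokens = Math.min(
        ~~raw.top_provider?.max_completion_tokens || contextWindow,
        parseInt(contextWindow * 0.4)
    );
    return {
        name: raw.id.split('/').pop().trim(),              // e.g. 'gpt-5.2'
        label: raw.name.split(':').pop().trim(),
        source: raw.name.split(':')[0] || raw.id.split('/')[0],
        contextWindow, maxOutputTokens,
        maxInputTokens: contextWindow - maxOutputTokens,
        vision: raw.architecture.input_modalities.some(
            m => ['image', 'video', 'file'].includes(m)),
        hearing: raw.architecture.input_modalities.includes('audio'),
        image: raw.architecture.output_modalities.includes('image'),
        reasoning: /reasoning|thinking/i.test(raw.description),
        tools: /tool/i.test(raw.description),
        fast: /flash|lite|mini|nano|tiny|small/i.test(raw.id),
        defaultProvider: 'OpenRouter', // placeholder for the OPENROUTER constant
    };
};
// Example (illustrative input shape):
// normalizeEntry({ id: 'openai/gpt-5.2', name: 'OpenAI: GPT-5.2', ... })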
@@ -400,6 +484,7 @@ const init = async (options = {}) => {
         (await need('node:util')).inspect.defaultOptions.depth = null;
         options.logLevel = 'debug';
     }
+    await adaptOpenRouterModels();
     options.provider = options.provider || OPENROUTER;
     const provider = unifyProvider(options.provider);
     const priority = options.priority;
@@ -413,7 +498,10 @@ const init = async (options = {}) => {
             x => ensureArray(options.model).includes(x.name)
         );
     } else if (DEFAULT_MODELS[provider]) { // Default model
-
+        const modelsByName = Object.fromEntries(
+            Object.values(MODELS).map(x => [x.name, x])
+        );
+        models = ensureArray(DEFAULT_MODELS[provider]).map(x => modelsByName[x]);
     } else if (options.modelConfig) {
         models = ensureArray(options.modelConfig);
     }
@@ -1614,6 +1702,7 @@ export {
     GEMINI_30_PRO_IMAGE,
     GPT_IMAGE_15,
     GPT_52,
+    GPT_52_CODEX,
     IMAGEN_4_ULTRA,
     INSTRUCTIONS,
     MODELS,
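With this release, init() calls adaptOpenRouterModels() once per process (guarded by the openrouterUpdated flag) before resolving options.provider, so the MODELS table is populated from the live OpenRouter catalog instead of the removed static rules, and DEFAULT_MODELS[OPENROUTER] is now a named list mapped through that table. A minimal usage sketch, assuming the package's main entry still re-exports the alan module as in earlier versions; option names other than provider / model / modelConfig are not visible in this diff, so any credentials init already required are omitted:

import { alan } from 'utilitas'; // assumed re-export of lib/alan.mjs

// provider defaults to OPENROUTER when omitted; the first init() fetches and
// normalizes the catalog, later calls reuse the cached result.
await alan.init({
    model: ['gpt-5.2', 'claude-opus-4.5'], // optional; else DEFAULT_MODELS[provider]
});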
package/lib/manifest.mjs
CHANGED
package/lib/web.mjs
CHANGED
@@ -381,6 +381,11 @@ const getExchangeRate = async (to, from, amount) => {
     return { rate, amount: amount.toString() };
 };

+const getOpenRouterModels = async () => {
+    const resp = await get('https://openrouter.ai/api/v1/models', { encode: 'JSON' });
+    assert(resp?.content?.data, 'Error fetching OpenRouter models.', 500);
+    return resp.content.data;
+};

 export default get;
 export {
@@ -397,6 +402,7 @@ export {
     getCurrentPosition,
     getExchangeRate,
     getJson,
+    getOpenRouterModels,
     getParsedHtml,
     getVersionOnNpm,
     getYoutubeMetadata,
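The new getOpenRouterModels() helper wraps the module's existing get() fetcher with JSON decoding and returns resp.content.data, i.e. the data array of the OpenRouter catalog endpoint. For reference, a standalone sketch using the platform fetch API; the helper in this diff passes no credentials, so the listing endpoint is treated as public, and the commented field list reflects the properties the alan.mjs adapter reads or deletes:

// Standalone sketch (no utilitas dependency) of fetching the same catalog.
const fetchOpenRouterModels = async () => {
    const resp = await fetch('https://openrouter.ai/api/v1/models');
    if (!resp.ok) { throw new Error(`Error fetching OpenRouter models: ${resp.status}`); }
    const { data } = await resp.json();
    // Each entry carries id, name, description, context_length, architecture
    // (input_modalities / output_modalities), top_provider, pricing, ...
    if (!Array.isArray(data)) { throw new Error('Unexpected response shape.'); }
    return data;
};

// const models = await fetchOpenRouterModels();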