utilitas 2000.3.58 → 2000.3.59
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/utilitas.lite.mjs +1 -1
- package/dist/utilitas.lite.mjs.map +1 -1
- package/lib/alan.mjs +45 -45
- package/lib/manifest.mjs +1 -1
- package/package.json +1 -1
package/lib/alan.mjs
CHANGED
@@ -51,10 +51,9 @@ const [
     PROMPT_IS_REQUIRED, OPENAI_HI_RES_SIZE, k, m, minute, hour, gb, trimTailing,
     trimBeginning, GEMINI_30_PRO_IMAGE, IMAGE, JINA, JINA_DEEPSEARCH,
     SILICONFLOW, SF_DEEPSEEK_32, MAX_TIRE, OPENROUTER_API, OPENROUTER, AUTO,
-    TOOL,
-
-
-    GPT_4O_TRANSCRIBE, INVALID_AUDIO, OGG_EXT, ELLIPSIS,
+    TOOL, ONLINE, GEMINI_30_PRO, GEMINI_25_FLASH, IMAGEN_4_ULTRA, VEO_31,
+    IMAGEN_4_UPSCALE, ERROR_GENERATING, GEMINI_25_FLASH_TTS, GEMINI_25_PRO_TTS,
+    wav, GPT_4O_MIMI_TTS, GPT_4O_TRANSCRIBE, INVALID_AUDIO, OGG_EXT, ELLIPSIS,
 ] = [
     'OpenAI', 'Google', 'Ollama', 'nova', 'deepseek-3.2-speciale', '```',
     'claude-opus-4.5', 'audio', 'wav', 'OPENAI_VOICE', 'medium', 'think',
@@ -68,12 +67,12 @@ const [
     'gemini-3-pro-image-preview', 'image', 'Jina', 'jina-deepsearch-v1',
     'SiliconFlow', 'deepseek-ai/DeepSeek-V3.2-exp', 768 * 768,
     'https://openrouter.ai/api/v1', 'OpenRouter', 'openrouter/auto', 'tool',
-    '
-    '
-    '
-    '
-    '
-    '
+    ':online', 'gemini-3-pro-preview', 'gemini-2.5-flash-preview-09-2025',
+    'imagen-4.0-ultra-generate-001', 'veo-3.1-generate-preview',
+    'imagen-4.0-upscale-preview', 'Error generating content.',
+    'gemini-2.5-flash-preview-tts', 'gemini-2.5-pro-tts', 'wav',
+    'gpt-4o-mini-tts', 'gpt-4o-transcribe', 'Invalid audio data.', 'ogg',
+    '...',
 ];
 
 const [tool, messages, text]
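Note: these two hunks stay in sync by position. alan.mjs declares its constants by destructuring one long array of names against one long array of values, so every identifier added on the left (ONLINE, GEMINI_30_PRO, ...) must gain a matching literal on the right (':online', 'gemini-3-pro-preview', ...) at the same index. A minimal sketch of the pattern, with an illustrative subset of the names:

    // Positional destructuring: NAME[i] is bound to VALUE[i], so both
    // arrays must be edited together, exactly as in the hunks above.
    const [GEMINI_30_PRO, GEMINI_25_FLASH_TTS, ELLIPSIS] = [
        'gemini-3-pro-preview', 'gemini-2.5-flash-preview-tts', '...',
    ];
    console.log(GEMINI_30_PRO); // 'gemini-3-pro-preview'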
@@ -95,7 +94,7 @@ const libOpenAi = async opts => await need('openai', { ...opts, raw: true });
 const buildTextWithEllipsis = (txt, trim) => `${txt}${(trim ? ELLIPSIS : '')}`;
 
 const GEMINI_RULES = {
-    source:
+    source: GOOGLE, icon: '♊️',
     json: true, audioCostTokens: 1000 * 1000 * 1, // 8.4 hours => 1 million tokens
     imageCostTokens: ~~(v8k / MAX_TIRE * 258), maxAudioLength: hour(8.4),
     maxAudioPerPrompt: 1, maxFileSize: m(20), maxImagePerPrompt: 3000,
@@ -110,7 +109,7 @@ const GEMINI_RULES = {
 };
 
 const OPENAI_RULES = {
-    source:
+    source: OPENAI, icon: '⚛️',
     contextWindow: k(400), maxOutputTokens: k(128),
     imageCostTokens: ~~(OPENAI_HI_RES_SIZE / MAX_TIRE * 140 + 70),
     maxFileSize: m(50), maxImageSize: OPENAI_HI_RES_SIZE,
@@ -147,11 +146,11 @@ const MODELS = {
         contextWindow: k(64), maxOutputTokens: k(32), image: true, tools: false,
     },
     [IMAGEN_4_ULTRA]: {
-        source:
+        source: GOOGLE, maxInputTokens: 480,
         image: true, defaultProvider: GOOGLE,
     },
     [VEO_31]: {
-        source:
+        source: GOOGLE, maxInputTokens: 1024,
         imageCostTokens: 0, maxImagePerPrompt: 1,
         maxImageSize: Infinity, vision: true, video: true,
         supportedMimeTypes: [MIME_PNG, MIME_JPEG], defaultProvider: GOOGLE,
@@ -162,7 +161,7 @@ const MODELS = {
     // models with code capabilities
     [GPT_51_CODEX]: { ...OPENAI_RULES },
     [CLOUD_OPUS_45]: {
-        source:
+        source: ANTHROPIC, icon: '✳️',
         contextWindow: k(200), maxOutputTokens: k(64),
         documentCostTokens: 3000 * 10, maxDocumentFile: m(32),
         maxDocumentPages: 100, imageCostTokens: ~~(v8k / 750),
@@ -175,19 +174,19 @@ const MODELS = {
     },
     // tts/stt models
     [GEMINI_25_FLASH_TTS]: {
-        source:
+        source: GOOGLE, maxInputTokens: k(32), audio: true, fast: true,
         hidden: true, defaultProvider: GOOGLE,
     },
     [GEMINI_25_PRO_TTS]: {
-        source:
+        source: GOOGLE, maxInputTokens: k(32), audio: true,
         hidden: true, defaultProvider: GOOGLE,
     },
     [GPT_4O_MIMI_TTS]: {
-        source:
+        source: OPENAI, maxInputTokens: k(2), audio: true, fast: true,
         hidden: true, defaultProvider: OPENAI,
     },
     [GPT_4O_TRANSCRIBE]: {
-        source:
+        source: OPENAI, maxInputTokens: 0, hearing: true, fast: true,
         hidden: true, defaultProvider: OPENAI,
     },
     // models with deepsearch capabilities
@@ -384,17 +383,15 @@ const packTools = async () => {
     return _tools;
 };
 
-const buildAiId = (provider, model) => [
+const buildAiId = (provider, model) => packModelId([
     provider, ...isOpenrouter(provider, model) ? [model.source] : [],
     model?.name
-]
+], { case: 'SNAKE', raw: true }).join('_');
 
-const buildAiName = (provider, model) => [
-
-
-
-])
-].join(' ');
+const buildAiName = (provider, model) => packModelId([
+    provider, ...isOpenrouter(provider, model) ? [model.source] : [],
+    model.label || model.name
+]);
 
 const buildAiFeatures = model => Object.entries(FEATURE_ICONS).map(
     x => model[x[0]] ? x[1] : ''
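Note: both helpers now delegate to packModelId, introduced later in this diff, instead of hand-joining arrays. buildAiId requests raw, SNAKE-cased segments and joins them with underscores, while buildAiName keeps packModelId's default '/'-joined form and prefers model.label over model.name. A sketch of the call shapes; the exact casing applied by utilitas's ensureString is not shown in this diff, so the outputs below are only indicative:

    // Illustrative inputs/outputs; real casing comes from utilitas's ensureString.
    const model = { source: 'Google', name: 'gemini-3-pro-preview', label: 'Gemini 3 Pro' };
    buildAiId('OpenRouter', model);   // SNAKE-cased segments joined with '_'
    buildAiName('OpenRouter', model); // e.g. 'OpenRouter/Google/Gemini 3 Pro'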
@@ -402,10 +399,12 @@ const buildAiFeatures = model => Object.entries(FEATURE_ICONS).map(
 
 const setupAi = ai => {
     const id = buildAiId(ai.provider, ai.model);
+    const name = buildAiName(ai.provider, ai.model);
+    const icon = ai.model?.icon || getProviderIcon(ai.provider);
+    const features = buildAiFeatures(ai.model);
     ais.push({
-        id, name:
-
-        ...ai, priority: ai.priority || 0,
+        id, icon, name, features, label: `${icon} ${name} (${features})`,
+        initOrder: ais.length, ...ai, priority: ai.priority || 0,
         prompt: async (text, opts) => await ai.prompt(id, text, opts),
     });
 };
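Note: setupAi now precomputes display metadata once per registered AI: id, human-readable name, the model's icon (falling back to the provider's), and the feature glyphs, folded into a single label; initOrder records registration order for later sorting. A sketch of the label assembly with illustrative values:

    // Illustrative values only; real ones come from the model registry above.
    const icon = '♊️', name = 'Google/Gemini 3 Pro', features = '👁️🛠️';
    const label = `${icon} ${name} (${features})`; // '♊️ Google/Gemini 3 Pro (👁️🛠️)'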
@@ -753,21 +752,22 @@ const packResp = async (resp, options) => {
         ...annotationsMarkdown ? { annotationsMarkdown } : {},
         ...audio ? { audio } : {}, ...images?.length ? { images } : {},
         processing: !!options?.processing,
-        model:
+        model: packModelId([
             options.provider, options?.router?.provider,
             options?.router?.model || options?.model,
         ]),
     };
 };
 
-const
+const packModelId = (model_reference, options = {}) => {
     const catched = new Set();
-
+    const ref = model_reference.join('/').split('/').map(x => {
         const key = ensureString(x, { case: 'UP' });
         if (catched.has(key)) { return null; }
         catched.add(key);
-        return x;
-    }).filter(x => x)
+        return ensureString(x, options);
+    }).filter(x => x);
+    return options?.raw ? ref : ref.join('/');
 };
 
 const buildPrompts = async (model, input, options = {}) => {
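Note: the new packModelId helper normalizes a list of provider/source/model segments into one id: it flattens on '/', drops case-insensitive duplicates, applies any ensureString casing options to each surviving segment, and returns either the raw array (raw: true) or a '/'-joined string. A self-contained sketch with a stub standing in for utilitas's ensureString (an assumption: the stub only handles the { case: 'UP' } option used here):

    // Stub for utilitas's ensureString; assumed behavior for this sketch only.
    const ensureString = (x, opts = {}) =>
        opts.case === 'UP' ? String(x).toUpperCase() : String(x);

    const packModelId = (ref, options = {}) => {
        const seen = new Set();
        const parts = ref.join('/').split('/').map(x => {
            const key = ensureString(x, { case: 'UP' });
            if (seen.has(key)) { return null; } // de-dupe, case-insensitively
            seen.add(key);
            return ensureString(x, options);
        }).filter(x => x);
        return options?.raw ? parts : parts.join('/');
    };

    packModelId(['OpenRouter', 'Google', 'google/gemini-3-pro-preview']);
    // => 'OpenRouter/Google/gemini-3-pro-preview' (duplicate 'google' dropped)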
@@ -904,7 +904,7 @@ const promptOpenRouter = async (aiId, content, options = {}) => {
     } else if (!modalities && model.image) {
         modalities = [TEXT, IMAGE];
     }
-    const googleImageMode = source ===
+    const googleImageMode = source === GOOGLE && modalities?.includes?.(IMAGE);
     const packedTools = _tools.map(x => x.def);
     const ext = provider === OPENROUTER && !packedTools?.find(
         x => x.function.name === 'searchWeb'
@@ -922,7 +922,7 @@ const promptOpenRouter = async (aiId, content, options = {}) => {
         { type: 'code_interpreter', container: { type: 'auto', memory_limit: '8g' } },
         ]);
     }
-    if (source ===
+    if (source === GOOGLE) {
         packedTools.push(...[
             { googleSearch: {} }, { codeExecution: {} }, { urlContext: {} },
             // { googleMaps: {} }, // https://ai.google.dev/gemini-api/docs/maps-grounding // NOT for Gemini 3
@@ -1019,13 +1019,13 @@ const promptOpenRouter = async (aiId, content, options = {}) => {
         ...annotations.length ? { annotations } : {},
     };
     switch (source) {
-        case
+        case ANTHROPIC:
             event.content = reasoning_details.map(x => ({
                 type: 'thinking', thinking: x.text,
                 ...x.signature ? { signature: x.signature } : {},
             }));
             break;
-        case
+        case GOOGLE:
             reasoning_details?.length
                 && (event.reasoning_details = reasoning_details);
     }
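Note: the switch now branches on the model's source. For Anthropic, OpenRouter's reasoning_details are repacked into Claude-style thinking content blocks, preserving the signature when present; for Google, they are attached to the event unchanged. A sketch of the Anthropic repacking with an illustrative input:

    // Illustrative input shaped like OpenRouter reasoning_details:
    const reasoning_details = [{ text: 'step 1...', signature: 'abc123' }];
    const content = reasoning_details.map(x => ({
        type: 'thinking', thinking: x.text,
        ...x.signature ? { signature: x.signature } : {},
    }));
    // => [{ type: 'thinking', thinking: 'step 1...', signature: 'abc123' }]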
@@ -1073,7 +1073,7 @@ const promptGoogle = async (aiId, prompt, options = {}) => {
             data: await convert(x.image.imageBytes, {
                 input: BASE64, suffix: 'png', ...options || {}
             }), mimeType: x.image.mimeType,
-        }))), model:
+        }))), model: packModelId([provider, M.source, M.name]),
         }
     }
     } else if (M?.video) {
@@ -1121,7 +1121,7 @@ const promptGoogle = async (aiId, prompt, options = {}) => {
                 input: FILE, suffix: 'mp4', ...options || {}
             }), mimeType: MIME_MP4, jobId: resp.name,
         };
-        })), model:
+        })), model: packModelId([provider, M.source, M.name]),
         };
     }
     } else if (M?.audio) { // https://ai.google.dev/gemini-api/docs/speech-generation#voices
@@ -1146,7 +1146,7 @@ const promptGoogle = async (aiId, prompt, options = {}) => {
             data: await packPcmToWav(rawAudio?.data, {
                 input: BASE64, suffix: wav, ...options || {},
             }), mimeType: MIME_WAV,
-        }, model:
+        }, model: packModelId([provider, M.source, M.name]),
         };
     }
     } else {
@@ -1182,7 +1182,7 @@ const promptOpenAI = async (aiId, prompt, options = {}) => {
             await resp.arrayBuffer()
         ), { suffix: OGG_EXT, ...options || {} }),
         mimeType: MIME_OGG,
-        }, model:
+        }, model: packModelId([provider, M.source, M.name]),
         };
     }
     } else if (M?.hearing) {
@@ -1202,7 +1202,7 @@ const promptOpenAI = async (aiId, prompt, options = {}) => {
     if (!options?.raw) {
         resp = {
             text: resp.trim(),
-            model:
+            model: packModelId([provider, M.source, M.name]),
         };
     }
     } else {
@@ -1339,7 +1339,7 @@ const stt = async (audio, options = {}) => await distillFile(
 
 const prompt = async (input, options = {}) => {
     const ai = await getAi(options?.aiId, options);
-    const tag =
+    const tag = packModelId([ai.provider, ai.model.source, ai.model.name]);
     options.log && log(`Prompt ${tag}: ${JSON.stringify(input || '[ATTACHMENTS]')}`);
     const resp = await ai.prompt(input, options);
     const msgs = options?.messages || [];
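Note: across the prompt* functions above, the hand-built model strings are all replaced by the same packModelId([provider, M.source, M.name]) call, so responses and log tags share one provider/source/model format. Because packModelId de-duplicates segments case-insensitively, a call like packModelId(['Google', 'Google', 'gemini-3-pro-preview']) would yield 'Google/gemini-3-pro-preview' (illustrative, assuming ensureString leaves casing untouched when no case option is given).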
package/lib/manifest.mjs
CHANGED