utilitas 2000.3.57 → 2000.3.59
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/utilitas.lite.mjs +1 -1
- package/dist/utilitas.lite.mjs.map +1 -1
- package/lib/alan.mjs +45 -43
- package/lib/manifest.mjs +1 -1
- package/package.json +1 -1
package/lib/alan.mjs
CHANGED
@@ -51,10 +51,9 @@ const [
     PROMPT_IS_REQUIRED, OPENAI_HI_RES_SIZE, k, m, minute, hour, gb, trimTailing,
     trimBeginning, GEMINI_30_PRO_IMAGE, IMAGE, JINA, JINA_DEEPSEARCH,
     SILICONFLOW, SF_DEEPSEEK_32, MAX_TIRE, OPENROUTER_API, OPENROUTER, AUTO,
-    TOOL, …
-    …
-    …
-    GPT_4O_TRANSCRIBE, INVALID_AUDIO, OGG_EXT, ELLIPSIS,
+    TOOL, ONLINE, GEMINI_30_PRO, GEMINI_25_FLASH, IMAGEN_4_ULTRA, VEO_31,
+    IMAGEN_4_UPSCALE, ERROR_GENERATING, GEMINI_25_FLASH_TTS, GEMINI_25_PRO_TTS,
+    wav, GPT_4O_MIMI_TTS, GPT_4O_TRANSCRIBE, INVALID_AUDIO, OGG_EXT, ELLIPSIS,
 ] = [
     'OpenAI', 'Google', 'Ollama', 'nova', 'deepseek-3.2-speciale', '```',
     'claude-opus-4.5', 'audio', 'wav', 'OPENAI_VOICE', 'medium', 'think',
@@ -68,12 +67,12 @@ const [
     'gemini-3-pro-image-preview', 'image', 'Jina', 'jina-deepsearch-v1',
     'SiliconFlow', 'deepseek-ai/DeepSeek-V3.2-exp', 768 * 768,
     'https://openrouter.ai/api/v1', 'OpenRouter', 'openrouter/auto', 'tool',
-    '…
-    '…
-    '…
-    '…
-    '…
-    '…
+    ':online', 'gemini-3-pro-preview', 'gemini-2.5-flash-preview-09-2025',
+    'imagen-4.0-ultra-generate-001', 'veo-3.1-generate-preview',
+    'imagen-4.0-upscale-preview', 'Error generating content.',
+    'gemini-2.5-flash-preview-tts', 'gemini-2.5-pro-tts', 'wav',
+    'gpt-4o-mini-tts', 'gpt-4o-transcribe', 'Invalid audio data.', 'ogg',
+    '...',
 ];
 
 const [tool, messages, text]
@@ -95,7 +94,7 @@ const libOpenAi = async opts => await need('openai', { ...opts, raw: true });
 const buildTextWithEllipsis = (txt, trim) => `${txt}${(trim ? ELLIPSIS : '')}`;
 
 const GEMINI_RULES = {
-    source: …
+    source: GOOGLE, icon: '♊️',
     json: true, audioCostTokens: 1000 * 1000 * 1, // 8.4 hours => 1 million tokens
     imageCostTokens: ~~(v8k / MAX_TIRE * 258), maxAudioLength: hour(8.4),
     maxAudioPerPrompt: 1, maxFileSize: m(20), maxImagePerPrompt: 3000,
@@ -110,7 +109,7 @@ const GEMINI_RULES = {
 };
 
 const OPENAI_RULES = {
-    source: …
+    source: OPENAI, icon: '⚛️',
     contextWindow: k(400), maxOutputTokens: k(128),
     imageCostTokens: ~~(OPENAI_HI_RES_SIZE / MAX_TIRE * 140 + 70),
     maxFileSize: m(50), maxImageSize: OPENAI_HI_RES_SIZE,
@@ -147,11 +146,11 @@ const MODELS = {
         contextWindow: k(64), maxOutputTokens: k(32), image: true, tools: false,
     },
     [IMAGEN_4_ULTRA]: {
-        source: …
+        source: GOOGLE, maxInputTokens: 480,
         image: true, defaultProvider: GOOGLE,
     },
     [VEO_31]: {
-        source: …
+        source: GOOGLE, maxInputTokens: 1024,
         imageCostTokens: 0, maxImagePerPrompt: 1,
         maxImageSize: Infinity, vision: true, video: true,
         supportedMimeTypes: [MIME_PNG, MIME_JPEG], defaultProvider: GOOGLE,
@@ -162,7 +161,7 @@ const MODELS = {
     // models with code capabilities
     [GPT_51_CODEX]: { ...OPENAI_RULES },
     [CLOUD_OPUS_45]: {
-        source: …
+        source: ANTHROPIC, icon: '✳️',
         contextWindow: k(200), maxOutputTokens: k(64),
         documentCostTokens: 3000 * 10, maxDocumentFile: m(32),
         maxDocumentPages: 100, imageCostTokens: ~~(v8k / 750),
@@ -175,19 +174,19 @@ const MODELS = {
     },
     // tts/stt models
     [GEMINI_25_FLASH_TTS]: {
-        source: …
+        source: GOOGLE, maxInputTokens: k(32), audio: true, fast: true,
         hidden: true, defaultProvider: GOOGLE,
     },
     [GEMINI_25_PRO_TTS]: {
-        source: …
+        source: GOOGLE, maxInputTokens: k(32), audio: true,
         hidden: true, defaultProvider: GOOGLE,
     },
     [GPT_4O_MIMI_TTS]: {
-        source: …
+        source: OPENAI, maxInputTokens: k(2), audio: true, fast: true,
         hidden: true, defaultProvider: OPENAI,
     },
     [GPT_4O_TRANSCRIBE]: {
-        source: …
+        source: OPENAI, maxInputTokens: 0, hearing: true, fast: true,
         hidden: true, defaultProvider: OPENAI,
     },
     // models with deepsearch capabilities
@@ -384,15 +383,15 @@ const packTools = async () => {
     return _tools;
 };
 
-const buildAiId = (provider, model) => [
+const buildAiId = (provider, model) => packModelId([
     provider, ...isOpenrouter(provider, model) ? [model.source] : [],
     model?.name
-]…
+], { case: 'SNAKE', raw: true }).join('_');
 
-const buildAiName = (provider, model) => [
-    …
-    …
-]…
+const buildAiName = (provider, model) => packModelId([
+    provider, ...isOpenrouter(provider, model) ? [model.source] : [],
+    model.label || model.name
+]);
 
 const buildAiFeatures = model => Object.entries(FEATURE_ICONS).map(
     x => model[x[0]] ? x[1] : ''
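Note: both builders above now funnel through packModelId, which is introduced further down in this diff. A rough sketch of the intended shapes with an illustrative input (the exact id casing depends on utilitas' ensureString SNAKE handling, so treat it as an assumption):

    // Hypothetical model record, for illustration only.
    const model = { source: 'Google', name: 'gemini-3-pro-preview', label: 'Gemini 3 Pro' };
    // buildAiId('OpenRouter', model)   -> SNAKE-cased segments joined by '_',
    //   e.g. something like 'OPENROUTER_GOOGLE_GEMINI_3_PRO_PREVIEW' (assumed casing)
    // buildAiName('OpenRouter', model) -> display segments joined by '/',
    //   e.g. 'OpenRouter/Google/Gemini 3 Pro' (model.label preferred over model.name)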
@@ -400,10 +399,12 @@ const buildAiFeatures = model => Object.entries(FEATURE_ICONS).map(
 
 const setupAi = ai => {
     const id = buildAiId(ai.provider, ai.model);
+    const name = buildAiName(ai.provider, ai.model);
+    const icon = ai.model?.icon || getProviderIcon(ai.provider);
+    const features = buildAiFeatures(ai.model);
     ais.push({
-        id, name: …
-        …
-        ...ai, priority: ai.priority || 0,
+        id, icon, name, features, label: `${icon} ${name} (${features})`,
+        initOrder: ais.length, ...ai, priority: ai.priority || 0,
         prompt: async (text, opts) => await ai.prompt(id, text, opts),
     });
 };
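For readers skimming the diff: setupAi now precomputes the display pieces once per registered AI and stores a ready-made label. With illustrative values (not taken from this package):

    const icon = '♊️', name = 'Google/gemini-3-pro-preview', features = '👁️🛠️';
    const label = `${icon} ${name} (${features})`; // '♊️ Google/gemini-3-pro-preview (👁️🛠️)'

The new initOrder field records the registration index, presumably so the original setup order can be recovered after any priority-based sorting.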
@@ -751,21 +752,22 @@ const packResp = async (resp, options) => {
         ...annotationsMarkdown ? { annotationsMarkdown } : {},
         ...audio ? { audio } : {}, ...images?.length ? { images } : {},
         processing: !!options?.processing,
-        model: …
+        model: packModelId([
             options.provider, options?.router?.provider,
             options?.router?.model || options?.model,
         ]),
     };
 };
 
-const …
+const packModelId = (model_reference, options = {}) => {
     const catched = new Set();
-    …
+    const ref = model_reference.join('/').split('/').map(x => {
         const key = ensureString(x, { case: 'UP' });
         if (catched.has(key)) { return null; }
         catched.add(key);
-        return x;
-    }).filter(x => x)…
+        return ensureString(x, options);
+    }).filter(x => x);
+    return options?.raw ? ref : ref.join('/');
 };
 
 const buildPrompts = async (model, input, options = {}) => {
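The new packModelId helper in the hunk above flattens a list of provider/model segments into one path-style id and drops case-insensitive duplicates (e.g. a provider name repeated inside a model path). A standalone sketch, with a simplified stand-in for utilitas' ensureString (an assumption; the real helper supports more casing options):

    // Simplified stand-in for utilitas' ensureString, assumed for this sketch.
    const ensureString = (str, options = {}) => {
        const s = String(str ?? '');
        return options.case === 'UP' ? s.toUpperCase() : s;
    };

    const packModelId = (model_reference, options = {}) => {
        const catched = new Set();
        const ref = model_reference.join('/').split('/').map(x => {
            const key = ensureString(x, { case: 'UP' }); // case-insensitive dedupe key
            if (catched.has(key)) { return null; }
            catched.add(key);
            return ensureString(x, options);             // keep caller's casing options
        }).filter(x => x);
        return options?.raw ? ref : ref.join('/');       // raw: array; default: 'a/b/c' string
    };

    // packModelId(['OpenRouter', 'Google', 'Google/gemini-3-pro-preview'])
    //   -> 'OpenRouter/Google/gemini-3-pro-preview' (duplicate 'Google' collapsed)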
@@ -902,7 +904,7 @@ const promptOpenRouter = async (aiId, content, options = {}) => {
     } else if (!modalities && model.image) {
         modalities = [TEXT, IMAGE];
     }
-    const googleImageMode = source === …
+    const googleImageMode = source === GOOGLE && modalities?.includes?.(IMAGE);
     const packedTools = _tools.map(x => x.def);
     const ext = provider === OPENROUTER && !packedTools?.find(
         x => x.function.name === 'searchWeb'
@@ -920,7 +922,7 @@ const promptOpenRouter = async (aiId, content, options = {}) => {
             { type: 'code_interpreter', container: { type: 'auto', memory_limit: '8g' } },
         ]);
     }
-    if (source === …
+    if (source === GOOGLE) {
         packedTools.push(...[
             { googleSearch: {} }, { codeExecution: {} }, { urlContext: {} },
             // { googleMaps: {} }, // https://ai.google.dev/gemini-api/docs/maps-grounding // NOT for Gemini 3
@@ -1017,13 +1019,13 @@ const promptOpenRouter = async (aiId, content, options = {}) => {
         ...annotations.length ? { annotations } : {},
     };
     switch (source) {
-        case …
+        case ANTHROPIC:
             event.content = reasoning_details.map(x => ({
                 type: 'thinking', thinking: x.text,
                 ...x.signature ? { signature: x.signature } : {},
             }));
             break;
-        case …
+        case GOOGLE:
             reasoning_details?.length
                 && (event.reasoning_details = reasoning_details);
     }
@@ -1071,7 +1073,7 @@ const promptGoogle = async (aiId, prompt, options = {}) => {
                 data: await convert(x.image.imageBytes, {
                     input: BASE64, suffix: 'png', ...options || {}
                 }), mimeType: x.image.mimeType,
-            }))), model: …
+            }))), model: packModelId([provider, M.source, M.name]),
             }
         }
     } else if (M?.video) {
@@ -1119,7 +1121,7 @@ const promptGoogle = async (aiId, prompt, options = {}) => {
                     input: FILE, suffix: 'mp4', ...options || {}
                 }), mimeType: MIME_MP4, jobId: resp.name,
             };
-        })), model: …
+        })), model: packModelId([provider, M.source, M.name]),
         };
     }
     } else if (M?.audio) { // https://ai.google.dev/gemini-api/docs/speech-generation#voices
@@ -1144,7 +1146,7 @@ const promptGoogle = async (aiId, prompt, options = {}) => {
             data: await packPcmToWav(rawAudio?.data, {
                 input: BASE64, suffix: wav, ...options || {},
             }), mimeType: MIME_WAV,
-            }, model: …
+            }, model: packModelId([provider, M.source, M.name]),
             };
         }
     } else {
@@ -1180,7 +1182,7 @@ const promptOpenAI = async (aiId, prompt, options = {}) => {
                 await resp.arrayBuffer()
             ), { suffix: OGG_EXT, ...options || {} }),
             mimeType: MIME_OGG,
-            }, model: …
+            }, model: packModelId([provider, M.source, M.name]),
             };
         }
     } else if (M?.hearing) {
@@ -1200,7 +1202,7 @@ const promptOpenAI = async (aiId, prompt, options = {}) => {
         if (!options?.raw) {
             resp = {
                 text: resp.trim(),
-                model: …
+                model: packModelId([provider, M.source, M.name]),
             };
         }
     } else {
@@ -1337,7 +1339,7 @@ const stt = async (audio, options = {}) => await distillFile(
 
 const prompt = async (input, options = {}) => {
     const ai = await getAi(options?.aiId, options);
-    const tag = …
+    const tag = packModelId([ai.provider, ai.model.source, ai.model.name]);
     options.log && log(`Prompt ${tag}: ${JSON.stringify(input || '[ATTACHMENTS]')}`);
     const resp = await ai.prompt(input, options);
     const msgs = options?.messages || [];
package/lib/manifest.mjs
CHANGED