utilitas 2000.3.55 → 2000.3.57
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/utilitas.lite.mjs +1 -1
- package/dist/utilitas.lite.mjs.map +1 -1
- package/lib/alan.mjs +35 -28
- package/lib/manifest.mjs +1 -1
- package/package.json +1 -1
package/lib/alan.mjs
CHANGED
@@ -48,13 +48,13 @@ const [
     TOOLS_STR, TOOLS_END, TOOLS, TEXT, OK, FUNC, GPT_52, GPT_51_CODEX,
     GPT_5_IMAGE, GEMMA_3_27B, ANTHROPIC, v8k, ais, MAX_TOOL_RECURSION, LOG,
     name, user, system, assistant, MODEL, JSON_OBJECT, tokenSafeRatio,
-    PROMPT_IS_REQUIRED, OPENAI_HI_RES_SIZE, k,
-
-
-
-
-
-
+    PROMPT_IS_REQUIRED, OPENAI_HI_RES_SIZE, k, m, minute, hour, gb, trimTailing,
+    trimBeginning, GEMINI_30_PRO_IMAGE, IMAGE, JINA, JINA_DEEPSEARCH,
+    SILICONFLOW, SF_DEEPSEEK_32, MAX_TIRE, OPENROUTER_API, OPENROUTER, AUTO,
+    TOOL, S_OPENAI, S_GOOGLE, S_ANTHROPIC, ONLINE, GEMINI_30_PRO,
+    GEMINI_25_FLASH, IMAGEN_4_ULTRA, VEO_31, IMAGEN_4_UPSCALE, ERROR_GENERATING,
+    GEMINI_25_FLASH_TTS, GEMINI_25_PRO_TTS, wav, GPT_4O_MIMI_TTS,
+    GPT_4O_TRANSCRIBE, INVALID_AUDIO, OGG_EXT, ELLIPSIS,
 ] = [
     'OpenAI', 'Google', 'Ollama', 'nova', 'deepseek-3.2-speciale', '```',
     'claude-opus-4.5', 'audio', 'wav', 'OPENAI_VOICE', 'medium', 'think',
@@ -62,12 +62,11 @@ const [
     'function', 'gpt-5.2', 'gpt-5.1-codex', 'gpt-5-image', 'gemma3:27b',
     'Anthropic', 7680 * 4320, [], 30, { log: true }, 'Alan', 'user',
     { role: 'system' }, 'assistant', 'model', 'json_object', 1.1,
-    'Prompt is required.', 2048 * 2048, x =>
-    x =>
-    x =>
-
-    '
-    'deepseek-ai/DeepSeek-V3.2-exp', 768 * 768,
+    'Prompt is required.', 2048 * 2048, x => 1000 * x, x => 1000 * 1000 * x,
+    x => 60 * x, x => 60 * 60 * x, x => 1000 * 1000 * 1000 * x,
+    x => x.replace(/[\.\s]*$/, ''), x => x.replace(/^[\.\s]*/, ''),
+    'gemini-3-pro-image-preview', 'image', 'Jina', 'jina-deepsearch-v1',
+    'SiliconFlow', 'deepseek-ai/DeepSeek-V3.2-exp', 768 * 768,
     'https://openrouter.ai/api/v1', 'OpenRouter', 'openrouter/auto', 'tool',
     'openai', 'google', 'anthropic', ':online', 'gemini-3-pro-preview',
     'gemini-2.5-flash-preview-09-2025', 'imagen-4.0-ultra-generate-001',
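The seven helper names added after OPENAI_HI_RES_SIZE line up, position for position, with the seven arrow functions added to the value list. A minimal sketch of that pairing, reproduced from the + lines of this hunk; the name-to-value mapping is inferred from the destructuring order, not stated explicitly in the diff:

    // Helpers destructured from the shared constant table in alan.mjs
    // (pairing inferred from the diff, not spelled out in it).
    const k = x => 1000 * x;                        // thousands: k(400) === 400000
    const m = x => 1000 * 1000 * x;                 // millions: m(50) === 50000000
    const minute = x => 60 * x;                     // 60 * x (e.g. minutes -> seconds)
    const hour = x => 60 * 60 * x;                  // 3600 * x
    const gb = x => 1000 * 1000 * 1000 * x;         // 1e9 * x (e.g. gigabytes -> bytes)
    const trimTailing = x => x.replace(/[\.\s]*$/, '');    // strip trailing dots/whitespace
    const trimBeginning = x => x.replace(/^[\.\s]*/, '');  // strip leading dots/whitespace

    console.log(k(128), m(32), minute(5), hour(2), gb(1));
    // 128000 32000000 300 7200 1000000000
    console.log(trimTailing('Done. '), trimBeginning('...ok'));
    // Done ok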
@@ -112,7 +111,7 @@ const GEMINI_RULES = {
 
 const OPENAI_RULES = {
     source: S_OPENAI, icon: '⚛️',
-    contextWindow:
+    contextWindow: k(400), maxOutputTokens: k(128),
     imageCostTokens: ~~(OPENAI_HI_RES_SIZE / MAX_TIRE * 140 + 70),
     maxFileSize: m(50), maxImageSize: OPENAI_HI_RES_SIZE,
     json: true, tools: true, vision: true, hearing: true, reasoning: true,
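The only non-obvious figure in the reworked OPENAI_RULES is imageCostTokens. A worked evaluation, assuming OPENAI_HI_RES_SIZE = 2048 * 2048 and MAX_TIRE = 768 * 768, as their positions in the constant table above suggest:

    // Hi-res image cost under the assumed constant values.
    const k = x => 1000 * x;
    const OPENAI_HI_RES_SIZE = 2048 * 2048;    // 4,194,304 px (assumed mapping)
    const MAX_TIRE = 768 * 768;                // 589,824 px (assumed mapping)
    console.log(~~(OPENAI_HI_RES_SIZE / MAX_TIRE * 140 + 70));  // 1065 tokens per hi-res image
    console.log(k(400), k(128));               // 400000-token context, 128000-token output cap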
@@ -122,7 +121,7 @@ const OPENAI_RULES = {
 };
 
 const DEEPSEEK_32_RULES = {
-    icon: '🐬', contextWindow:
+    icon: '🐬', contextWindow: k(163.8), maxOutputTokens: k(65.5),
     json: true, tools: true, reasoning: true,
 };
 
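DEEPSEEK_32_RULES gets the same explicit budgets. Since k is plain multiplication it also takes fractional arguments, so k(163.8) and k(65.5) express roughly 163,800 and exactly 65,500 tokens:

    const k = x => 1000 * x;
    console.log(k(163.8));   // ~163800 (163.8 is not exactly representable as a binary float)
    console.log(k(65.5));    // 65500 (65.5 is exactly representable)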
@@ -164,7 +163,7 @@ const MODELS = {
     [GPT_51_CODEX]: { ...OPENAI_RULES },
     [CLOUD_OPUS_45]: {
         source: S_ANTHROPIC, icon: '✳️',
-        contextWindow:
+        contextWindow: k(200), maxOutputTokens: k(64),
         documentCostTokens: 3000 * 10, maxDocumentFile: m(32),
         maxDocumentPages: 100, imageCostTokens: ~~(v8k / 750),
         maxImagePerPrompt: 100, maxFileSize: m(5), maxImageSize: 2000 * 2000,
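As the unchanged context lines show (for example [GPT_51_CODEX]: { ...OPENAI_RULES }), each MODELS entry spreads one of the shared rule objects and then overrides or extends it per model. A minimal sketch of that pattern; the rule contents and the second model key here are illustrative, not taken from alan.mjs:

    // Layering model-specific fields over a shared rule object via spread.
    const k = x => 1000 * x;
    const OPENAI_RULES = { contextWindow: k(400), maxOutputTokens: k(128), json: true };
    const GPT_51_CODEX = 'gpt-5.1-codex';

    const MODELS = {
        [GPT_51_CODEX]: { ...OPENAI_RULES },                        // inherit the shared rules as-is
        'hypothetical-fast-model': { ...OPENAI_RULES, fast: true },  // override/extend per model
    };
    console.log(MODELS[GPT_51_CODEX].contextWindow);                // 400000
    console.log(MODELS['hypothetical-fast-model'].fast);            // true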
@@ -176,15 +175,15 @@ const MODELS = {
     },
     // tts/stt models
     [GEMINI_25_FLASH_TTS]: {
-        source: S_GOOGLE, maxInputTokens:
+        source: S_GOOGLE, maxInputTokens: k(32), audio: true, fast: true,
         hidden: true, defaultProvider: GOOGLE,
     },
     [GEMINI_25_PRO_TTS]: {
-        source: S_GOOGLE, maxInputTokens:
+        source: S_GOOGLE, maxInputTokens: k(32), audio: true,
         hidden: true, defaultProvider: GOOGLE,
     },
     [GPT_4O_MIMI_TTS]: {
-        source: S_OPENAI, maxInputTokens:
+        source: S_OPENAI, maxInputTokens: k(2), audio: true, fast: true,
         hidden: true, defaultProvider: OPENAI,
     },
     [GPT_4O_TRANSCRIBE]: {
@@ -204,7 +203,7 @@ const MODELS = {
     [SF_DEEPSEEK_32]: { ...DEEPSEEK_32_RULES, defaultProvider: SILICONFLOW },
     // best local model
     [GEMMA_3_27B]: {
-        icon: '❇️', contextWindow:
+        icon: '❇️', contextWindow: k(128), maxOutputTokens: k(8),
         imageCostTokens: 256, maxImageSize: 896 * 896,
         supportedMimeTypes: [MIME_PNG, MIME_JPEG, MIME_GIF],
         fast: true, json: true, vision: true,
@@ -1284,9 +1283,9 @@ const talk = async (input, options = {}) => {
 };
 
 const getChatPromptLimit = async (options) => {
-    let resp = 0;
+    let [resp, aiId] = [0, ensureArray(options?.aiId).filter(x => x)];
     (await getAi(null, { all: true })).map(x => {
-        if (
+        if (aiId.length && !aiId.includes(x.id)) { return; }
         const maxInputTokens = x.model.maxInputTokens;
         resp = resp ? Math.min(resp, maxInputTokens) : maxInputTokens;
     });
@@ -1295,9 +1294,9 @@ const getChatPromptLimit = async (options) => {
 };
 
 const getChatAttachmentCost = async (options) => {
-    let resp = 0;
+    let [resp, aiId] = [0, ensureArray(options?.aiId).filter(x => x)];
     (await getAi(null, { all: true })).map(x => {
-        if (
+        if (aiId.length && !aiId.includes(x.id)) { return; }
         resp = Math.max(resp, x.model.imageCostTokens || 0);
     });
     assert(resp > 0, 'Chat engine has not been initialized.');
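getChatPromptLimit and getChatAttachmentCost now take an optional options.aiId (a single id or an array) and skip any AI not in that list; the former still keeps the minimum maxInputTokens across the surviving AIs, the latter the maximum imageCostTokens. A self-contained sketch of the filter, with getAi, ensureArray, and the AI ids stubbed (the real function continues past the raw minimum shown here):

    // Sketch of the aiId filter shared by both limit helpers. getAi and
    // ensureArray are stand-in stubs; in utilitas they come from the library itself.
    const ensureArray = x => Array.isArray(x) ? x : (x == null ? [] : [x]);
    const getAi = async (_, { all } = {}) => [
        { id: 'ai-a', model: { maxInputTokens: 400000, imageCostTokens: 1065 } },
        { id: 'ai-b', model: { maxInputTokens: 1000000, imageCostTokens: 258 } },
    ];

    const getChatPromptLimit = async (options) => {
        let [resp, aiId] = [0, ensureArray(options?.aiId).filter(x => x)];
        (await getAi(null, { all: true })).map(x => {
            if (aiId.length && !aiId.includes(x.id)) { return; }
            const maxInputTokens = x.model.maxInputTokens;
            resp = resp ? Math.min(resp, maxInputTokens) : maxInputTokens;
        });
        return resp;
    };

    console.log(await getChatPromptLimit({}));                // 400000 (min across all AIs)
    console.log(await getChatPromptLimit({ aiId: 'ai-b' }));  // 1000000 (only the named AI)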
@@ -1341,12 +1340,17 @@ const prompt = async (input, options = {}) => {
     const tag = packModelLabel([ai.provider, ai.model.source, ai.model.name]);
     options.log && log(`Prompt ${tag}: ${JSON.stringify(input || '[ATTACHMENTS]')}`);
     const resp = await ai.prompt(input, options);
+    const msgs = options?.messages || [];
+    const [rag_msgs, ctx_msgs] = [
+        msgs.filter(x => !!x.score).length, msgs.filter(x => !x.score).length
+    ];
+    options.log && log(`w/ RAG: ${rag_msgs}, context: ${ctx_msgs}, attachments: ${~~options?.attachments?.length}.`);
     options.log && log(`Response ${tag}: ${JSON.stringify(resp.text)}`);
     return resp;
 };
 
 const trimPrompt = async (getPrompt, trimFunc, contextWindow, options) => {
-    let [i, maxTry] = [0, ~~options?.maxTry ||
+    let [i, maxTry] = [0, ~~options?.maxTry || k(128)];
     while ((await countTokens(await getPrompt(), { fast: true }) > contextWindow)
         || (await countTokens(await getPrompt()) > contextWindow)) {
         await trimFunc();
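prompt() now reports how the outgoing context was assembled: judging by the log label, messages that carry a score are counted as RAG retrievals and the rest as plain chat context, alongside the attachment count; trimPrompt's retry cap also defaults to k(128) instead of a bare number. A small sketch of the message split, with an illustrative messages array (the score field and its RAG meaning are an assumption based on the label):

    // Splitting options.messages into scored (RAG) and unscored (context) entries.
    const msgs = [
        { role: 'user', content: 'What changed in 2000.3.57?' },
        { role: 'assistant', content: 'Mostly alan.mjs.' },
        { role: 'system', content: 'retrieved snippet...', score: 0.87 },  // illustrative RAG hit
    ];
    const [rag_msgs, ctx_msgs] = [
        msgs.filter(x => !!x.score).length, msgs.filter(x => !x.score).length
    ];
    console.log(`w/ RAG: ${rag_msgs}, context: ${ctx_msgs}.`);  // w/ RAG: 1, context: 2.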
@@ -1365,6 +1369,10 @@ const analyzeSessions = async (sessionIds, options) => {
         ));
         if (sm.length) { sses[ids[i]] = sm; }
     }
+    const ai = await getAi(options?.aiId, {
+        jsonMode: true, simple: true, select: { json: true, fast: true },
+        ...options || {}
+    });
     const pmt = options?.prompt || (
         'Help me organize the dialogues in the following JSON into a title '
         + 'dictionary and return it in JSON format. The input data may contain '
@@ -1384,10 +1392,9 @@ const analyzeSessions = async (sessionIds, options) => {
             x, JSON.stringify(sses[x]).length,
         ]).sort((x, y) => y[1] - x[1])[0][0]];
     }
-    }, await getChatPromptLimit(options));
+    }, await getChatPromptLimit({ aiId: ai.id, ...options, }));
     const aiResp = Object.keys(sses) ? (await prompt(getInput(), {
-
-        ...options || {}
+        aiId: ai.id, ...options || {}
     })) : {};
     assert(aiResp, 'Unable to analyze sessions.');
     ids.map(x => resp[x] = aiResp[x] || null);
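The analyzeSessions change is about keeping one AI in charge of the whole pass: getAi(options?.aiId, …) resolves a single AI up front, and its id is then threaded into getChatPromptLimit (so trimming matches that AI's window) and into the final prompt call (so the same AI produces the answer). A sketch of the threading only, with the utilitas calls replaced by stubs:

    // Only the id threading is real here; getAi, getChatPromptLimit and prompt
    // are stand-in stubs for the functions defined elsewhere in alan.mjs.
    const getAi = async (aiId, opts) => ({ id: aiId || 'default-ai', opts });
    const getChatPromptLimit = async ({ aiId }) => { console.log('limit for', aiId); return 400000; };
    const prompt = async (input, { aiId }) => { console.log('prompting', aiId); return {}; };

    const analyzeSessionsSketch = async (options = {}) => {
        const ai = await getAi(options?.aiId, {
            jsonMode: true, simple: true, select: { json: true, fast: true },
            ...options || {},
        });
        await getChatPromptLimit({ aiId: ai.id, ...options });            // trim to this AI's window
        return await prompt('[sessions JSON]', { aiId: ai.id, ...options || {} });  // same AI answers
    };

    await analyzeSessionsSketch({ aiId: 'ai-b' });
    // limit for ai-b
    // prompting ai-b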
package/lib/manifest.mjs
CHANGED