utilitas 1998.2.61 → 1998.2.62
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -1
- package/dist/utilitas.lite.mjs +1 -1
- package/dist/utilitas.lite.mjs.map +1 -1
- package/lib/alan.mjs +20 -9
- package/lib/manifest.mjs +1 -1
- package/lib/utilitas.mjs +4 -2
- package/package.json +1 -1
package/lib/alan.mjs
CHANGED
@@ -49,7 +49,7 @@ const [
     OPENAI, GEMINI, CHATGPT, OPENAI_EMBEDDING, GEMINI_EMEDDING, OPENAI_TRAINING,
     OLLAMA, CLAUDE, GPT_4O_MINI, GPT_4O, GPT_O1, GPT_O3_MINI, GEMINI_20_FLASH,
     GEMINI_20_FLASH_THINKING, GEMINI_20_PRO, NOVA, EMBEDDING_001, DEEPSEEK_R1,
-    DEEPSEEK_R1_70B, MD_CODE, CHATGPT_REASONING, TEXT_EMBEDDING_3_SMALL,
+    DEEPSEEK_R1_70B, DEEPSEEK_R1_32B, MD_CODE, CHATGPT_REASONING, TEXT_EMBEDDING_3_SMALL,
     TEXT_EMBEDDING_3_LARGE, CLOUD_37_SONNET, AUDIO, WAV, CHATGPT_MINI,
     ATTACHMENTS, CHAT, OPENAI_VOICE, MEDIUM, LOW, HIGH, GPT_REASONING_EFFORT,
     THINK, THINK_STR, THINK_END, AZURE, TOOLS_STR, TOOLS_END, TOOLS, TEXT,
@@ -62,11 +62,12 @@ const [
     'OPENAI_TRAINING', 'OLLAMA', 'CLAUDE', 'gpt-4o-mini', 'gpt-4o', 'o1',
     'o3-mini', 'gemini-2.0-flash', 'gemini-2.0-flash-thinking-exp',
     'gemini-2.0-pro-exp', 'nova', 'embedding-001', 'deepseek-r1',
-    'deepseek-r1:70b', '
-    'text-embedding-3-
-    '
-    '
-    '
+    'deepseek-r1:70b', 'deepseek-r1:32b', '```', 'CHATGPT_REASONING',
+    'text-embedding-3-small', 'text-embedding-3-large',
+    'claude-3-7-sonnet@20250219', 'audio', 'wav', 'CHATGPT_MINI',
+    '[ATTACHMENTS]', 'CHAT', 'OPENAI_VOICE', 'medium', 'low', 'high',
+    'medium', 'think', '<think>', '</think>', 'AZURE', '<tools>',
+    '</tools>', 'tools', 'text', 'thinking', 'OK', 'function',
     'gpt-4.5-preview', 'redacted_thinking', 'gemma-3-27b-it',
     'AZURE OPENAI', 'ANTHROPIC', 'VERTEX ANTHROPIC', 'gemma3:27b',
     7680 * 4320, {}, 10, { log: true }, 'Alan', 'user', 'system',
@@ -369,6 +370,7 @@ const MODELS = {
 };
 
 MODELS[DEEPSEEK_R1_70B] = MODELS[DEEPSEEK_R1];
+MODELS[DEEPSEEK_R1_32B] = MODELS[DEEPSEEK_R1];
 MODELS[GEMMA327B] = MODELS[GEMMA_3_27B];
 
 for (const n in MODELS) {
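For context, a minimal sketch of the aliasing pattern above (the field names are placeholders, not the library's actual MODELS schema): the new 'deepseek-r1:32b' key is assigned the same configuration object as 'deepseek-r1', so a lookup by either id returns identical settings.

```js
// Minimal sketch of the MODELS aliasing; the fields are illustrative, not the
// real utilitas model schema.
const MODELS = {
    'deepseek-r1': { reasoning: true, contextWindow: 128000 }, // assumed fields
};
MODELS['deepseek-r1:70b'] = MODELS['deepseek-r1'];
MODELS['deepseek-r1:32b'] = MODELS['deepseek-r1']; // new in 1998.2.62

// Both keys reference the same object.
console.log(MODELS['deepseek-r1:32b'] === MODELS['deepseek-r1']); // true
```

Because the assignment copies a reference rather than cloning, any later per-model adjustment (such as the `for (const n in MODELS)` post-processing loop above) applies to every aliased id at once.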
@@ -566,13 +568,21 @@ const init = async (options = {}) => {
             break;
         case OLLAMA:
             // https://github.com/ollama/ollama/blob/main/docs/openai.md
+            const baseURL = 'http://localhost:11434/v1/';
             ais[id] = {
                 id, provider, model, client: await OpenAI({
-                    baseURL
-                    ...options
+                    baseURL, apiKey: 'ollama', ...options
                 }),
                 prompt: async (cnt, opts) => await promptOpenAI(id, cnt, opts),
             };
+            const phLog = m => log(`Ollama preheat: ${m?.message || m}`);
+            ignoreErrFunc(async () => {
+                phLog(await (await fetch(`${baseURL}completions`, {
+                    method: 'POST', body: JSON.stringify({
+                        model: model.name, prompt: '', keep_alive: -1
+                    })
+                })).text());
+            }, { log: phLog });
             break;
         default:
             throwError(`Invalid AI provider: ${options.provider || 'null'}.`);
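A standalone sketch of the preheat request introduced above, assuming a local Ollama instance on its default port: it posts an empty prompt with Ollama's `keep_alive: -1` extension so the model stays loaded instead of being evicted after the idle timeout. The model tag below is illustrative.

```js
// Sketch of the Ollama preheat call. keep_alive: -1 is an Ollama-specific
// field asking the server to keep the model in memory indefinitely; in
// alan.mjs the call is wrapped in ignoreErrFunc, so failures are only logged.
const baseURL = 'http://localhost:11434/v1/';

const preheat = async (model) => {
    const resp = await fetch(`${baseURL}completions`, {
        method: 'POST',
        body: JSON.stringify({ model, prompt: '', keep_alive: -1 }),
    });
    return await resp.text(); // alan.mjs logs the raw response text
};

preheat('deepseek-r1:32b').then(console.log).catch(console.error);
```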
@@ -1000,6 +1010,7 @@ const promptOpenAI = async (aiId, content, options = {}) => {
     const resp = await client.chat.completions.create({
         model: azure ? undefined : options.model, ...history,
         ...options.jsonMode ? { response_format: { type: JSON_OBJECT } } : {},
+        ...provider === OLLAMA ? { keep_alive: -1 } : {},
         modalities, audio: options.audio || (
             modalities?.find?.(x => x === AUDIO)
             && { voice: DEFAULT_MODELS[OPENAI_VOICE], format: 'pcm16' }
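The conditional spread above merges the Ollama-specific `keep_alive` flag into the request body only when the active provider is OLLAMA; a small illustration of the idiom follows (the provider strings stand in for the constants defined in alan.mjs).

```js
// Illustration of the conditional-spread idiom; 'OLLAMA' and 'OPENAI' are
// placeholders for the provider constants in alan.mjs.
const buildBody = (provider, base) =>
    ({ ...base, ...(provider === 'OLLAMA' ? { keep_alive: -1 } : {}) });

console.log(buildBody('OLLAMA', { model: 'deepseek-r1:32b' }));
// -> { model: 'deepseek-r1:32b', keep_alive: -1 }
console.log(buildBody('OPENAI', { model: 'gpt-4o-mini' }));
// -> { model: 'gpt-4o-mini' }
```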
@@ -1560,7 +1571,7 @@ const PREFERRED_ENGINES = [
 export default init;
 export {
     ATTACHMENT_TOKEN_COST, CLOUD_37_SONNET, CODE_INTERPRETER, DEEPSEEK_R1,
-    DEEPSEEK_R1_70B, DEFAULT_MODELS,
+    DEEPSEEK_R1_32B, DEEPSEEK_R1_70B, DEFAULT_MODELS,
     EMBEDDING_001,
     FUNCTION, GEMINI_20_FLASH, GEMINI_20_FLASH_THINKING, GPT_45, GPT_4O, GPT_4O_MINI, GPT_O1, GPT_O3_MINI, INSTRUCTIONS, MODELS,
     OPENAI_VOICE, RETRIEVAL,
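For reference, a hedged usage sketch of the newly exported constant; the import path targets the library module directly and may differ from the package's public entry point.

```js
// Hypothetical consumer of the new export; the relative path assumes direct
// access to lib/alan.mjs rather than the bundled package entry.
import { DEEPSEEK_R1, DEEPSEEK_R1_32B, MODELS } from './lib/alan.mjs';

console.log(DEEPSEEK_R1_32B);                                 // 'deepseek-r1:32b'
console.log(MODELS[DEEPSEEK_R1_32B] === MODELS[DEEPSEEK_R1]); // true, aliased above
```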
package/lib/manifest.mjs
CHANGED
package/lib/utilitas.mjs
CHANGED
@@ -650,8 +650,10 @@ const checkInterval = (itv, sed) =>
 
 const ignoreErrFunc = async (func, options) => {
     const run = async () => {
-        try { return await func(...options?.args || []) }
-
+        try { return await func(...options?.args || []) } catch (err) {
+            if (Function.isFunction(options?.log)) { options.log(err); }
+            else if (options?.log) { console.error(err); }
+        }
     };
     if (options?.await) { await timeout(options.await); return await run(); }
     else if (options?.wait) { return setTimeout(run, options.wait); }
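A simplified standalone sketch of the updated error handling (the await/wait scheduling branches are omitted, and a plain `typeof` check stands in for the library's `Function.isFunction` helper), followed by a usage in the style of the Ollama preheat wrapper in alan.mjs.

```js
// Simplified sketch of the new catch behaviour: a function passed as
// options.log receives the error, any other truthy log falls back to
// console.error, and otherwise the error is silently swallowed.
const ignoreErrFunc = async (func, options) => {
    try { return await func(...options?.args || []); } catch (err) {
        if (typeof options?.log === 'function') { options.log(err); }
        else if (options?.log) { console.error(err); }
    }
};

// Usage mirroring the Ollama preheat call: the error is routed to a custom logger.
await ignoreErrFunc(async () => { throw new Error('model not found'); },
    { log: err => console.warn(`Ollama preheat: ${err?.message || err}`) });
```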