utilitas 2000.3.44 → 2000.3.46

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
package/lib/alan.mjs CHANGED
@@ -54,7 +54,7 @@ const [
  OPENROUTER, AUTO, TOOL, S_OPENAI, S_GOOGLE, S_ANTHROPIC, ONLINE,
  GEMINI_30_PRO, GEMINI_25_FLASH, IMAGEN_4_ULTRA, VEO_31, IMAGEN_4_UPSCALE,
  ERROR_GENERATING, GEMINI_25_FLASH_TTS, GEMINI_25_PRO_TTS, wav,
- GPT_4O_MIMI_TTS, GPT_4O_TRANSCRIBE, INVALID_AUDIO, OGG_EXT,
+ GPT_4O_MIMI_TTS, GPT_4O_TRANSCRIBE, INVALID_AUDIO, OGG_EXT, ELLIPSIS,
  ] = [
  'OpenAI', 'Google', 'Ollama', 'nova', 'deepseek-3.2-speciale', '```',
  'claude-opus-4.5', 'audio', 'wav', 'OPENAI_VOICE', 'medium', 'think',
@@ -74,7 +74,7 @@ const [
  'veo-3.1-generate-preview', 'imagen-4.0-upscale-preview',
  'Error generating content.', 'gemini-2.5-flash-preview-tts',
  'gemini-2.5-pro-tts', 'wav', 'gpt-4o-mini-tts', 'gpt-4o-transcribe',
- 'Invalid audio data.', 'ogg',
+ 'Invalid audio data.', 'ogg', '...',
  ];

  const [tool, messages, text]
@@ -93,6 +93,7 @@ const countToolCalls = r => r?.split('\n').filter(x => x === TOOLS_STR).length;
  const assertApiKey = (p, o) => assert(o?.apiKey, `${p} api key is required.`);
  const getProviderIcon = provider => PROVIDER_ICONS[provider] || '🔮';
  const libOpenAi = async opts => await need('openai', { ...opts, raw: true });
+ const buildTextWithEllipsis = (txt, trim) => `${txt}${(trim ? ELLIPSIS : '')}`;

  const GEMINI_RULES = {
  source: S_GOOGLE, icon: '♊️',
@@ -1046,10 +1047,7 @@ const promptGoogle = async (aiId, prompt, options = {}) => {
  prompt = ensureString(prompt, { trim: true });
  assertPrompt(prompt);
  M.tts && (prompt = `${options?.prompt || TTS_PROMPT}: ${prompt}`);
- assert(await countTokens(prompt, { fast: true })
- <= M.maxInputTokens,
- `Prompt must be less than ${M.maxInputTokens} tokens.`, 400
- );
+ prompt = await trimText(prompt, M.maxInputTokens);
  if (M?.image) {
  var resp = await client.models.generateImages({
  model: M.name, prompt, config: mergeAtoB(options?.config, {
@@ -1167,11 +1165,9 @@ const promptOpenAI = async (aiId, prompt, options = {}) => {
  if (M?.audio) {
  assertPrompt(prompt);
  const ins_prompt = options?.prompt || `${TTS_PROMPT}.`;
- assert(await countTokens(
- JSON.stringify([ins_prompt, prompt]), { fast: true }
- ) <= M.maxInputTokens,
- `Prompt must be less than ${M.maxInputTokens} tokens.`, 400
- );
+ prompt = await trimText(prompt, M.maxInputTokens - await countTokens(
+ ins_prompt, { fast: true }
+ ));
  // https://platform.openai.com/docs/api-reference/audio/createSpeech
  var resp = await client.audio.speech.create({
  model: M.name, voice: DEFAULT_MODELS[OPENAI_VOICE],
@@ -1398,6 +1394,21 @@ const analyzeSessions = async (sessionIds, options) => {
  return Array.isArray(sessionIds) ? resp : resp[sessionIds[0]];
  };

+ const trimText = async (text, limit = Infinity) => {
+ text = ensureString(text, { trim: true });
+ let trimmed = false;
+ let lastCheck = null;
+ while ((lastCheck = await countTokens(
+ buildTextWithEllipsis(text, trimmed), { fast: true }
+ )) > limit) {
+ text = text.split(' ').slice(
+ 0, -Math.ceil((Math.abs(lastCheck - limit) / 10))
+ ).join(' ').trimEnd();
+ trimmed = true;
+ }
+ return buildTextWithEllipsis(text, trimmed);
+ };
+
  export default init;
  export {
  _NEED,
@@ -1428,14 +1439,15 @@ export {
  getChatPromptLimit,
  getSession,
  init,
- tts,
- stt,
  initChat,
  k,
  listOpenAIModels,
  prompt,
  promptOpenRouter,
  resetSession,
+ stt,
  talk,
  trimPrompt,
+ trimText,
+ tts,
  };
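
The practical effect of the alan.mjs changes: prompts that exceed a model's input-token limit are now trimmed down to fit (gaining a trailing '...') by the new trimText helper instead of being rejected by an assertion, and trimText is added to the module's exports. A minimal usage sketch, not part of the package; the relative import path and the 8192 token budget are illustrative assumptions, and it presumes countTokens' fast mode works without any prior provider setup:

import { trimText } from './lib/alan.mjs';

// Hypothetical oversized input.
const veryLongPrompt = 'lorem ipsum '.repeat(50000);

// Keep the prompt within roughly 8192 tokens; '...' is appended only when
// the text was actually shortened, otherwise the whitespace-trimmed input
// comes back unchanged.
const safePrompt = await trimText(veryLongPrompt, 8192);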
package/lib/embedding.mjs CHANGED
@@ -1,11 +1,9 @@
  import { convert } from './storage.mjs';
- import { countTokens } from './alan.mjs';
  import { ensureArray, ensureString, need } from './utilitas.mjs';
+ import { trimText } from './alan.mjs';

  const _NEED = ['openai'];
  const clients = {};
- const ELLIPSIS = '...';
- const buildTextWithEllipsis = (txt, trim) => `${txt}${(trim ? ELLIPSIS : '')}`;

  const [
  OPENAI,
@@ -82,21 +80,6 @@ const ensureApiKey = (options) => {
  return options.apiKey;
  };

- const trimTextToLimit = async (text, limit = Infinity) => {
- text = ensureString(text, { trim: true });
- let trimmed = false;
- let lastCheck = null;
- while ((lastCheck = await countTokens(
- buildTextWithEllipsis(text, trimmed), { fast: true }
- )) > limit) {
- text = text.split(' ').slice(
- 0, -Math.ceil((Math.abs(lastCheck - limit) / 10))
- ).join(' ').trimEnd();
- trimmed = true;
- }
- return buildTextWithEllipsis(text, trimmed);
- };
-
  const getClient = (provider) => {
  provider = ensureString(provider, { case: 'UP' })
  || Object.keys(clients || {})[0];
@@ -129,9 +112,7 @@ const embed = async (input, options = {}) => {
  'Only one type of input is allowed at a time.', 400
  );
  if (x.text) {
- x.text = await trimTextToLimit(
- x.text, MODEL_CONFIG[model]?.maxTokens
- );
+ x.text = await trimText(x.text, MODEL_CONFIG[model]?.maxTokens);
  } else if (x.image) {
  assert(
  MODEL_CONFIG[model]?.image,
@@ -161,6 +142,9 @@ const embed = async (input, options = {}) => {
  body.model = `${source ? `${source}/` : ''}${body.model}`;
  case OPENAI:
  resp = await client.embeddings.create(body);
+ break;
+ default:
+ throw new Error(`Unsupported provider: ${provider}`);
  }
  assert(resp?.data?.length, 'No embeddings returned.', 500);
  if (options?.raw) { return resp; }
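
One behavioural note on the embed() switch above: as far as the hunk shows, an unrecognized provider previously fell through the switch without issuing any request, so the failure only surfaced later via the generic 'No embeddings returned.' assertion. The added break and default arm make both paths explicit. A schematic sketch of the resulting control flow; SOME_OPENAI_COMPATIBLE is a stand-in label, since the case preceding OPENAI lies outside the hunk:

switch (provider) {
    case SOME_OPENAI_COMPATIBLE:
        // Prefix the model with its source, then fall through on purpose
        // into the shared OpenAI-style embeddings call.
        body.model = `${source ? `${source}/` : ''}${body.model}`;
    case OPENAI:
        resp = await client.embeddings.create(body);
        break; // newly added: stop here instead of falling off the end
    default:
        // newly added: unknown providers now fail fast with a clear message
        throw new Error(`Unsupported provider: ${provider}`);
}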
package/lib/manifest.mjs CHANGED
@@ -1,7 +1,7 @@
  const manifest = {
  "name": "utilitas",
  "description": "Just another common utility for JavaScript.",
- "version": "2000.3.44",
+ "version": "2000.3.46",
  "private": false,
  "homepage": "https://github.com/Leask/utilitas",
  "main": "index.mjs",
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "utilitas",
  "description": "Just another common utility for JavaScript.",
- "version": "2000.3.44",
+ "version": "2000.3.46",
  "private": false,
  "homepage": "https://github.com/Leask/utilitas",
  "main": "index.mjs",