utilitas 1998.2.41 → 1998.2.43

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/alan.mjs CHANGED
@@ -628,8 +628,10 @@ const buildClaudeMessage = (text, options) => {
         }
     });
     return String.isString(text) ? {
-        role: options?.role || user,
-        content: [...attachments, { type: TEXT, text }],
+        role: options?.role || user, content: [...attachments, {
+            type: TEXT, text,
+            cache_control: options?.cache_control ?? { type: 'ephemeral' }
+        }],
     } : text;
 };
 
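Note: the new `cache_control` default opts every text block into Anthropic's prompt caching unless the caller overrides it. As a rough standalone sketch of what that field means at the API level (not code from this package; it assumes the official `@anthropic-ai/sdk` client and a current model id):

    // Standalone sketch, not package code: mark a text block as cacheable via
    // Anthropic prompt caching, mirroring the default added in buildClaudeMessage.
    import Anthropic from '@anthropic-ai/sdk';

    const client = new Anthropic(); // reads ANTHROPIC_API_KEY from the environment
    const resp = await client.messages.create({
        model: 'claude-3-5-sonnet-latest', // assumption: any current Claude model
        max_tokens: 1024,
        messages: [{
            role: 'user',
            content: [{
                type: 'text',
                text: 'A long, reusable context block goes here...',
                cache_control: { type: 'ephemeral' }, // same default as the diff
            }],
        }],
    });
    console.log(resp.content);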
@@ -655,7 +657,7 @@ const streamResp = async (resp, options) => {
 };
 
 const getInfoEnd = text => Math.max(...[THINK_END, TOOLS_END].map(x => {
-    const keyEnd = text.indexOf(`${x}\n`);
+    const keyEnd = text.indexOf(text.endsWith(x) ? x : `${x}\n`);
     return keyEnd >= 0 ? (keyEnd + x.length) : 0;
 }));
 
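Note: the `endsWith` guard handles a marker that arrives at the very end of the streamed buffer, where no trailing newline exists yet and the old `indexOf(`${x}\n`)` lookup would miss it. A tiny illustration of the difference (the marker value here is an assumption, not necessarily what THINK_END is in this package):

    // Illustrative only: why the endsWith check matters for end-of-buffer markers.
    const THINK_END = '</think>'; // assumed marker value
    const findEnd = (text, x) => {
        const keyEnd = text.indexOf(text.endsWith(x) ? x : `${x}\n`);
        return keyEnd >= 0 ? keyEnd + x.length : 0;
    };
    console.log(findEnd('reasoning...</think>\nanswer', THINK_END)); // 20
    console.log(findEnd('reasoning...</think>', THINK_END));         // 20 (old code: 0)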
@@ -739,7 +741,7 @@ const packResp = async (resp, options) => {
         ...references ? { references } : {},
         ...referencesMarkdown ? { referencesMarkdown } : {},
         ...audio ? { audio, audioMimeType: options?.audioMimeType } : {},
-        processing: options?.processing,
+        processing: !!options?.processing,
         model: options?.model,
     };
 };
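Note: the `!!` coercion keeps `processing` a boolean in the packed response instead of `undefined` when the option is absent. A minimal illustration (hypothetical helper, not package code):

    // Illustrative only: boolean coercion of an optional flag.
    const pack = options => ({ processing: !!options?.processing });
    console.log(pack({}));                // { processing: false }
    console.log(pack({ processing: 1 })); // { processing: true }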
@@ -1018,18 +1020,25 @@ const promptOllama = async (content, options = {}) => {
 
 const promptClaude = async (content, options = {}) => {
     options.model = options.model || DEFAULT_MODELS[CLAUDE];
-    let [_MODEL, event, text, thinking, signature, result, thinkEnd, tool_use]
-        = [MODELS[options.model], null, '', '', '', options.result ?? '', '', []];
+    let [
+        _MODEL, event, text, thinking, signature, result, thinkEnd, tool_use,
+        responded
+    ] = [
+        MODELS[options.model], null, '', '', '', options.result ?? '', '',
+        [], false
+    ];
     const { client } = await getClaudeClient(options);
     const { history }
         = await buildPrompts(_MODEL, content, { ...options, flavor: CLAUDE });
-    const resp = await client.messages.create({
+    const resp = await client.beta.messages.create({
         model: options.model, max_tokens: _MODEL.maxOutputTokens, ...history,
         stream: true, ...options.reasoning ?? _MODEL?.reasoning ? {
             thinking: options.thinking || { type: 'enabled', budget_tokens: 1024 },
         } : {}, ..._MODEL?.tools ? { // https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking
             tools: options.tools ?? toolsClaude.map(x => x.def),
             tool_choice: { type: 'auto' },
+            betas: ['token-efficient-tools-2025-02-19'],
+            // @todo: https://docs.anthropic.com/en/docs/build-with-claude/tool-use/token-efficient-tool-use
         } : {},
     });
     for await (const chunk of resp) {
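Note: switching to `client.beta.messages.create` is what makes the new `betas` array legal; the beta surface of the Anthropic SDK accepts it, and the listed id opts the request into token-efficient tool use, per the linked docs. A rough standalone sketch of that call shape (assumptions: `@anthropic-ai/sdk`, a current model id, and a made-up tool; not code from this package):

    // Standalone sketch, not package code: streaming beta call with a betas array.
    import Anthropic from '@anthropic-ai/sdk';

    const client = new Anthropic();
    const stream = await client.beta.messages.create({
        model: 'claude-3-7-sonnet-latest', // assumption
        max_tokens: 1024,
        stream: true,
        betas: ['token-efficient-tools-2025-02-19'],
        tools: [{ // hypothetical tool for illustration
            name: 'get_time',
            description: 'Return the current time for a timezone.',
            input_schema: {
                type: 'object',
                properties: { tz: { type: 'string' } },
                required: ['tz'],
            },
        }],
        tool_choice: { type: 'auto' },
        messages: [{ role: 'user', content: 'What time is it in Tokyo?' }],
    });
    for await (const event of stream) {
        // Events mirror the non-beta stream: message_start, content_block_delta, ...
        if (event.type === 'content_block_delta') process.stdout.write('.');
    }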
@@ -1048,6 +1057,8 @@ const promptClaude = async (content, options = {}) => {
             tool_use[tool_use.length - 1].input += event.partial_json;
         }
         deltaText = deltaThink + deltaText;
+        options.result && deltaText
+            && (responded = responded || (deltaText = `\n\n${deltaText}`));
         result += deltaText;
         deltaText && await streamResp({
             text: options.delta ? deltaText : result,
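Note: the `responded` flag fires at most once. When an existing `options.result` is being continued, the first non-empty delta is prefixed with a blank line so the continuation does not run into the previous text; later deltas pass through untouched. A small illustration of that behaviour (simplified, not package code):

    // Illustrative only: one-time blank-line separator when continuing a result.
    let responded = false;
    const state = { result: 'previous answer' };
    for (let deltaText of ['New', ' continuation']) {
        state.result && deltaText
            && (responded = responded || (deltaText = `\n\n${deltaText}`));
        state.result += deltaText;
    }
    console.log(JSON.stringify(state.result)); // "previous answer\n\nNew continuation"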
@@ -1143,7 +1154,6 @@ const promptGemini = async (content, options = {}) => {
         ) : {},
     });
     // https://github.com/google/generative-ai-js/blob/main/samples/node/advanced-chat.js
-    // @todo: check this issue similar to Vertex AI:
     // Google's bug: history is not allowed while using inline_data?
     const chat = client.startChat({ history, ...generationConfig(options) });
     const resp = await chat.sendMessageStream(prompt);
@@ -1346,10 +1356,10 @@ const resetSession = async (sessionId, options) => {
 const packResult = resp => {
     const result = {
         ...resp, spoken: renderText(
-            resp.markdown, { noCode: true, noLink: true }
+            resp.text, { noCode: true, noLink: true }
         ).replace(/\[\^\d\^\]/ig, ''),
     };
-    log(`Response (${result.model}): ${JSON.stringify(result.markdown)}`);
+    log(`Response (${result.model}): ${JSON.stringify(result.text)}`);
     // log(result);
     return result;
 };
package/lib/manifest.mjs CHANGED
@@ -1,7 +1,7 @@
 const manifest = {
     "name": "utilitas",
     "description": "Just another common utility for JavaScript.",
-    "version": "1998.2.41",
+    "version": "1998.2.43",
     "private": false,
     "homepage": "https://github.com/Leask/utilitas",
     "main": "index.mjs",
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
     "name": "utilitas",
     "description": "Just another common utility for JavaScript.",
-    "version": "1998.2.41",
+    "version": "1998.2.43",
     "private": false,
     "homepage": "https://github.com/Leask/utilitas",
     "main": "index.mjs",