utilitas 1998.2.36 → 1998.2.37

package/lib/alan.mjs CHANGED
@@ -751,8 +751,7 @@ const streamResp = async (resp, options) => {
 
 const packGptResp = async (resp, options) => {
     // simple mode is not recommended for streaming responses
-    let text = resp.text // ChatGPT / Claude
-        || (Function.isFunction(resp?.text) ? resp.text() : resp?.text) // Gemini
+    let text = resp.text // ChatGPT / Claude / Gemini
         || resp?.message?.content || ''; // Ollama
     const audio = resp?.message?.audio?.data; // ChatGPT audio mode
     if (options?.raw) { return resp; }
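
Note on this hunk: the Gemini special case (`text` as a method) is gone because, with the `promptGemini` rewrite further down, every flavor now hands `packGptResp` a plain string. A minimal sketch of the resulting fallback chain, with hypothetical response shapes:

```js
// Hypothetical response shapes, for illustration only.
const fromClaude = { text: 'Hello' };              // text is a plain string
const fromOllama = { message: { content: 'Hi' } }; // text lives elsewhere

// The new coalescing: first truthy value wins, empty string as a last resort.
const textOf = resp => resp?.text || resp?.message?.content || '';

console.log(textOf(fromClaude)); // 'Hello'
console.log(textOf(fromOllama)); // 'Hi'
```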
@@ -783,11 +782,11 @@ const handleToolsCall = async (msg, options) => {
         toolsResponse = (toolsResponse + m).trim();
         await streamResp({ text: options?.delta ? m : toolsResponse }, options);
     };
-    const calls = msg.tool_calls || msg.content || [];
+    const calls = msg.tool_calls || msg.content || msg.parts || [];
     if (calls.length) {
         switch (options?.flavor) {
             case CLAUDE: preRes.push(msg); break;
-            case GEMINI: preRes.push({ role: MODEL, parts: msg?.tool_calls.map(x => ({ functionCall: x })) }); break;
+            case GEMINI: preRes.push(msg); break;
             case CHATGPT: default: preRes.push(msg); break;
         }
         for (const fn of calls) {
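
Note: Gemini tool-call turns are now kept in their native `{ role, parts }` shape, so the calls are picked up from `msg.parts` and the message can be pushed back into history as-is instead of being rebuilt from `msg.tool_calls`. A sketch with assumed per-flavor message shapes:

```js
// Assumed per-flavor tool-call message shapes (illustrative, not exhaustive).
const chatgptMsg = { tool_calls: [{ function: { name: 'get_time', arguments: '{}' } }] };
const geminiMsg = { role: 'model', parts: [{ functionCall: { name: 'get_time', args: {} } }] };

// The new fallback chain covers all flavors with one expression.
const callsOf = msg => msg.tool_calls || msg.content || msg.parts || [];

console.log(callsOf(chatgptMsg).length); // 1 (from tool_calls)
console.log(callsOf(geminiMsg).length);  // 1 (from parts)
```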
@@ -799,11 +798,11 @@ const handleToolsCall = async (msg, options) => {
                     });
                     break;
                 case GEMINI:
-                    input = fn.args;
+                    input = fn?.functionCall?.args;
                     packMsg = (t, e) => ({
                         functionResponse: {
-                            name: fn.name, response: {
-                                name: fn.name, content: e ? `[Error] ${t}` : t,
+                            name: fn?.functionCall?.name, response: {
+                                name: fn?.functionCall?.name, content: e ? `[Error] ${t}` : t,
                             }
                         }
                     });
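
Note: since each Gemini call now arrives wrapped as a `{ functionCall }` part, the name and args sit one level deeper, and the tool output is packed into a `functionResponse` part keyed by the call name. Roughly what the Gemini branch of `packMsg` produces, under the assumed shape:

```js
// A Gemini call as it now appears in `calls` (one part per call; assumed shape).
const fn = { functionCall: { name: 'get_time', args: { tz: 'UTC' } } };

// Equivalent of the new Gemini branch of packMsg: wrap the tool output
// (or an error marker) in a functionResponse part named after the call.
const packMsg = (t, e) => ({
    functionResponse: {
        name: fn?.functionCall?.name, response: {
            name: fn?.functionCall?.name, content: e ? `[Error] ${t}` : t,
        },
    },
});

console.log(JSON.stringify(packMsg('12:00 UTC', false), null, 2));
```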
@@ -816,7 +815,7 @@ const handleToolsCall = async (msg, options) => {
                     });
                     break;
             }
-            const name = fn?.function?.name || fn?.name;
+            const name = (fn?.function || fn?.functionCall || fn)?.name;
            if (!name) { continue; }
            await resp(`\nName: ${name}`);
            const f = tools.find(x => insensitiveCompare(
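
Note: the name lookup now covers all three shapes in one expression: ChatGPT nests it under `fn.function`, Gemini under `fn.functionCall`, and Claude keeps it on the call object itself. A quick check:

```js
// One expression resolves the call name across flavors (shapes as assumed above).
const nameOf = fn => (fn?.function || fn?.functionCall || fn)?.name;

console.log(nameOf({ function: { name: 'a' } }));     // 'a'  (ChatGPT)
console.log(nameOf({ functionCall: { name: 'b' } })); // 'b'  (Gemini)
console.log(nameOf({ name: 'c' }));                   // 'c'  (Claude)
console.log(nameOf({}));                              // undefined -> call is skipped
```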
@@ -1012,7 +1011,8 @@ const promptClaude = async (content, options = {}) => {
     );
     if (tool_use.length && countToolCalls(toolsResponse) < MAX_TOOL_RECURSION) {
         return await promptClaude(content, {
-            ...options, toolsResult: [...options?.toolsResult || [], ...toolsResult], result: toolsResponse,
+            ...options, toolsResult: [...options?.toolsResult || [],
+            ...toolsResult], result: toolsResponse,
         });
     }
     return packGptResp({ text: mergeMsgs(toolsResponse, tool_use) }, options);
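
Note: this hunk only rewraps a long line, but the pattern it carries is the one the Gemini rewrite below adopts: each round appends its results to the accumulated `toolsResult` before recursing, with `countToolCalls` against `MAX_TOOL_RECURSION` bounding the depth. A toy illustration with hypothetical values:

```js
// Hypothetical values, to show how toolsResult accumulates per recursion.
const options = { toolsResult: [{ id: 1 }] }; // carried over from the previous round
const toolsResult = [{ id: 2 }];              // produced this round

const next = [...options?.toolsResult || [], ...toolsResult];
console.log(next); // [ { id: 1 }, { id: 2 } ] — grows until MAX_TOOL_RECURSION
```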
@@ -1078,54 +1078,46 @@ const promptGemini = async (content, options = {}) => {
     options.model = genModel;
     const chat = generative.startChat({
         history: [
-            ...options?.messages && !options?.attachments?.length
-                ? options.messages : [],
-            ...options?.toolsResult ? [{
-                role: user, parts: buildGeminiMessage(content, options)
-            }, options?.toolsResult[0]] : [],
+            ...options?.messages && !options?.attachments?.length ? options.messages : [],
+            ...options?.toolsResult ? [
+                buildGeminiMessage(content, { ...options, history: true }),
+                ...options.toolsResult.slice(0, options.toolsResult.length - 1)
+            ] : []
         ], ...generationConfig(options),
     });
-    const resp = await chat[
-        options?.stream ? 'sendMessageStream' : 'sendMessage'
-    ](options?.toolsResult ?
-        options?.toolsResult[1].parts : buildGeminiMessage(content, options));
-    let [result, references, functionCalls] = [
-        options?.toolsResponse ? `${options?.toolsResponse}\n\n` : '', null, null
-    ];
-    if (options?.stream) {
-        for await (const chunk of resp.stream) {
-            functionCalls || (functionCalls = chunk.functionCalls);
-            const delta = chunk?.text?.() || '';
-            const rfc = packGeminiReferences(
-                chunk.candidates[0]?.groundingMetadata?.groundingChunks,
-                chunk.candidates[0]?.groundingMetadata?.groundingSupports
-            );
-            if (delta === '' && !rfc) { continue; }
-            result += delta;
-            references = rfc;
-            await ignoreErrFunc(async () => await options.stream(
-                await packGptResp({
-                    text: () => options?.delta ? delta : result, references,
-                }, { ...options, processing: true })
-            ), LOG);
-        }
+    const resp = await chat.sendMessageStream(
+        options?.toolsResult?.[options?.toolsResult?.length]?.parts
+        || buildGeminiMessage(content, options)
+    );
+    let [result, references, functionCalls]
+        = [options?.result ?? '', null, null];
+    for await (const chunk of resp.stream) {
+        functionCalls || (functionCalls = chunk.functionCalls);
+        const delta = chunk?.text?.() || '';
+        const rfc = packGeminiReferences(
+            chunk.candidates[0]?.groundingMetadata?.groundingChunks,
+            chunk.candidates[0]?.groundingMetadata?.groundingSupports
+        );
+        if (delta === '' && !rfc) { continue; }
+        result += delta;
+        references = rfc;
+        await streamResp({ text: options?.delta ? delta : result }, options);
     }
     const _resp = await resp.response;
-    const { toolsResult, toolsResponse } = await handleToolsCall({
-        tool_calls: (functionCalls || _resp.functionCalls)()
-    }, { ...options, flavor: GEMINI });
-    options?.toolsResponse && !options?.stream
-        && (_resp.text = [options?.toolsResponse, _resp.text()].join('\n\n'));
-    return await (toolsResult.length && !options?.toolsResult ? promptGemini(
-        content, { ...options || {}, toolsResult, toolsResponse }
-    ) : packGptResp(options?.stream ? {
-        _resp, text: () => result, references
-    } : {
-        ..._resp, references: packGeminiReferences(
-            _resp.candidates[0]?.groundingMetadata?.groundingChunks,
-            _resp.candidates[0]?.groundingMetadata?.groundingSupports
-        )
-    }, options));
+    functionCalls = (functionCalls() || _resp.functionCalls() || []).map(x => ({ functionCall: x }));
+    const { toolsResult, toolsResponse } = await handleToolsCall(
+        { role: MODEL, parts: functionCalls },
+        { ...options, result, flavor: GEMINI }
+    );
+    if (functionCalls.length && countToolCalls(toolsResponse) < MAX_TOOL_RECURSION) {
+        return promptGemini(content, {
+            ...options || {}, toolsResult: [...options?.toolsResult || [],
+            ...toolsResult], result: toolsResponse,
+        });
+    }
+    return await packGptResp({
+        text: mergeMsgs(toolsResponse, functionCalls), references,
+    }, options);
 };
 
 const checkEmbeddingInput = async (input, model) => {
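
Note on this hunk: `promptGemini` now always streams (`sendMessageStream` replaces the conditional `sendMessage` branch), pushes deltas through the shared `streamResp` helper, and handles tools with the same bounded recursion as `promptClaude`: collect the `functionCall` parts, run them through `handleToolsCall`, then either recurse with the accumulated `toolsResult` or pack the merged text with `references`. One thing worth flagging: `options?.toolsResult?.[options?.toolsResult?.length]` indexes one past the end of the array, so it always yields `undefined` and the `buildGeminiMessage(content, options)` fallback is what actually gets sent on recursive rounds; if the last element was intended, that would be `length - 1`:

```js
// The recursive-round message selection, with hypothetical toolsResult parts.
const toolsResult = [{ parts: [{ text: 'round 1' }] }, { parts: [{ text: 'round 2' }] }];

// As written in the diff: arr[arr.length] is one past the end, so this is
// always undefined and the || fallback wins.
console.log(toolsResult?.[toolsResult?.length]?.parts);     // undefined
// The presumably intended last element:
console.log(toolsResult?.[toolsResult?.length - 1]?.parts); // [ { text: 'round 2' } ]
```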
package/lib/manifest.mjs CHANGED
@@ -1,7 +1,7 @@
 const manifest = {
     "name": "utilitas",
     "description": "Just another common utility for JavaScript.",
-    "version": "1998.2.36",
+    "version": "1998.2.37",
     "private": false,
     "homepage": "https://github.com/Leask/utilitas",
     "main": "index.mjs",
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
     "name": "utilitas",
     "description": "Just another common utility for JavaScript.",
-    "version": "1998.2.36",
+    "version": "1998.2.37",
     "private": false,
     "homepage": "https://github.com/Leask/utilitas",
     "main": "index.mjs",