utilitas 1998.2.7 → 1998.2.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/utilitas.lite.mjs +1 -1
- package/dist/utilitas.lite.mjs.map +1 -1
- package/lib/alan.mjs +28 -13
- package/lib/manifest.mjs +1 -1
- package/package.json +1 -1
package/lib/alan.mjs
CHANGED
@@ -543,11 +543,15 @@ const packResp = async (resp, options) => {
 // DeepSeek R1 {
 let lines = (richText || txt).split('\n');
 const indexOfEnd = lines.indexOf(THINK_END);
-if (lines[0] === THINK_STR
-
-
-
-
+if (lines[0] === THINK_STR) {
+    if (indexOfEnd === -1) {
+        lines.shift();
+    } else {
+        lines[0] = MD_CODE + THINK;
+        lines[indexOfEnd] = MD_CODE;
+        lines.slice(1, indexOfEnd).join('').trim()
+            || (lines = lines.slice(indexOfEnd + 1));
+    }
     richText = lines.join('\n').trim();
 }
 // }
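The added lines change how packResp folds a DeepSeek R1 reasoning block into the reply text: an unterminated opening tag is simply dropped, while a complete block is rewrapped as a fenced code block unless its body is empty, in which case the whole block is removed. Below is a minimal standalone sketch of that logic; the constant values are assumptions, since THINK_STR, THINK_END, MD_CODE, and THINK are defined elsewhere in alan.mjs and are not part of this diff.

// Minimal sketch of the new reasoning-block handling, mirroring the added lines above.
// The constant values below are assumed; the real ones live elsewhere in alan.mjs.
const THINK_STR = '<think>';
const THINK_END = '</think>';
const MD_CODE = '```';
const THINK = 'think';

const foldThinkBlock = (text) => {
    let lines = text.split('\n');
    const indexOfEnd = lines.indexOf(THINK_END);
    if (lines[0] === THINK_STR) {
        if (indexOfEnd === -1) {
            lines.shift();                             // unterminated block: drop only the opening tag
        } else {
            lines[0] = MD_CODE + THINK;                // rewrap the block as a fenced "think" section
            lines[indexOfEnd] = MD_CODE;
            lines.slice(1, indexOfEnd).join('').trim() // empty reasoning body: drop the block entirely
                || (lines = lines.slice(indexOfEnd + 1));
        }
    }
    return lines.join('\n').trim();
};

// An empty reasoning block disappears; a non-empty one becomes a fenced block.
console.log(foldThinkBlock('<think>\n</think>\nHello.'));                 // => 'Hello.'
console.log(foldThinkBlock('<think>\nWeighing options.\n</think>\nHi.'));
// => '```think\nWeighing options.\n```\nHi.'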
@@ -1083,9 +1087,15 @@ const getMaxChatPromptLimit = (options) => {
 
 const distillFile = async (attachments, o) => {
     const strPmt = o?.prompt || (
-        'You are an intelligent document
-        + '
-        +
+        'You are an intelligent document analyzer.'
+        + ' You will receive various multimedia files, including images, audio, and videos.'
+        + ' Please analyze these documents, extract the information, and organize it into an easy-to-read format.'
+        + ' For document-type files or image files primarily containing text information, act as a document scanner, return the text content, and describe any images and tables present.'
+        + ' For audio files, please provide a transcript of the spoken voices. If there are background noises or music, attempt to briefly describe the environmental sounds and music sections.'
+        + ' For images or video files that are not primarily text-based, describe the tragic scene you observe, highlight key details, convey the emotional tone of the setting, and share your impressions.'
+        + ' For video files, please describe the content, including the theme, subjects, characters, scenes, objects, storyline, and emotional tone.'
+        + ' Please RETURN ONLY your analysis results without including your thought process or other unrelated information.'
+        + (o.summarize ? ' Please organize the key content of this document, systematically present the key information it contains in a concise summary, and remove any unimportant filler content.' : '')
         + (o.toLanguage ? ` Please return the results in ${o.toLanguage}.` : '')
         + (o.keepPaging ? '' : ' If the document has multiple pages, merge them into one page. Please do not return any paging information.')
         + (o.keepDecoration ? '' : ' If the document has headers, footers, or watermarks, please ignore them.')
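The hunk above only rewrites the prompt text, keeping the same conditional-suffix pattern for the distillFile options. A short sketch of that pattern, with the wording abbreviated and only the option-driven suffixes shown:

// Sketch of the conditional-suffix pattern used by the rewritten distillFile prompt.
// The wording is abbreviated here; the full text is in the hunk above.
const buildDistillPrompt = (o = {}) =>
    'You are an intelligent document analyzer.'
    + (o.summarize ? ' Please organize the key content of this document in a concise summary.' : '')
    + (o.toLanguage ? ` Please return the results in ${o.toLanguage}.` : '')
    + (o.keepPaging ? '' : ' If the document has multiple pages, merge them into one page.')
    + (o.keepDecoration ? '' : ' If the document has headers, footers, or watermarks, please ignore them.');

console.log(buildDistillPrompt({ toLanguage: 'French', keepPaging: true, keepDecoration: true }));
// => 'You are an intelligent document analyzer. Please return the results in French.'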
@@ -1103,12 +1113,16 @@ const distillFile = async (attachments, o) => {
     }
     attachments = await Promise.all(attachments);
     // print(attachments);
-    return await prompt(strPmt, {
+    return await prompt(strPmt, {
+        fast: true, multimodal: true, ...o, attachments,
+    });
 };
 
 const prompt = async (input, options) => {
     let egn = options?.engine && unifyEngine(options);
-
+    const engines = PREFERRED_ENGINES.slice();
+    options?.multimodal && engines.sort((x, y) => x.multimodal - y.multimodal);
+    for (const engine of engines) {
         if ((egn ? engine.client === egn : true) && clients[engine.client]) {
             const extra = {};
             if (engine.client === OPENAI) {
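One detail worth noting in the new distillFile return value: because of the spread order, the fast and multimodal defaults can still be overridden by caller options in o, while attachments is applied last so the freshly resolved list always wins. A tiny sketch of just that ordering:

// Spread ordering in the new distillFile call: defaults first, caller options next,
// attachments last so the resolved attachment list cannot be overridden.
const buildPromptOptions = (o = {}, attachments = []) =>
    ({ fast: true, multimodal: true, ...o, attachments });

console.log(buildPromptOptions({ fast: false }, ['a.png']));
// => { fast: false, multimodal: true, attachments: [ 'a.png' ] }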
@@ -1171,9 +1185,10 @@ const analyzeSessions = async (sessionIds, options) => {
 };
 
 const PREFERRED_ENGINES = [
-    { client: OPENAI, func: promptChatGPT },
-    { client: GEMINI, func: promptGemini },
-    { client: CLAUDE, func: promptClaude },
+    { client: OPENAI, func: promptChatGPT, multimodal: 1 },
+    { client: GEMINI, func: promptGemini, multimodal: 0 },
+    { client: CLAUDE, func: promptClaude, multimodal: 2 },
+    { client: OLLAMA, func: promptOllama, multimodal: 99 },
 ]; // keep gpt first to avoid gemini grounding by default
 
 export default init;
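Together with the multimodal sort added to prompt() earlier in this diff, these ranks let multimodal requests prefer engines by capability (lower rank first) while the default order, GPT first, stays untouched for ordinary prompts. A minimal sketch of the resulting ordering, with the client constants shown as plain strings and the func fields omitted:

// Sketch of the multimodal-aware engine ordering introduced in this diff.
// Entries mirror the new PREFERRED_ENGINES ranks; client names are strings here, func fields omitted.
const PREFERRED_ENGINES = [
    { client: 'OPENAI', multimodal: 1 },
    { client: 'GEMINI', multimodal: 0 },
    { client: 'CLAUDE', multimodal: 2 },
    { client: 'OLLAMA', multimodal: 99 },
];

const orderEngines = (options) => {
    const engines = PREFERRED_ENGINES.slice();  // copy, so the default order is never mutated
    options?.multimodal && engines.sort((x, y) => x.multimodal - y.multimodal);
    return engines.map(e => e.client);
};

console.log(orderEngines({}));                   // => [ 'OPENAI', 'GEMINI', 'CLAUDE', 'OLLAMA' ]
console.log(orderEngines({ multimodal: true })); // => [ 'GEMINI', 'OPENAI', 'CLAUDE', 'OLLAMA' ]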
package/lib/manifest.mjs
CHANGED