halbot 1994.1.8 → 1995.1.2
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as published in their public registries.
- package/README.md +0 -4
- package/bin/halbot.mjs +1 -1
- package/index.mjs +55 -64
- package/lib/hal.mjs +210 -938
- package/package.json +6 -6
- package/pipeline/010_broca.mjs +261 -0
- package/pipeline/020_cmd.mjs +77 -0
- package/pipeline/030_echo.mjs +70 -0
- package/pipeline/040_help.mjs +49 -0
- package/pipeline/050_auth.mjs +30 -0
- package/pipeline/060_config.mjs +84 -0
- package/pipeline/070_collect.mjs +115 -0
- package/pipeline/080_history.mjs +111 -0
- package/pipeline/090_ai.mjs +77 -0
- package/pipeline/100_chat.mjs +65 -0
- package/skills/-8845_thread.mjs +0 -185
- package/skills/10_ai.mjs +0 -97
- package/skills/20_instant.mjs +0 -25
- package/skills/30_wording.mjs +0 -71
- package/skills/40_dream.mjs +0 -61
- package/skills/50_prompt.mjs +0 -60
- package/skills/60_prepare.mjs +0 -61
- package/skills/70_chat.mjs +0 -101
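
Each deleted `skills/*.mjs` module below exports the same shape, `{ name, run, priority, func, ... }`, where `func` is an async `(ctx, next)` middleware. The sketch below illustrates that shape with a toy composer; `runSkills()` is illustrative only and is not halbot's actual loader, nor the new `pipeline/*.mjs` mechanism.

```js
// Minimal sketch of the module shape shared by the deleted skills/*.mjs files.
// The skills array and runSkills() composer are hypothetical stand-ins.
const skills = [
    {
        name: 'Echo', run: true, priority: 10,
        func: async (ctx, next) => { ctx.result = ctx.txt; await next(); },
        cmds: { echo: 'Echo your text back.' },
    },
    {
        name: 'Log', run: true, priority: 20,
        func: async (ctx, next) => { console.log(ctx.result); await next(); },
    },
];

// Run enabled skills in ascending priority order, koa-style: each skill decides
// whether to continue the chain by awaiting next().
const runSkills = async ctx => {
    const chain = skills.filter(s => s.run).sort((a, b) => a.priority - b.priority);
    const dispatch = async i =>
        i < chain.length ? await chain[i].func(ctx, () => dispatch(i + 1)) : undefined;
    return await dispatch(0);
};

await runSkills({ txt: 'hello', cmd: { cmd: 'echo' } });
```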
package/skills/30_wording.mjs
DELETED
@@ -1,71 +0,0 @@
-import { bot, hal } from '../index.mjs';
-
-const execPrompt = (ctx, arrLines) => ctx.collect((ctx.context = {
-    cmd: ctx.cmd.cmd, prompt: bot.lines(arrLines),
-}).prompt);
-
-// Inspired by:
-// https://github.com/yetone/bob-plugin-openai-translator/blob/main/src/main.js
-const promptTranslate = (ctx, lang) => execPrompt(ctx, [
-    'You are a translation engine that can only translate text and cannot interpret it.',
-    `Translate all the following text I send to you to ${lang}.`
-]);
-
-// Inspired by:
-// https://github.com/yetone/bob-plugin-openai-polisher/blob/main/src/main.js
-const promptPolish = ctx => execPrompt(ctx, [
-    'Revise the following sentences to make them more clear, concise, and coherent.',
-    'Please note that you need to list the changes and briefly explain why.',
-]);
-
-const action = async (ctx, next) => {
-    switch (ctx.cmd.cmd) {
-        case 'lang':
-            if (!ctx.cmd.args) {
-                return await ctx.ok('Please specify a language.');
-            }
-            const cnf = {
-                ...ctx.session.config = {
-                    ...ctx.session.config,
-                    ...ctx.config = {
-                        lang: ctx.cmd.args,
-                        hello: `Please reply in ${ctx.cmd.args}. Hello!`,
-                    },
-                }
-            };
-            Object.keys(ctx.config).map(x => cnf[x] += ' <-- SET');
-            ctx.result = hal.map(cnf);
-            ctx.hello();
-            break;
-        case 'to': promptTranslate(ctx, ctx.cmd.args || ctx.session.config?.lang || ctx._.lang); break;
-        case 'polish': promptPolish(ctx); break;
-        case 'toen': promptTranslate(ctx, 'English'); break;
-        case 'tofr': promptTranslate(ctx, 'French'); break;
-        case 'tozht': promptTranslate(ctx, 'Traditional Chinese'); break;
-        case 'tozhs': promptTranslate(ctx, 'Simplified Chinese'); break;
-    }
-    await next();
-};
-
-export const { name, run, priority, func, cmds, help } = {
-    name: 'Wording',
-    run: true,
-    priority: 30,
-    func: action,
-    help: bot.lines([
-        '¶ Set your default language.',
-        'Example 1: /lang Français',
-        '¶ Prompt the AI engine to translate or polish your text.',
-        "Fallback order: `TO_LANG`, `config.lang`, `bot's lang`, `English`.",
-        'Example 2: /translate Chinese',
-    ]),
-    cmds: {
-        lang: 'Set your default language: /lang `LANG`',
-        translate: 'Translate your text to any language: /translate `TO_LANG`',
-        polish: 'Polish your text.',
-        toen: 'Translate your text to English.',
-        tofr: 'Translate your text to French.',
-        tozht: 'Translate your text to Traditional Chinese.',
-        tozhs: 'Translate your text to Simplified Chinese.',
-    },
-};
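
A standalone sketch of the `/lang` branch above, assuming plain objects in place of halbot's session and `hal.map` rendering: the new settings are merged into the session config through nested spread-assignments, and the keys set in this call are flagged for display.

```js
// Hypothetical stand-ins: a plain session object instead of ctx.session.
const session = { config: { hello: 'Hi!' } };
const args = 'French';

// Merge the new settings into the session config, keeping a copy for display.
const config = { lang: args, hello: `Please reply in ${args}. Hello!` };
const cnf = { ...session.config = { ...session.config, ...config } };

// Flag only the keys that were just set in this call.
Object.keys(config).forEach(x => cnf[x] += ' <-- SET');
console.log(cnf); // { hello: 'Please reply in French. Hello! <-- SET', lang: 'French <-- SET' }
```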
package/skills/40_dream.mjs
DELETED
@@ -1,61 +0,0 @@
-import { bot, storage } from '../index.mjs';
-
-const GOOGLE = 'GOOGLE';
-const types = { image: 'photo', video: 'video' };
-
-const action = async (ctx, next) => {
-    let [provider, func, reference] = [GOOGLE, 'image', null];
-    switch (ctx.cmd.cmd) {
-        case 'fantasy': func = 'video'; break;
-        case 'gptimage':
-            provider = 'OPENAI';
-            reference = ctx.collected.filter(x => [
-                storage.MIME_JPEG, storage.MIME_PNG, storage.MIME_WEBP
-            ].includes(x?.content?.mime_type)).slice(0, 16).map(
-                x => x?.content?.data
-            );
-    }
-    if (!ctx.cmd.args) {
-        return await ctx.ok('Please input your prompt.');
-    }
-    let [objMsg, output] = [(await ctx.ok('💭'))[0], null]; //tts = null
-    try {
-        output = (await ctx._.gen[func](ctx.cmd.args, {
-            provider, expected: 'FILE',
-            ...reference?.length ? { reference, input: 'BASE64' } : {},
-        })) || [];
-    } catch (err) {
-        return await ctx.er(err.message || `Error generating ${func}.`,
-            { lastMessageId: objMsg.message_id });
-    }
-    await ctx.deleteMessage(objMsg.message_id);
-    await ctx.media(
-        output.map(x => ({ type: types[func], src: x.data })),
-        { caption: output[0]?.caption || '' }
-    );
-    // tts = output.tts || '';
-    // await ctx.shouldSpeech(tts);
-};
-
-export const { name, run, priority, func, cmds, help } = {
-    name: 'Dream',
-    run: true,
-    priority: 40,
-    func: action,
-    help: bot.lines([
-        '¶ Use Google `Imagen` (default) or OpenAI `GPT Image` to generate images.',
-        'Example 1: /dream a cat in a rocket',
-        '¶ Use Google `Veo` to generate videos.',
-        'Example 2: /fantasy two cats are kissing each other',
-        '¶ Use `Imagen` to generate images.',
-        'Example 3: /imagen a cat in a car',
-        '¶ Use `GPT Image` to generate images.',
-        'Example 4: /gptimage a cat on a bike',
-    ]),
-    cmds: {
-        dream: 'Generate images with default model: /dream `PROMPT`',
-        fantasy: 'Generate videos with `Veo`: /fantasy `PROMPT`',
-        imagen: 'Generate images with `Imagen`: /imagen `PROMPT`',
-        gptimage: 'Generate images with `GPT Image`: /gptimage `PROMPT`',
-    },
-};
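
A standalone sketch of the `/gptimage` reference handling above, with plain constants standing in for `storage.MIME_*` and a hard-coded array in place of `ctx.collected`: up to 16 JPEG/PNG/WEBP attachments are kept, and BASE64 reference input is only requested when the filter left something behind.

```js
// Hypothetical MIME constants and collected attachments, not halbot's storage module.
const MIME_JPEG = 'image/jpeg', MIME_PNG = 'image/png', MIME_WEBP = 'image/webp';
const collected = [
    { content: { mime_type: 'image/png', data: '<base64-png>' } },
    { content: { mime_type: 'application/pdf', data: '<base64-pdf>' } },
];

// Keep at most 16 supported reference images, extracting only their data.
const reference = collected.filter(x => [MIME_JPEG, MIME_PNG, MIME_WEBP]
    .includes(x?.content?.mime_type)).slice(0, 16).map(x => x?.content?.data);

// Only add reference/input options when something survived the filter.
const options = {
    provider: 'OPENAI', expected: 'FILE',
    ...reference?.length ? { reference, input: 'BASE64' } : {},
};
console.log(options); // { provider: 'OPENAI', expected: 'FILE', reference: ['<base64-png>'], input: 'BASE64' }
```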
package/skills/50_prompt.mjs
DELETED
@@ -1,60 +0,0 @@
-import { bot, hal, utilitas } from '../index.mjs';
-
-const action = async (ctx, next) => {
-    ctx.session.prompts || (ctx.session.prompts = {});
-    const cmd = ctx.cmd?.cmd;
-    switch (cmd) {
-        case 'prompts':
-            const prompts = hal.lines2(Object.keys(ctx.session.prompts || {}).map(
-                x => bot.lines([`- /${x}`, ctx.session.prompts[x]])
-            ));
-            return await ctx.ok(prompts || 'No custom prompts.');
-        case 'add':
-            const arrText = (ctx.cmd.args || '').split('\n');
-            const subArrText = arrText[0].split('>');
-            const _cmd = utilitas.ensureString(
-                subArrText[0], { case: 'SNAKE' }
-            ).slice(0, hal.MAX_MENU_LENGTH);
-            const _prompt = bot.lines([
-                subArrText.slice(1).join(' '), ...arrText.slice(1)
-            ]).trim();
-            if (_cmd && _prompt) {
-                ctx.session.prompts[_cmd] = _prompt;
-                await ctx.ok(`Prompt added: /${_cmd}`);
-            } else {
-                await ctx.ok('Invalid command or prompt.');
-            }
-            return;
-        case 'del':
-            if (ctx.session.prompts[ctx.cmd.args]) {
-                delete ctx.session.prompts[ctx.cmd.args];
-                await ctx.complete();
-            } else {
-                await ctx.ok('Prompt not found.');
-            }
-            return;
-        default:
-            const prompt = ctx.session.prompts?.[cmd] || ctx._.prompts?.[cmd]?.prompt;
-            !ctx.context && prompt && (ctx.context = { cmd, prompt });
-            ctx.context && await ctx.clear(ctx.context);
-    }
-    await next();
-};
-
-export const { name, run, priority, func, help, cmds, cmdx } = {
-    name: 'Prompt',
-    run: true,
-    priority: 50,
-    func: action,
-    help: bot.lines([
-        '¶ Maintain custom prompts.',
-        'Example 1: /add `code` > `Code with me.`',
-        'Example 2: /del `code`',
-    ]),
-    cmds: {
-        prompts: 'List all custom prompts.',
-        add: 'Add or edit a custom prompt: /add `COMMAND` > `PROMPT`.',
-        del: 'Delete a custom prompt: /del `COMMAND`.',
-    },
-    cmdx: {},
-};
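
A standalone sketch of the `/add` parsing above: the first line is split on `>`, the command is normalized and length-capped, and the remainder (plus any further lines) becomes the prompt body. `snakeCase()` and `MAX_MENU_LENGTH` are simplified stand-ins for `utilitas.ensureString(..., { case: 'SNAKE' })` and `hal.MAX_MENU_LENGTH`.

```js
// Hypothetical stand-ins for the utilitas/hal helpers used by the deleted skill.
const MAX_MENU_LENGTH = 32;
const snakeCase = s => s.trim().toLowerCase()
    .replace(/[^a-z0-9]+/g, '_').replace(/^_+|_+$/g, '');

// Parse "/add COMMAND > PROMPT" arguments into a { cmd, prompt } pair.
const parseAdd = args => {
    const arrText = (args || '').split('\n');
    const subArrText = arrText[0].split('>');
    const cmd = snakeCase(subArrText[0]).slice(0, MAX_MENU_LENGTH);
    const prompt = [subArrText.slice(1).join(' '), ...arrText.slice(1)]
        .join('\n').trim();
    return cmd && prompt ? { cmd, prompt } : null;
};

console.log(parseAdd('code > Code with me.')); // { cmd: 'code', prompt: 'Code with me.' }
```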
package/skills/60_prepare.mjs
DELETED
@@ -1,61 +0,0 @@
-import { alan, hal, utilitas } from '../index.mjs';
-
-const checkUnsupportedMimeType = async ctx => {
-    ctx.carry.attachments = [];
-    const ais = await alan.getAi(null, { all: true });
-    for (const x of ctx.collected.filter(x => x.type === 'PROMPT')) {
-        let notSupported = false;
-        ctx.selectedAi.map(y => {
-            const ai = ais.find(z => z.id === y);
-            if (![
-                ...ai.model.supportedMimeTypes,
-                ...ai.model.supportedDocTypes,
-                ...ai.model.supportedAudioTypes,
-            ].includes(x?.content?.mime_type)) { notSupported = true; }
-        });
-        notSupported ? await x.content.analyze() : ctx.carry.attachments.push({
-            ...x.content, analyze: undefined,
-        });
-    }
-};
-
-const action = async (ctx, next) => {
-    // avatar
-    if (ctx.result) {
-        ctx.avatar = '⚙️';
-    } else if (ctx.m?.voice) {
-        ctx.avatar = hal.EMOJI_SPEECH; ctx.result = utilitas.trim(ctx.txt);
-    } else if (ctx.m?.data) {
-        ctx.avatar = '🔘'; ctx.result = utilitas.trim(ctx.txt);
-    } else if (ctx.m?.poll) {
-        ctx.avatar = '📊';
-    } else if (ctx.cmd?.cmd && !ctx.cmd?.ignored) {
-        ctx.avatar = '🚀'; ctx.result = utilitas.trim(ctx.txt);
-    } else {
-        ctx.avatar = '😸';
-    }
-    // prompt
-    await checkUnsupportedMimeType(ctx);
-    const maxInputTokens = await alan.getChatPromptLimit()
-        - await alan.getChatAttachmentCost() * ctx.carry.attachments.length;
-    const additionInfo = ctx.collected.filter(
-        x => String.isString(x.content)
-    ).map(x => x.content).join('\n').split(' ').filter(x => x);
-    ctx.prompt = (ctx.txt || '') + '\n\n';
-    while (await alan.countTokens(
-        `${ctx.prompt}${additionInfo?.[0] || ''}`
-    ) < maxInputTokens && additionInfo.length) {
-        ctx.prompt += `${additionInfo.shift()} `;
-    }
-    ctx.prompt = utilitas.trim(ctx.prompt);
-    additionInfo.filter(x => x).length && (ctx.prompt += '...');
-    // next
-    await next();
-};
-
-export const { name, run, priority, func } = {
-    name: 'Prepare',
-    run: true,
-    priority: 60,
-    func: action,
-};
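
A standalone sketch of the prompt-budget loop above: collected text is appended word by word until the token budget is exhausted, and a trailing `'...'` marks that context was truncated. `countTokens()` is a crude word-count stand-in for `alan.countTokens()`.

```js
// Hypothetical token counter: word count instead of a real tokenizer.
const countTokens = async text => text.split(/\s+/).filter(x => x).length;

// Greedily fill the prompt with collected text until the budget is reached.
const buildPrompt = async (txt, collectedTexts, maxInputTokens) => {
    const additionInfo = collectedTexts.join('\n').split(' ').filter(x => x);
    let prompt = (txt || '') + '\n\n';
    while (await countTokens(`${prompt}${additionInfo[0] || ''}`) < maxInputTokens
        && additionInfo.length) { prompt += `${additionInfo.shift()} `; }
    prompt = prompt.trim();
    additionInfo.length && (prompt += '...'); // leftover words mean the context was cut
    return prompt;
};

console.log(await buildPrompt('Summarize:', ['alpha beta gamma delta epsilon'], 4));
// -> "Summarize:\n\nalpha beta..."
```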
package/skills/70_chat.mjs
DELETED
@@ -1,101 +0,0 @@
-import { alan, utilitas } from '../index.mjs';
-
-const onProgress = { onProgress: true };
-const LN2 = '\n\n';
-const [joinL1, joinL2] = [a => a.join(LN2), a => a.join(LN2)];
-const log = content => utilitas.log(content, import.meta.url);
-
-const action = async (ctx, next) => {
-    if (!ctx.prompt && !ctx.carry.attachments.length) { return await next(); }
-    let [
-        ais, YOU, msgs, pms, extra, lock, sResp, lastMsg, lastSent, references,
-        audio,
-    ] = [
-        await alan.getAi(null, { all: true }), `${ctx.avatar} You:`, {}, [],
-        { buttons: [] }, 1000 * 5, null, null, 0, null, null,
-    ];
-    const packMsg = options => {
-        const said = !options?.tts && ctx.result ? ctx.result : '';
-        const packed = [
-            ...ctx.carry?.threadInfo, ...said ? [joinL2([YOU, said])] : [],
-        ];
-        const pure = [];
-        ctx.selectedAi.map(n => {
-            const content = msgs[n]?.[options?.tts ? 'spoken' : 'text'] || '';
-            pure.push(content);
-            const ai = ais.find(x => x.id === n);
-            const aiName = ai.name.replace(
-                /^(.*\().*(\))$/,
-                `$1${msgs[n]?.model.replace(/^[^\/]*\//, '')}$2`
-            );
-            packed.push(joinL2([
-                ...options?.tts ? [] : [`${aiName}:`], content
-            ]));
-        });
-        return pure.join('').trim().length ? joinL1(packed) : '';
-    };
-    const ok = async options => {
-        const [curTime, curMsg] = [Date.now(), packMsg(options)];
-        if (options?.onProgress && (
-            curTime - lastSent < ctx.limit || lastMsg === curMsg
-        )) { return; }
-        [lastSent, lastMsg] = [curTime + lock, curMsg];
-        const cmd = ctx.session.context?.cmd;
-        if (options?.final) {
-            (references?.links || []).map((x, i) => extra.buttons.push({
-                label: `${i + 1}. ${x.title}`, url: x.uri,
-            }));
-            cmd && (extra.buttons.push({
-                label: `❎ End context: \`${cmd}\``, text: '/clear',
-            }));
-        }
-        sResp = await ctx.ok(curMsg, {
-            ...ctx.carry.keyboards ? { keyboards: ctx.carry.keyboards } : {},
-            md: true, ...extra, ...options || {},
-        });
-        lastSent = curTime;
-        return sResp;
-    };
-    ctx.carry.threadInfo.length || await ok(onProgress);
-    for (const n of ctx.selectedAi) {
-        pms.push((async ai => {
-            try {
-                const resp = await alan.talk(ctx.prompt || alan.ATTACHMENTS, {
-                    aiId: ai, ...ctx.carry, stream: async r => {
-                        msgs[ai] = r;
-                        ctx.carry.threadInfo.length || ok(onProgress);
-                    },
-                });
-                references = resp.references;
-                audio = resp.audio;
-                msgs[ai] = resp;
-                msgs[ai].spoken = ctx.selectedAi.length === 1
-                    && !resp.text.split('\n').some(x => /^\s*```/.test(x))
-                    ? resp.spoken : null;
-                for (let img of resp?.images || []) {
-                    await ctx.image(img.data, { caption: `🎨 by ${resp.model}` });
-                    await ctx.timeout();
-                }
-                return resp;
-            } catch (err) {
-                msgs[ai] = {
-                    ...msgs[ai], text: `⚠️ ${err?.message || err}`,
-                    spoken: null,
-                };
-                log(err);
-            }
-        })(n));
-    }
-    await Promise.all(pms);
-    await (Object.values(msgs).map(x => x.text).join('').trim()
-        ? ok({ final: true }) : ctx.deleteMessage(sResp[0].message_id));
-    ctx.tts = audio || packMsg({ tts: true });
-    await next();
-};
-
-export const { name, run, priority, func } = {
-    name: 'Chat',
-    run: true,
-    priority: 70,
-    func: action,
-};
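
A standalone sketch of the throttling inside `ok()` above: streaming progress updates are dropped while within the rate limit or when the rendered message has not changed, and the timestamp is bumped by a lock window until the send resolves. `send()` stands in for `ctx.ok()`, and `limit`/`lock` mirror `ctx.limit` and the 5-second lock.

```js
// Hypothetical factory wrapping a send() callback with the same throttle logic.
const makeThrottledOk = (send, limit = 1000, lock = 1000 * 5) => {
    let [lastSent, lastMsg] = [0, null];
    return async (curMsg, options) => {
        const curTime = Date.now();
        if (options?.onProgress
            && (curTime - lastSent < limit || lastMsg === curMsg)) { return; }
        [lastSent, lastMsg] = [curTime + lock, curMsg]; // lock out overlapping sends
        const resp = await send(curMsg, options);
        lastSent = curTime; // release the lock once the message is out
        return resp;
    };
};

const ok = makeThrottledOk(async msg => console.log('sent:', msg));
await ok('partial answer…', { onProgress: true });
await ok('partial answer…', { onProgress: true }); // dropped: unchanged and too soon
await ok('final answer', { final: true });         // non-progress updates always go out
```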