halbot 1994.1.6 → 1994.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -78,7 +78,7 @@ All supported configuration fields:
78
78
  // REQUIRED, string.
79
79
  "telegramToken": "[[Telegram Bot API Token]]",
80
80
 
81
- // Set some of these fields if you need Openrouter's AI models.
81
+ // Set some of these fields if you need Openrouter's LLM/Embedding/STT models.
82
82
  // OPTIONAL, string.
83
83
  "openrouterApiKey": "[[OpenRouter API Key]]",
84
84
  // OPTIONAL, string, default: "*".
@@ -86,7 +86,13 @@ All supported configuration fields:
86
86
  // OPTIONAL, integer.
87
87
  "openrouterPriority": "[[Custom OpenRouter Priority]]",
88
88
 
89
- // Set some of these fields if you need OpenAI's ChatGPT, Whisper, Embedding features.
89
+ // Set some of these fields if you need Google's search, imagen, veo, TTS features.
90
+ // OPTIONAL, string.
91
+ "googleApiKey": "[[Google Cloud / Gemini API Key]]",
92
+ // OPTIONAL, string, set if you need Google Search as a tool.
93
+ "googleCx": "[[Google Search Engine ID]]",
94
+
95
+ // Set some of these fields if you need OpenAI's ChatGPT, TTS/STT, Embedding features.
90
96
  // OPTIONAL, string.
91
97
  "openaiApiKey": "[[OpenAI API Key]]",
92
98
  // OPTIONAL, string.
@@ -96,13 +102,7 @@ All supported configuration fields:
96
102
  // OPTIONAL, integer.
97
103
  "openaiPriority": "[[Custom OpenAI Priority]]",
98
104
 
99
- // Set some of these fields if you need Google's search, imagen, veo, TTS, STT, OCR, OBJECT_DETECT, Embedding features.
100
- // OPTIONAL, string.
101
- "googleApiKey": "[[Google Cloud / Gemini API Key]]",
102
- // OPTIONAL, string, set if you need Google Search as a tool.
103
- "googleCx": "[[Google Search Engine ID]]",
104
-
105
- // Set some of these fields if you need SiliconFlow's DeepSearch features.
105
+ // Set some of these fields if you need SiliconFlow's AI models.
106
106
  // OPTIONAL, string.
107
107
  "siliconflowApiKey": "[[SiliconFlow API Key]]",
108
108
  // OPTIONAL, string.
package/index.mjs CHANGED
@@ -13,34 +13,36 @@ const init = async (options = {}) => {
13
13
  const info = bot.lines([
14
14
  `[${hal.EMOJI_BOT} ${pkg.title}](${pkg.homepage})`, pkg.description
15
15
  ]);
16
- // use AI vision, AI stt if ChatGPT or Gemini is enabled
16
+ // use AI vision, AI stt if OpenRouter, Gemini or OpenAI is enabled
17
17
  if (options.openrouterApiKey
18
18
  || options.openaiApiKey || options.googleApiKey) {
19
19
  vision.read = alan.distillFile;
20
20
  vision.see = alan.distillFile;
21
21
  _speech?.stt || (_speech.stt = alan.distillFile);
22
22
  }
23
- // use openai's dall-e, embedding, tts if openai is enabled
24
- if (options.openaiApiKey) {
25
- opts = { provider: 'OPENAI', apiKey: options.openaiApiKey };
26
- await gen.init(opts);
27
- if (!_embedding) {
28
- await embedding.init(opts);
29
- _embedding = embedding.embed;
30
- }
31
- if (!_speech.tts) {
32
- await speech.init({ ...opts, ...speechOptions });
33
- _speech.tts = speech.tts;
34
- }
23
+ // use embedding if OpenRouter is enabled
24
+ if (options.openrouterApiKey && !_embedding) {
25
+ await embedding.init({
26
+ provider: 'OPENROUTER', apiKey: options.openrouterApiKey,
27
+ });
28
+ _embedding = embedding.embed;
35
29
  }
36
- // use google's imagen, veo, search if google is enabled
37
- // use google's embedding, tts if google is enabled and ChatGPT is not
30
+ // use google's imagen, veo, search, tts if google is enabled
38
31
  if (options.googleApiKey) {
39
32
  opts = { provider: 'GOOGLE', apiKey: options.googleApiKey };
40
33
  await gen.init(opts);
41
34
  options.googleCx && await web.initSearch({
42
35
  ...opts, cx: options.googleCx,
43
36
  });
37
+ if (!_speech.tts) {
38
+ await speech.init({ ...opts, ...speechOptions });
39
+ _speech.tts = speech.tts;
40
+ }
41
+ }
42
+ // use openai's dall-e, embedding, tts if openai is enabled and google is not
43
+ if (options.openaiApiKey) {
44
+ opts = { provider: 'OPENAI', apiKey: options.openaiApiKey };
45
+ await gen.init(opts);
44
46
  if (!_embedding) {
45
47
  await embedding.init(opts);
46
48
  _embedding = embedding.embed;
package/lib/hal.mjs CHANGED
@@ -87,12 +87,12 @@ const initSql = {
87
87
  response_text TEXT NOT NULL,
88
88
  collected TEXT NOT NULL,
89
89
  distilled TEXT NOT NULL,
90
- distilled_vector VECTOR(1536) NOT NULL,
90
+ distilled_vector VECTOR(768) NOT NULL,
91
91
  created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
92
92
  updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
93
93
  )`)
94
94
  ], [
95
- `CREATE INDEX IF NOT EXISTS ${table}hal_id_index ON ${table} (bot_id)`,
95
+ `CREATE INDEX IF NOT EXISTS ${table}_bot_id_index ON ${table} (bot_id)`,
96
96
  ], [
97
97
  `CREATE INDEX IF NOT EXISTS ${table}_chat_id_index ON ${table} (chat_id)`,
98
98
  ], [
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "halbot",
3
3
  "description": "Just another AI powered Telegram bot, which is simple design, easy to use, extendable and fun.",
4
- "version": "1994.1.6",
4
+ "version": "1994.1.8",
5
5
  "private": false,
6
6
  "homepage": "https://github.com/Leask/halbot",
7
7
  "type": "module",
@@ -31,11 +31,10 @@
31
31
  "dependencies": {
32
32
  "@ffmpeg-installer/ffmpeg": "^1.1.0",
33
33
  "@ffprobe-installer/ffprobe": "^2.1.2",
34
- "@google-cloud/speech": "^7.2.1",
35
- "@google-cloud/text-to-speech": "^6.4.0",
36
34
  "@google/genai": "^1.31.0",
37
35
  "@mozilla/readability": "^0.6.0",
38
36
  "fluent-ffmpeg": "^2.1.3",
37
+ "google-gax": "^5.0.6",
39
38
  "ioredis": "^5.8.2",
40
39
  "js-tiktoken": "^1.0.21",
41
40
  "jsdom": "^27.2.0",
@@ -48,7 +47,7 @@
48
47
  "pgvector": "^0.2.1",
49
48
  "telegraf": "^4.16.3",
50
49
  "tesseract.js": "^6.0.1",
51
- "utilitas": "^2000.3.23",
50
+ "utilitas": "^2000.3.26",
52
51
  "youtube-transcript": "^1.2.1"
53
52
  }
54
53
  }
@@ -1,10 +1,10 @@
1
1
  import { bot, storage } from '../index.mjs';
2
2
 
3
- const GEMINI = 'GEMINI';
3
+ const GOOGLE = 'GOOGLE';
4
4
  const types = { image: 'photo', video: 'video' };
5
5
 
6
6
  const action = async (ctx, next) => {
7
- let [provider, func, reference] = [GEMINI, 'image', null];
7
+ let [provider, func, reference] = [GOOGLE, 'image', null];
8
8
  switch (ctx.cmd.cmd) {
9
9
  case 'fantasy': func = 'video'; break;
10
10
  case 'gptimage':