chibi-bot 1.6.0b0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- chibi/__init__.py +0 -0
- chibi/__main__.py +343 -0
- chibi/cli.py +90 -0
- chibi/config/__init__.py +6 -0
- chibi/config/app.py +123 -0
- chibi/config/gpt.py +108 -0
- chibi/config/logging.py +15 -0
- chibi/config/telegram.py +43 -0
- chibi/config_generator.py +233 -0
- chibi/constants.py +362 -0
- chibi/exceptions.py +58 -0
- chibi/models.py +496 -0
- chibi/schemas/__init__.py +0 -0
- chibi/schemas/anthropic.py +20 -0
- chibi/schemas/app.py +54 -0
- chibi/schemas/cloudflare.py +65 -0
- chibi/schemas/mistralai.py +56 -0
- chibi/schemas/suno.py +83 -0
- chibi/service.py +135 -0
- chibi/services/bot.py +276 -0
- chibi/services/lock_manager.py +20 -0
- chibi/services/mcp/manager.py +242 -0
- chibi/services/metrics.py +54 -0
- chibi/services/providers/__init__.py +16 -0
- chibi/services/providers/alibaba.py +79 -0
- chibi/services/providers/anthropic.py +40 -0
- chibi/services/providers/cloudflare.py +98 -0
- chibi/services/providers/constants/suno.py +2 -0
- chibi/services/providers/customopenai.py +11 -0
- chibi/services/providers/deepseek.py +15 -0
- chibi/services/providers/eleven_labs.py +85 -0
- chibi/services/providers/gemini_native.py +489 -0
- chibi/services/providers/grok.py +40 -0
- chibi/services/providers/minimax.py +96 -0
- chibi/services/providers/mistralai_native.py +312 -0
- chibi/services/providers/moonshotai.py +20 -0
- chibi/services/providers/openai.py +74 -0
- chibi/services/providers/provider.py +892 -0
- chibi/services/providers/suno.py +130 -0
- chibi/services/providers/tools/__init__.py +23 -0
- chibi/services/providers/tools/cmd.py +132 -0
- chibi/services/providers/tools/common.py +127 -0
- chibi/services/providers/tools/constants.py +78 -0
- chibi/services/providers/tools/exceptions.py +1 -0
- chibi/services/providers/tools/file_editor.py +875 -0
- chibi/services/providers/tools/mcp_management.py +274 -0
- chibi/services/providers/tools/mcp_simple.py +72 -0
- chibi/services/providers/tools/media.py +451 -0
- chibi/services/providers/tools/memory.py +252 -0
- chibi/services/providers/tools/schemas.py +10 -0
- chibi/services/providers/tools/send.py +435 -0
- chibi/services/providers/tools/tool.py +163 -0
- chibi/services/providers/tools/utils.py +146 -0
- chibi/services/providers/tools/web.py +261 -0
- chibi/services/providers/utils.py +182 -0
- chibi/services/task_manager.py +93 -0
- chibi/services/user.py +269 -0
- chibi/storage/abstract.py +54 -0
- chibi/storage/database.py +86 -0
- chibi/storage/dynamodb.py +257 -0
- chibi/storage/local.py +70 -0
- chibi/storage/redis.py +91 -0
- chibi/utils/__init__.py +0 -0
- chibi/utils/app.py +249 -0
- chibi/utils/telegram.py +521 -0
- chibi_bot-1.6.0b0.dist-info/LICENSE +21 -0
- chibi_bot-1.6.0b0.dist-info/METADATA +340 -0
- chibi_bot-1.6.0b0.dist-info/RECORD +70 -0
- chibi_bot-1.6.0b0.dist-info/WHEEL +4 -0
- chibi_bot-1.6.0b0.dist-info/entry_points.txt +3 -0
chibi/__init__.py
ADDED
File without changes
chibi/__main__.py
ADDED
@@ -0,0 +1,343 @@
from contextvars import Context
from typing import Any, Coroutine, TypeVar

from loguru import logger
from telegram import (
    BotCommand,
    CallbackQuery,
    InlineQueryResultArticle,
    InputTextMessageContent,
    Update,
    constants,
)
from telegram.ext import (
    Application,
    ApplicationBuilder,
    CallbackQueryHandler,
    CommandHandler,
    ContextTypes,
    InlineQueryHandler,
    MessageHandler,
    filters,
)

from chibi.config import application_settings, gpt_settings, telegram_settings
from chibi.constants import GROUP_CHAT_TYPES, UserAction, UserContext
from chibi.schemas.app import ModelChangeSchema
from chibi.services.bot import (
    handle_available_model_options,
    handle_available_provider_options,
    handle_image_generation,
    handle_model_selection,
    handle_provider_api_key_set,
    handle_reset,
    handle_user_prompt,
)
from chibi.services.providers import RegisteredProviders
from chibi.services.task_manager import task_manager
from chibi.utils.app import log_application_settings, run_heartbeat
from chibi.utils.telegram import (
    check_user_allowance,
    current_user_action,
    get_telegram_chat,
    get_telegram_message,
    get_user_context,
    set_user_action,
    set_user_context,
    user_interacts_with_bot,
)

_T = TypeVar("_T")


class ChibiBot:
    def __init__(self) -> None:
        self.commands = [
            BotCommand(command="help", description="Show this help message"),
            BotCommand(
                command="ask",
                description=(
                    "Ask me any question (in group chat, for example) (e.g. /ask which program language is the best?)"
                ),
            ),
            BotCommand(
                command="reset",
                description="Reset your conversation history (will reduce prompt and save some tokens)",
            ),
        ]
        if not application_settings.hide_imagine:
            self.commands.append(
                BotCommand(command="imagine", description="Generate image from prompt"),
            )
            self.commands.append(BotCommand(command="image_models", description="Select image generation model"))
        if not application_settings.hide_models:
            self.commands.append(BotCommand(command="gpt_models", description="Select GPT model"))

        if gpt_settings.public_mode:
            self.commands.append(
                BotCommand(
                    command="set_api_key",
                    description="Set an API key (token) for any of supported providers",
                )
            )

    def run_task(
        self,
        coro: Coroutine[Any, Any, _T],
        name: str | None = None,
        context: Context | None = None,
    ) -> None:
        task_manager.run_task(coro)

    async def help(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
        telegram_message = get_telegram_message(update=update)
        commands = [f"/{command.command} - {command.description}" for command in self.commands]
        commands_desc = "\n".join(commands)
        help_text = (
            f"Hey! My name is {telegram_settings.bot_name}, and I'm your ChatGPT experience provider!\n\n"
            f"{commands_desc}"
        )
        await telegram_message.reply_text(help_text, disable_web_page_preview=True)

    @check_user_allowance
    async def reset(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
        self.run_task(handle_reset(update=update, context=context))

    async def _handle_message_with_api_key(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
        provider_name = get_user_context(context=context, key=UserContext.SELECTED_PROVIDER, expected_type=str)
        if not provider_name:
            return None
        self.run_task(handle_provider_api_key_set(update=update, context=context, provider_name=provider_name))
        return None

    @check_user_allowance
    async def imagine(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
        telegram_message = get_telegram_message(update=update)
        assert telegram_message.text
        prompt = telegram_message.text.replace("/imagine", "", 1).strip()
        if prompt:
            self.run_task(handle_image_generation(update=update, context=context, prompt=prompt))
            return None
        set_user_action(context=context, action=UserAction.IMAGINE)
        await telegram_message.reply_text("Ok, now give me an image prompt.")

    @check_user_allowance
    async def prompt(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
        telegram_chat = get_telegram_chat(update=update)
        telegram_message = get_telegram_message(update=update)
        if telegram_message.voice:
            self.run_task(handle_user_prompt(update=update, context=context))
            return None

        prompt = telegram_message.text

        if not prompt:
            return None

        if current_user_action(context=context) == UserAction.SET_API_KEY:
            set_user_action(context=context, action=UserAction.NONE)
            return await self._handle_message_with_api_key(update=update, context=context)

        if current_user_action(context=context) == UserAction.IMAGINE:
            self.run_task(handle_image_generation(update=update, context=context, prompt=prompt))
            return None

        if (
            telegram_chat.type in GROUP_CHAT_TYPES
            and telegram_settings.answer_direct_messages_only
            and "/ask" not in prompt
            and not user_interacts_with_bot(update=update, context=context)
        ):
            return None
        self.run_task(handle_user_prompt(update=update, context=context))

    @check_user_allowance
    async def ask(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
        self.run_task(handle_user_prompt(update=update, context=context))

    @check_user_allowance
    async def show_gpt_models_menu(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
        telegram_message = get_telegram_message(update=update)
        reply_markup = await handle_available_model_options(update=update, context=context)

        if active_model := get_user_context(context=context, key=UserContext.ACTIVE_MODEL, expected_type=str):
            message = f"Active model: {active_model}. You may select another one from the list below:"
        else:
            message = "Please, select model:"
        set_user_action(context=context, action=UserAction.SELECT_MODEL)
        await telegram_message.reply_text(text=message, reply_markup=reply_markup)

    @check_user_allowance
    async def show_image_models_menu(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
        telegram_message = get_telegram_message(update=update)
        reply_markup = await handle_available_model_options(update=update, context=context, image_generation=True)

        if active_model := get_user_context(context=context, key=UserContext.ACTIVE_IMAGE_MODEL, expected_type=str):
            message = f"Active model: {active_model}. You may select another one from the list below:"
        else:
            message = "Please, select model:"
        set_user_action(context=context, action=UserAction.SELECT_MODEL)
        await telegram_message.reply_text(text=message, reply_markup=reply_markup)

    async def show_api_key_set_menu(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
        telegram_message = get_telegram_message(update=update)
        reply_markup = await handle_available_provider_options()

        message = "Please, select a provider:"
        await telegram_message.reply_text(text=message, reply_markup=reply_markup)
        set_user_action(context=context, action=UserAction.SELECT_PROVIDER)

    async def _compute_model_selection_action(
        self, query: CallbackQuery, update: Update, context: ContextTypes.DEFAULT_TYPE
    ) -> None:
        mapped_models = get_user_context(
            context=context,
            key=UserContext.MAPPED_MODELS,
            expected_type=dict[str, ModelChangeSchema],
        )
        await query.answer()

        if not mapped_models or not query.data:
            await query.delete_message()
            return None

        if query.data == "-1":
            await query.delete_message()
            return None

        model = mapped_models.get(query.data)
        if not model:
            await query.delete_message()
            return None

        if model.image_generation:
            set_user_context(context=context, key=UserContext.ACTIVE_IMAGE_MODEL, value=model.name)
        else:
            set_user_context(context=context, key=UserContext.ACTIVE_MODEL, value=model.name)
        self.run_task(
            handle_model_selection(
                update=update,
                context=context,
                model=model,
                query=query,
            )
        )
        set_user_action(context=context, action=UserAction.NONE)

    async def _compute_provider_selection_action(
        self, query: CallbackQuery, context: ContextTypes.DEFAULT_TYPE
    ) -> None:
        await query.answer()
        provider_name = query.data
        if not provider_name or provider_name not in RegisteredProviders.all.keys():
            await query.delete_message()
            return
        set_user_context(context=context, key=UserContext.SELECTED_PROVIDER, value=provider_name)
        await query.edit_message_text(
            text=f"{provider_name} selected.\nNow please send me an API key",
        )
        set_user_action(context=context, action=UserAction.SET_API_KEY)

    async def handle_selection(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
        action = current_user_action(context=context)
        if not action or action == UserAction.NONE:
            return None

        query = update.callback_query
        if not query:
            return None

        if action == UserAction.SELECT_MODEL:
            return await self._compute_model_selection_action(query=query, update=update, context=context)

        if action == UserAction.SELECT_PROVIDER:
            return await self._compute_provider_selection_action(query=query, context=context)

    @check_user_allowance
    async def inline_query(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
        inline_query = update.inline_query
        if not inline_query:
            return
        query = inline_query.query
        results = [
            InlineQueryResultArticle(
                id=query,
                title="Ask ChatGPT",
                input_message_content=InputTextMessageContent(query),
                description=query,
                thumbnail_url="https://github.com/s-nagaev/chibi/raw/main/docs/logo.png",
            )
        ]

        await inline_query.answer(results)

    async def error_handler(self, update: object, context: ContextTypes.DEFAULT_TYPE) -> None:
        logger.error(f"Error occurred while handling an update: {context.error}")

    async def post_init(self, application: Application) -> None:
        await application.bot.set_my_commands(self.commands)

    def run(self) -> None:
        builder = (
            ApplicationBuilder()
            .base_url(telegram_settings.telegram_base_url)
            .base_file_url(telegram_settings.telegram_base_file_url)
            .token(telegram_settings.token)
            .post_init(self.post_init)
            .post_shutdown(task_manager.shutdown)
        )

        if telegram_settings.proxy:
            builder = builder.proxy(telegram_settings.proxy).get_updates_proxy(telegram_settings.proxy)
        app = builder.build()

        if not application_settings.hide_imagine:
            app.add_handler(CommandHandler(command="imagine", callback=self.imagine))

        if not application_settings.hide_models:
            app.add_handler(CommandHandler("gpt_models", self.show_gpt_models_menu))
            app.add_handler(CommandHandler("image_models", self.show_image_models_menu))
        app.add_handler(CallbackQueryHandler(self.handle_selection))

        if gpt_settings.public_mode:
            app.add_handler(CommandHandler("set_api_key", self.show_api_key_set_menu))

        app.add_handler(CommandHandler("ask", self.ask))
        app.add_handler(CommandHandler("help", self.help))
        app.add_handler(CommandHandler("reset", self.reset))
        app.add_handler(CommandHandler("start", self.help))

        app.add_handler(MessageHandler(filters.TEXT | filters.VOICE | filters.AUDIO & (~filters.COMMAND), self.prompt))

        app.add_handler(
            InlineQueryHandler(
                self.inline_query,
                chat_types=[
                    constants.ChatType.PRIVATE,
                    constants.ChatType.GROUP,
                    constants.ChatType.SUPERGROUP,
                ],
            )
        )
        # app.add_error_handler(self.error_handler)
        if application_settings.heartbeat_url:
            if not app.job_queue:
                logger.error("Could not launch heartbeat beacon: application job queue was shut down or never started.")
            else:
                url = application_settings.heartbeat_url
                logger.info(
                    f"Launching heartbeat beacon: calling {url[:30]}..{url[-3:]} "
                    f"every {application_settings.heartbeat_frequency_call} seconds."
                )
                app.job_queue.run_repeating(
                    callback=run_heartbeat,
                    interval=application_settings.heartbeat_frequency_call,
                    first=0.0,
                )
        app.run_polling()


if __name__ == "__main__":
    log_application_settings()
    telegram_bot = ChibiBot()
    telegram_bot.run()
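A minimal sketch of launching the bot programmatically, mirroring the __main__ guard above; it assumes the Telegram token and at least one provider key are already supplied via the environment or a .env file (see the chibi/config modules below):

from chibi.__main__ import ChibiBot
from chibi.utils.app import log_application_settings

# Same sequence as the module's __main__ block: log the effective settings,
# build the handler set in ChibiBot.__init__, then start long polling.
log_application_settings()
ChibiBot().run()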
chibi/cli.py
ADDED
@@ -0,0 +1,90 @@
import os
import subprocess
import sys

import click

from chibi.config_generator import CONFIG_PATH, generate_default_config
from chibi.service import Service


def _get_service() -> Service:
    """Get an instance of the Service class.

    Returns:
        Service: Initialized service instance.
    """
    return Service()


@click.group()
def main() -> None:
    """Chibi CLI for managing the bot service."""
    pass


@main.command()
def start() -> None:
    """Start the Chibi bot service as a daemon."""
    service = _get_service()
    service.start()


@main.command()
def stop() -> None:
    """Stop the running Chibi bot service."""
    service = _get_service()
    service.stop()


@main.command()
def restart() -> None:
    """Restart the Chibi bot service."""
    service = _get_service()
    service.restart()


@main.command()
def config() -> None:
    """Open the Chibi configuration file in the default editor."""
    if not os.path.exists(CONFIG_PATH):
        click.echo("Creating default configuration...")
        generate_default_config()

    editor = os.environ.get("EDITOR", "vi")
    try:
        result = subprocess.call([editor, CONFIG_PATH])
        if result != 0:
            click.echo("Warning: Editor exited with non-zero status.", err=True)
    except FileNotFoundError:
        click.echo(f"Error: Editor '{editor}' not found.", err=True)
        sys.exit(1)
    except Exception as e:
        click.echo(f"Error opening editor: {e}", err=True)
        sys.exit(1)


@main.command()
def logs() -> None:
    """Tail the Chibi log file."""
    service = _get_service()
    log_path = service.log_path

    if not os.path.exists(log_path):
        click.echo(f"Log file {log_path} does not exist yet. Start the service first.")
        sys.exit(1)

    try:
        subprocess.call(["tail", "-n", "50", "-f", log_path])
    except KeyboardInterrupt:
        click.echo("\nLog tailing stopped.")
    except FileNotFoundError:
        click.echo("Error: 'tail' command not found.", err=True)
        sys.exit(1)
    except Exception as e:
        click.echo(f"Error tailing logs: {e}", err=True)
        sys.exit(1)


if __name__ == "__main__":
    main()
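A minimal sketch of exercising the click group above in-process with click's test runner (rather than via the installed console script, whose exact name lives in entry_points.txt and is not shown here):

from click.testing import CliRunner

from chibi.cli import main

# "--help" lists the subcommands defined above: start, stop, restart, config, logs.
runner = CliRunner()
result = runner.invoke(main, ["--help"])
print(result.output)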
chibi/config/__init__.py
ADDED
chibi/config/app.py
ADDED
@@ -0,0 +1,123 @@
import sys
from functools import lru_cache
from pathlib import Path
from typing import Literal

from loguru import logger
from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict


class ApplicationSettings(BaseSettings):
    """
    Application settings loaded from environment or .env file.

    Attributes:
        redis: Redis connection URL.
        redis_password: Password for Redis.
        aws_region: AWS region for DynamoDB.
        aws_access_key_id: AWS access key ID.
        aws_secret_access_key: AWS secret access key.
        ddb_users_table: DynamoDB table name for users.
        ddb_messages_table: DynamoDB table name for messages.
        local_data_path: Filesystem path for local storage.
        log_prompt_data: Whether to log prompt data.
        hide_models: Hide model options in UI.
        hide_imagine: Hide imagine commands.
        heartbeat_url: URL for heartbeat check.
        heartbeat_frequency_call: Interval between heartbeat calls.
        heartbeat_retry_calls: Number of retries for heartbeat.
        heartbeat_proxy: Proxy URL for heartbeat.
    """

    model_config = SettingsConfigDict(
        env_file=(".env",),
        extra="ignore",
    )

    # Redis settings
    redis: str | None = Field(default=None)
    redis_password: str | None = Field(default=None)

    # DynamoDB settings
    aws_region: str | None = Field(default=None)
    aws_access_key_id: str | None = Field(default=None)
    aws_secret_access_key: str | None = Field(default=None)
    ddb_users_table: str | None = Field(default=None)
    ddb_messages_table: str | None = Field(default=None)

    # Local storage settings
    local_data_path: str = Field(default="/app/data")

    # MCP settings
    enable_mcp_sse: bool = Field(default=True)
    enable_mcp_stdio: bool = Field(default=False)

    # HeartBeat settings
    heartbeat_url: str | None = Field(default=None)
    heartbeat_frequency_call: int = Field(default=60, ge=30)
    heartbeat_retry_calls: int = Field(default=3)
    heartbeat_proxy: str | None = Field(default=None)

    # InfluxDB settings
    influxdb_url: str | None = Field(default=None)
    influxdb_token: str | None = Field(default=None)
    influxdb_org: str | None = Field(default=None)
    influxdb_bucket: str | None = Field(default=None)

    # Interface
    hide_models: bool = Field(default=False)
    hide_imagine: bool = Field(default=False)

    # Other settings
    log_prompt_data: bool = Field(default=False)

    # Agent settings
    home_dir: str = Field(default="~/chibi")  # AI agent's home directory
    working_dir: str = Field(default="~/chibi")  # AI agent's CWD
    skills_dir: str = Field(default=(Path(".") / "skills").absolute().as_posix())  # absolute path to dir with skills
    # max_consecutive_tool_calls: int = Field(default=50)

    @property
    def is_influx_configured(self) -> bool:
        return all((self.influxdb_url, self.influxdb_token, self.influxdb_org, self.influxdb_bucket))

    @property
    def storage_backend(self) -> Literal["local", "redis", "dynamodb"]:
        if self.redis:
            return "redis"
        if self.aws_access_key_id and self.aws_secret_access_key:
            return "dynamodb"
        return "local"


@lru_cache()
def _get_application_settings() -> ApplicationSettings:
    return ApplicationSettings()


def add_user_context(record) -> bool:
    user_id = record["extra"].get("user_id")
    record["extra"]["user_id"] = f"[{user_id}] " if user_id else ""
    return True


logger.remove()
logger.add(
    sys.stderr,
    format="<level>{level: <9}</level> | "
    "<green>{time:YYYY-MM-DD HH:mm:ss.SSS zz}</green> | "
    "{extra[user_id]}"
    "<level>{message}</level>",
    filter=add_user_context,
)
logger.level("TOOL", no=20, color="<light-blue>")
logger.level("THINK", no=20, color="<light-magenta>")
logger.level("CALL", no=20, color="<magenta>")
logger.level("CHECK", no=20, color="<light-red>")
logger.level("MODERATOR", no=20, color="<light-red>")
logger.level("SUBAGENT", no=20, color="<cyan>")
logger.level("DELEGATE", no=20, color="<blue>")


application_settings = _get_application_settings()
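A minimal sketch of how the storage_backend property above resolves, assuming settings are supplied via environment variables (pydantic-settings also reads them from a local .env file, per model_config); the Redis URL is purely illustrative:

import os

# With a Redis URL set, "redis" wins over the DynamoDB and local fallbacks.
os.environ["REDIS"] = "redis://localhost:6379/0"

from chibi.config.app import ApplicationSettings

settings = ApplicationSettings()
print(settings.storage_backend)  # -> "redis"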
chibi/config/gpt.py
ADDED
@@ -0,0 +1,108 @@
from functools import lru_cache
from typing import Literal

from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict

from chibi.constants import IMAGE_ASPECT_RATIO_LITERAL, IMAGE_SIZE_LITERAL, get_llm_prompt


class GPTSettings(BaseSettings):
    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", extra="ignore")

    alibaba_key: str | None = Field(alias="ALIBABA_API_KEY", default=None)
    anthropic_key: str | None = Field(alias="ANTHROPIC_API_KEY", default=None)
    cloudflare_account_id: str | None = Field(alias="CLOUDFLARE_ACCOUNT_ID", default=None)
    cloudflare_key: str | None = Field(alias="CLOUDFLARE_API_KEY", default=None)
    customopenai_key: str | None = Field(alias="CUSTOMOPENAI_API_KEY", default=None)
    customopenai_url: str = Field(alias="CUSTOMOPENAI_URL", default="http://localhost:1234/v1")
    deepseek_key: str | None = Field(alias="DEEPSEEK_API_KEY", default=None)
    gemini_key: str | None = Field(alias="GEMINI_API_KEY", default=None)
    grok_key: str | None = Field(alias="GROK_API_KEY", default=None)
    mistralai_key: str | None = Field(alias="MISTRALAI_API_KEY", default=None)
    moonshotai_key: str | None = Field(alias="MOONSHOTAI_API_KEY", default=None)
    openai_key: str | None = Field(alias="OPENAI_API_KEY", default=None)
    suno_key: str | None = Field(alias="SUNO_API_ORG_API_KEY", default=None)
    elevenlabs_api_key: str | None = Field(alias="ELEVEN_LABS_API_KEY", default=None)
    minimax_api_key: str | None = Field(alias="MINIMAX_API_KEY", default=None)

    frequency_penalty: float = Field(default=0)
    max_tokens: int = Field(default=32000)
    presence_penalty: float = Field(default=0)
    temperature: float = Field(default=1)

    backoff_factor: float = Field(default=0.5)
    retries: int = Field(default=3)
    timeout: int = Field(default=600)

    image_generations_monthly_limit: int = Field(alias="IMAGE_GENERATIONS_LIMIT", default=0)
    image_n_choices: int = Field(default=1, ge=1, le=4)
    image_quality: Literal["standard", "hd"] = Field(default="standard")
    image_size: IMAGE_SIZE_LITERAL = Field(default="1024x1024")
    image_aspect_ratio: IMAGE_ASPECT_RATIO_LITERAL = Field(default="16:9")
    image_size_nano_banana: Literal["1K", "2K", "4K"] = Field(default="2K")
    image_size_imagen: Literal["1K", "2K"] = Field(default="2K")
    image_size_alibaba: str = "1664*928"

    default_model: str | None = Field(default=None)
    default_provider: str | None = Field(default=None)

    stt_provider: str | None = Field(default=None)
    stt_model: str | None = Field(default=None)
    tts_provider: str | None = Field(default=None)
    tts_model: str | None = Field(default=None)

    moderation_provider: str | None = Field(default=None)
    moderation_model: str | None = Field(default=None)

    max_conversation_age_minutes: int = Field(default=360)
    max_history_tokens: int = Field(default=64000)

    image_generations_whitelist_raw: str | None = Field(alias="IMAGE_GENERATIONS_WHITELIST", default=None)
    models_whitelist_raw: str | None = Field(alias="MODELS_WHITELIST", default=None)
    proxy: str | None = Field(default=None)
    public_mode: bool = Field(default=False)
    show_llm_thoughts: bool = Field(default=False)

    filesystem_access: bool = Field(default=False)
    allow_delegation: bool = Field(default=True)
    tools_whitelist_raw: str | None = Field(alias="TOOLS_WHITELIST", default=None)

    google_search_api_key: str | None = Field(default=None)
    google_search_cx: str | None = Field(default=None)

    @property
    def google_search_client_set(self) -> bool:
        return bool(self.google_search_api_key) and bool(self.google_search_cx)

    @property
    def assistant_prompt(self) -> str:
        return get_llm_prompt(filesystem_access=self.filesystem_access, allow_delegation=self.allow_delegation)

    @property
    def models_whitelist(self) -> list[str]:
        return [x.strip() for x in self.models_whitelist_raw.split(",")] if self.models_whitelist_raw else []

    @property
    def image_generations_whitelist(self) -> list[str]:
        return (
            [x.strip() for x in self.image_generations_whitelist_raw.split(",")]
            if self.image_generations_whitelist_raw
            else []
        )

    @property
    def tools_whitelist(self) -> list[str]:
        return [x.strip() for x in self.tools_whitelist_raw.split(",")] if self.tools_whitelist_raw else []

    @property
    def messages_ttl(self) -> int:
        return self.max_conversation_age_minutes * 60


@lru_cache()
def _get_gpt_settings() -> GPTSettings:
    return GPTSettings()


gpt_settings: GPTSettings = _get_gpt_settings()
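A minimal sketch of the comma-separated whitelist parsing above, assuming MODELS_WHITELIST is provided through the environment (the model names here are purely illustrative):

import os

os.environ["MODELS_WHITELIST"] = "gpt-4o, o3-mini"

from chibi.config.gpt import GPTSettings

settings = GPTSettings()
# models_whitelist splits the raw value on commas and strips whitespace around each entry.
print(settings.models_whitelist)  # -> ['gpt-4o', 'o3-mini']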
chibi/config/logging.py
ADDED
@@ -0,0 +1,15 @@
import sys
from typing import Any

from loguru import logger

config: dict[Any, Any] = {
    "handlers": [
        {
            "sink": sys.stdout,
            "colorize": True,
            "format": "<lvl>{level}</lvl>\t| <green>{time:YYYY-MM-DD HH:mm:ss.SSS zz}</green> | <lvl>{message}</lvl>",
        },
    ],
}
logger.configure(**config)