Rubka 7.2.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45) hide show
  1. rubka/__init__.py +79 -0
  2. rubka/adaptorrubka/__init__.py +4 -0
  3. rubka/adaptorrubka/client/__init__.py +1 -0
  4. rubka/adaptorrubka/client/client.py +60 -0
  5. rubka/adaptorrubka/crypto/__init__.py +1 -0
  6. rubka/adaptorrubka/crypto/crypto.py +82 -0
  7. rubka/adaptorrubka/enums.py +36 -0
  8. rubka/adaptorrubka/exceptions.py +22 -0
  9. rubka/adaptorrubka/methods/__init__.py +1 -0
  10. rubka/adaptorrubka/methods/methods.py +90 -0
  11. rubka/adaptorrubka/network/__init__.py +3 -0
  12. rubka/adaptorrubka/network/helper.py +22 -0
  13. rubka/adaptorrubka/network/network.py +221 -0
  14. rubka/adaptorrubka/network/socket.py +31 -0
  15. rubka/adaptorrubka/sessions/__init__.py +1 -0
  16. rubka/adaptorrubka/sessions/sessions.py +72 -0
  17. rubka/adaptorrubka/types/__init__.py +1 -0
  18. rubka/adaptorrubka/types/socket/__init__.py +1 -0
  19. rubka/adaptorrubka/types/socket/message.py +187 -0
  20. rubka/adaptorrubka/utils/__init__.py +2 -0
  21. rubka/adaptorrubka/utils/configs.py +18 -0
  22. rubka/adaptorrubka/utils/utils.py +251 -0
  23. rubka/api.py +1723 -0
  24. rubka/asynco.py +2541 -0
  25. rubka/button.py +404 -0
  26. rubka/config.py +3 -0
  27. rubka/context.py +1077 -0
  28. rubka/decorators.py +30 -0
  29. rubka/exceptions.py +37 -0
  30. rubka/filters.py +330 -0
  31. rubka/helpers.py +1461 -0
  32. rubka/jobs.py +15 -0
  33. rubka/keyboards.py +16 -0
  34. rubka/keypad.py +298 -0
  35. rubka/logger.py +12 -0
  36. rubka/metadata.py +114 -0
  37. rubka/rubino.py +1271 -0
  38. rubka/tv.py +145 -0
  39. rubka/update.py +1038 -0
  40. rubka/utils.py +3 -0
  41. rubka-7.2.8.dist-info/METADATA +1047 -0
  42. rubka-7.2.8.dist-info/RECORD +45 -0
  43. rubka-7.2.8.dist-info/WHEEL +5 -0
  44. rubka-7.2.8.dist-info/entry_points.txt +2 -0
  45. rubka-7.2.8.dist-info/top_level.txt +1 -0
rubka/helpers.py ADDED
@@ -0,0 +1,1461 @@
1
+ import asyncio
2
+ import datetime
3
+ import json
4
+ import re
5
+ import logging,os
6
+ import uuid
7
+ import random
8
+ from typing import Any, Callable, Awaitable, Optional, Dict, List, Tuple
9
+ from collections import defaultdict, deque
10
+
11
+
12
# Module-level logging: INFO threshold with timestamped "level - message" lines.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
# Shared logger used by every helper class in this module.
logger = logging.getLogger(__name__)
14
+
15
class StateManager:
    """Async-safe, in-memory mapping of user id -> conversation state."""

    def __init__(self):
        # Plain dict guarded by an asyncio lock for concurrent coroutine access.
        self._states: dict[str, str] = {}
        self._lock = asyncio.Lock()
        logger.info("StateManager initialized.")

    async def set(self, user_id: str, state: str):
        """Record *state* as the current state for *user_id*."""
        async with self._lock:
            self._states[user_id] = state
            logger.debug(f"State set for user {user_id}: {state}")

    async def get(self, user_id: str) -> Optional[str]:
        """Return the user's current state, or None if none is stored."""
        async with self._lock:
            current = self._states.get(user_id)
            logger.debug(f"State get for user {user_id}: {current}")
            return current

    async def clear(self, user_id: str):
        """Forget the user's state if one is stored."""
        async with self._lock:
            try:
                del self._states[user_id]
            except KeyError:
                return
            logger.debug(f"State cleared for user {user_id}.")

    async def check(self, user_id: str, state: str) -> bool:
        """Return True iff the user's stored state equals *state*."""
        async with self._lock:
            user_state = self._states.get(user_id)
            is_match = user_state == state
            logger.debug(f"State check for user {user_id}: expected '{state}', got '{user_state}'. Match: {is_match}")
            return is_match

    async def list_all(self) -> Dict[str, str]:
        """Return a snapshot copy of every stored state."""
        async with self._lock:
            logger.debug("Listing all states.")
            return dict(self._states)

    async def get_state_count(self) -> int:
        """Return how many users currently have a state."""
        async with self._lock:
            count = len(self._states)
            logger.debug(f"State count: {count}")
            return count
61
+
62
class DataStorage:
    """JSON-file-backed key/value store with an asyncio lock around mutations.

    Disk writes happen in the default executor so the event loop is never
    blocked by file I/O.
    """

    def __init__(self, file_path: str = "data.json"):
        self.file_path = file_path
        self._data: Dict[str, Any] = {}
        self._lock = asyncio.Lock()
        try:
            with open(self.file_path, "r", encoding="utf-8") as f:
                self._data = json.load(f)
        except (FileNotFoundError, json.JSONDecodeError) as e:
            self._data = {}
            logger.warning(f"Data file '{self.file_path}' not found or invalid JSON, starting with empty data. Error: {e}")
        else:
            logger.info(f"DataStorage initialized. Loaded data from {self.file_path}.")

    async def set(self, user_id: str, value: Any):
        """Store *value* under *user_id* and persist to disk."""
        async with self._lock:
            self._data[user_id] = value
            await self._async_save()
            logger.debug(f"Data set for user {user_id}.")

    async def get(self, user_id: str) -> Optional[Any]:
        """Return the stored value for *user_id*, or None."""
        async with self._lock:
            value = self._data.get(user_id)
            logger.debug(f"Data get for user {user_id}: {'Found' if value is not None else 'Not Found'}")
            return value

    async def delete(self, user_id: str):
        """Remove *user_id*'s entry (if any) and persist to disk."""
        async with self._lock:
            if user_id not in self._data:
                return
            del self._data[user_id]
            await self._async_save()
            logger.debug(f"Data deleted for user {user_id}.")

    async def _async_save(self):
        """Run the blocking save in the default executor. Caller holds the lock."""
        await asyncio.get_running_loop().run_in_executor(None, self._save)

    def _save(self):
        """Write the whole store to disk as pretty-printed UTF-8 JSON."""
        try:
            with open(self.file_path, "w", encoding="utf-8") as f:
                json.dump(self._data, f, ensure_ascii=False, indent=2)
        except IOError as e:
            logger.error(f"Failed to save data to {self.file_path}: {e}")
        else:
            logger.debug(f"Data saved to {self.file_path}.")

    async def get_all_data(self) -> Dict[str, Any]:
        """Return a shallow copy of everything stored."""
        async with self._lock:
            logger.debug("Getting all data.")
            return dict(self._data)

    async def clear_all_data(self):
        """Drop every entry and persist the empty store."""
        async with self._lock:
            self._data = {}
            await self._async_save()
            logger.warning("All data cleared from DataStorage.")
125
+
126
class RateLimiter:
    """Sliding-window rate limiter: at most *limit* events per *per_seconds*, per user."""

    def __init__(self, limit: int = 5, per_seconds: int = 60):
        self.limit = limit
        self.per_seconds = per_seconds
        # One deque of request timestamps per user, oldest first.
        self._records: Dict[str, deque[datetime.datetime]] = defaultdict(deque)
        self._lock = asyncio.Lock()
        logger.info(f"RateLimiter initialized with limit={limit}, per_seconds={per_seconds}.")

    def _prune(self, user_id: str, now: datetime.datetime):
        """Drop timestamps that fell out of the sliding window. Caller holds the lock."""
        window = self._records[user_id]
        while window and (now - window[0]).total_seconds() >= self.per_seconds:
            window.popleft()

    async def is_allowed(self, user_id: str) -> bool:
        """Return True and record the request if *user_id* is under the limit."""
        async with self._lock:
            now = datetime.datetime.now()
            self._prune(user_id, now)
            window = self._records[user_id]
            if len(window) >= self.limit:
                logger.warning(f"Rate limit exceeded for user {user_id}.")
                return False
            window.append(now)
            logger.debug(f"Rate limit allowed for user {user_id}.")
            return True

    async def get_remaining_time(self, user_id: str) -> float:
        """Seconds until the oldest in-window request expires; 0.0 if under the limit."""
        async with self._lock:
            now = datetime.datetime.now()
            self._prune(user_id, now)
            window = self._records[user_id]
            if len(window) < self.limit:
                return 0.0
            elapsed = (now - window[0]).total_seconds()
            return max(0.0, self.per_seconds - elapsed)

    async def reset_user_limit(self, user_id: str):
        """Forget all recorded requests for *user_id*."""
        async with self._lock:
            if self._records.pop(user_id, None) is not None:
                logger.info(f"Rate limit reset for user {user_id}.")
170
+
171
class MiddlewareManager:
    """Runs a chain of async middlewares before a message is handled.

    A middleware that returns False, or that raises, stops the chain and the
    overall result is False; any other return value lets the chain continue.
    """

    def __init__(self):
        self.middlewares: List[Callable[..., Awaitable[bool]]] = []
        logger.info("MiddlewareManager initialized.")

    def add(self, func: Callable[..., Awaitable[bool]]):
        """Append *func* to the middleware chain."""
        self.middlewares.append(func)
        logger.info(f"Middleware '{func.__name__}' added.")

    async def run(self, bot, message) -> bool:
        """Run every middleware in order; return False as soon as one vetoes."""
        logger.debug("Running middlewares.")
        for mw in self.middlewares:
            try:
                outcome = await mw(bot, message)
            except Exception as e:
                logger.error(f"Error in middleware '{mw.__name__}': {e}", exc_info=True)
                return False
            if outcome is False:
                logger.debug(f"Middleware '{mw.__name__}' returned False. Stopping middleware chain.")
                return False
        logger.debug("All middlewares passed.")
        return True
196
+
197
class CommandParser:
    """Maps regex patterns to async handlers and dispatches incoming messages.

    Patterns are tried in insertion order; the first match wins and its
    captured groups are passed through to the handler.
    """

    def __init__(self):
        self.commands: Dict[str, Callable] = {}
        logger.info("CommandParser initialized.")

    def add(self, pattern: str, func: Callable):
        """Register *func* as the handler for messages matching *pattern*."""
        self.commands[pattern] = func
        logger.info(f"Command added: pattern='{pattern}', handler='{func.__name__}'")

    async def run(self, bot, message) -> Optional[Any]:
        """Dispatch *message* to the first handler whose pattern matches its text."""
        text = message.text or ""
        logger.debug(f"Parsing command for message: '{text[:50]}...'")
        for pattern, func in self.commands.items():
            match = re.match(pattern, text)
            if match is None:
                continue
            try:
                logger.info(f"Command matched: pattern='{pattern}', handler='{func.__name__}'.")
                return await func(bot, message, *match.groups())
            except Exception as e:
                logger.error(f"Error executing command '{func.__name__}' for pattern '{pattern}': {e}", exc_info=True)
                await message.reply("An error occurred while processing your command.")
                return None
        logger.debug("No command matched.")
        return None
224
+
225
class Conversation:
    """A per-user question/answer session driven by an asyncio queue.

    ``ask()`` sends a prompt and blocks until ``next()`` delivers the user's
    reply (or the timeout elapses).
    """

    def __init__(self, user_id: str, state_manager: StateManager, conversation_manager: 'ConversationManager'):
        self.user_id = user_id
        self.state_manager = state_manager
        self.conversation_manager = conversation_manager
        # Incoming user replies are delivered here by next().
        self.queue: asyncio.Queue[str] = asyncio.Queue()
        self._is_active = False
        logger.info(f"Conversation created for user {user_id}.")

    async def start(self, initial_state: str):
        """Start the conversation and set its initial state."""
        await self.state_manager.set(self.user_id, initial_state)
        self._is_active = True
        logger.info(f"Conversation started for user {self.user_id} with state '{initial_state}'.")

    async def end(self):
        """End the conversation, clear the state and drain pending replies."""
        await self.state_manager.clear(self.user_id)
        self._is_active = False
        # Drain anything still queued so a restarted conversation starts clean.
        while not self.queue.empty():
            try:
                self.queue.get_nowait()
            except asyncio.QueueEmpty:
                pass
        logger.info(f"Conversation ended for user {self.user_id}.")

    async def ask(self, bot, message, text: str, timeout: float = 60.0) -> Optional[str]:
        """Ask *text* and wait up to *timeout* seconds for the user's reply.

        The timeout was previously hard-coded at 60 seconds; it is now a
        parameter with the same default, so existing callers are unaffected.
        Returns the reply text, or None (ending the conversation) on timeout
        or when the conversation is not active.
        """
        if not self._is_active:
            logger.warning(f"Attempted to ask a question in an inactive conversation for user {self.user_id}.")
            return None
        await message.reply(text)
        try:
            response = await asyncio.wait_for(self.queue.get(), timeout=timeout)
            logger.debug(f"Received response for user {self.user_id}: '{response}'.")
            return response
        except asyncio.TimeoutError:
            logger.warning(f"Timeout waiting for user {self.user_id}'s response.")
            await self.end()
            return None

    async def next(self, text: str):
        """Deliver the user's reply *text* to a pending ask()."""
        if not self._is_active:
            logger.warning(f"Attempted to put message in queue for inactive conversation for user {self.user_id}.")
            return
        await self.queue.put(text)
        logger.debug(f"User {self.user_id} response queued.")

    async def is_active(self) -> bool:
        """Return True while the conversation is running."""
        return self._is_active
278
+
279
class ConversationManager:
    """Creates, looks up and tears down per-user Conversation objects."""

    def __init__(self, state_manager: StateManager):
        self.conversations: Dict[str, Conversation] = {}
        self.state_manager = state_manager
        self._lock = asyncio.Lock()
        logger.info("ConversationManager initialized.")

    async def get_or_create(self, user_id: str) -> Conversation:
        """Return the user's conversation, creating one on first use."""
        async with self._lock:
            conv = self.conversations.get(user_id)
            if conv is None:
                conv = Conversation(user_id, self.state_manager, self)
                self.conversations[user_id] = conv
                logger.debug(f"Created new conversation for user {user_id}.")
            return conv

    async def end_conversation(self, user_id: str):
        """End and discard a user's conversation, if one exists."""
        async with self._lock:
            conv = self.conversations.get(user_id)
            if conv is None:
                return
            await conv.end()
            del self.conversations[user_id]
            logger.info(f"Conversation ended and removed for user {user_id}.")

    async def clean_up_inactive_conversations(self):
        """Drop every conversation whose is_active() reports False."""
        async with self._lock:
            stale = [uid for uid, conv in self.conversations.items()
                     if not await conv.is_active()]
            for uid in stale:
                del self.conversations[uid]
                logger.info(f"Removed inactive conversation for user {uid}.")
314
+
315
class Scheduler:
    """Tracks asyncio tasks created for delayed and periodic callbacks."""

    def __init__(self):
        self.tasks: List[asyncio.Task] = []
        self._running_tasks_lock = asyncio.Lock()
        logger.info("Scheduler initialized.")

    async def _register(self, t: asyncio.Task):
        """Add *t* to the tracked-task list under the lock."""
        async with self._running_tasks_lock:
            self.tasks.append(t)

    async def run_after(self, delay: int, func: Callable[..., Awaitable[Any]], *args, **kwargs) -> asyncio.Task:
        """Schedule *func* to run once after *delay* seconds; return the task."""
        async def runner():
            await asyncio.sleep(delay)
            try:
                logger.info(f"Running scheduled task '{func.__name__}' after {delay} seconds.")
                await func(*args, **kwargs)
            except Exception as e:
                logger.error(f"Error in scheduled task '{func.__name__}': {e}", exc_info=True)
            finally:
                # Self-remove so the tracked list does not grow without bound.
                await self._remove_task_if_completed(asyncio.current_task())

        t = asyncio.create_task(runner())
        await self._register(t)
        logger.info(f"Task '{func.__name__}' scheduled to run after {delay} seconds.")
        return t

    async def run_every(self, interval: int, func: Callable[..., Awaitable[Any]], *args, **kwargs) -> asyncio.Task:
        """Schedule *func* to run repeatedly every *interval* seconds; return the task."""
        async def runner():
            while True:
                await asyncio.sleep(interval)
                try:
                    logger.info(f"Running periodic task '{func.__name__}' every {interval} seconds.")
                    await func(*args, **kwargs)
                except asyncio.CancelledError:
                    logger.info(f"Periodic task '{func.__name__}' cancelled.")
                    break
                except Exception as e:
                    logger.error(f"Error in periodic task '{func.__name__}': {e}", exc_info=True)

        t = asyncio.create_task(runner())
        await self._register(t)
        logger.info(f"Task '{func.__name__}' scheduled to run every {interval} seconds.")
        return t

    async def cancel_all_tasks(self):
        """Cancel every scheduled task and wait for them to finish."""
        async with self._running_tasks_lock:
            logger.info(f"Cancelling {len(self.tasks)} scheduled tasks.")
            pending = list(self.tasks)
            for task in pending:
                task.cancel()
        # Await completion OUTSIDE the lock: a cancelled run_after task
        # re-acquires this lock in its finally block, so gathering while
        # holding the lock could deadlock.
        await asyncio.gather(*pending, return_exceptions=True)
        async with self._running_tasks_lock:
            self.tasks.clear()
            logger.info("All scheduled tasks cancelled.")

    async def _remove_task_if_completed(self, completed_task: asyncio.Task):
        """Remove *completed_task* from the tracked list if still present."""
        async with self._running_tasks_lock:
            if completed_task in self.tasks:
                self.tasks.remove(completed_task)
                logger.debug("Removed a completed task from the scheduler.")
376
+
377
class CacheManager:
    """TTL-based in-memory cache keyed by string."""

    def __init__(self):
        # key -> (value, absolute expiry time)
        self.cache: Dict[str, Tuple[Any, datetime.datetime]] = {}
        self._lock = asyncio.Lock()
        logger.info("CacheManager initialized.")

    async def get(self, key: str) -> Optional[Any]:
        """Return the cached value, or None if the key is missing or expired."""
        async with self._lock:
            entry = self.cache.get(key)
            if entry is not None:
                value, expire = entry
                if expire > datetime.datetime.now():
                    logger.debug(f"Cache hit for key '{key}'.")
                    return value
                # Lazily evict the stale entry on access.
                del self.cache[key]
                logger.debug(f"Cache expired for key '{key}'.")
            logger.debug(f"Cache miss for key '{key}'.")
            return None

    async def set(self, key: str, value: Any, ttl: int = 300):
        """Store *value* under *key*, valid for *ttl* seconds."""
        async with self._lock:
            deadline = datetime.datetime.now() + datetime.timedelta(seconds=ttl)
            self.cache[key] = (value, deadline)
            logger.debug(f"Cache set for key '{key}' with TTL {ttl} seconds.")

    async def delete(self, key: str):
        """Remove *key* from the cache if present."""
        async with self._lock:
            if self.cache.pop(key, None) is not None:
                logger.debug(f"Cache deleted for key '{key}'.")

    async def clear(self):
        """Remove every entry from the cache."""
        async with self._lock:
            self.cache.clear()
            logger.warning("Cache cleared.")

    async def prune_expired(self):
        """Evict every entry whose expiry time has passed."""
        async with self._lock:
            now = datetime.datetime.now()
            stale = [k for k, (_, expire) in self.cache.items() if expire <= now]
            for k in stale:
                del self.cache[k]
            if stale:
                logger.debug(f"Pruned {len(stale)} expired cache entries.")
426
+
427
class ErrorHandler:
    """Fan-out registry of async error callbacks."""

    def __init__(self):
        self.handlers: List[Callable[..., Awaitable[None]]] = []
        logger.info("ErrorHandler initialized.")

    def add(self, func: Callable[..., Awaitable[None]]):
        """Register *func* to be invoked for every reported error."""
        self.handlers.append(func)
        logger.info(f"Error handler '{func.__name__}' added.")

    async def run(self, bot, error: Exception, message):
        """Invoke every registered handler; a failing handler never stops the rest."""
        logger.error(f"An error occurred: {error}", exc_info=True)
        for callback in self.handlers:
            try:
                await callback(bot, error, message)
            except Exception as e:
                logger.error(f"Error in error handler '{callback.__name__}': {e}", exc_info=True)
445
+
446
+
447
+
448
class LoggerConfigurator:
    """Holds a logging level/format pair and applies it on demand."""

    def __init__(self, level: int = logging.INFO, format_string: str = '%(asctime)s - %(levelname)s - %(message)s'):
        # Stored, not applied: configuration only takes effect via configure().
        self.level = level
        self.format_string = format_string
        logger.info("LoggerConfigurator initialized.")

    def configure(self):
        """Apply the stored level and format to the root logger."""
        logging.basicConfig(level=self.level, format=self.format_string)
        logger.info(f"Logging configured with level {logging.getLevelName(self.level)} and format '{self.format_string}'.")
459
+
460
class UserManager:
    """Manages user data, potentially with more complex profiles.

    Profiles are stored in the shared DataStorage under keys of the form
    ``profile_<user_id>``.
    """

    def __init__(self, data_storage: DataStorage):
        self.data_storage = data_storage
        self._lock = asyncio.Lock()
        logger.info("UserManager initialized.")

    async def get_user_profile(self, user_id: str) -> Optional[Dict[str, Any]]:
        """Retrieve a user's profile data, or None if absent."""
        profile = await self.data_storage.get(f"profile_{user_id}")
        logger.debug(f"Getting profile for user {user_id}: {'Found' if profile else 'Not Found'}")
        return profile

    async def set_user_profile(self, user_id: str, profile_data: Dict[str, Any]):
        """Set or update a user's profile data."""
        await self.data_storage.set(f"profile_{user_id}", profile_data)
        logger.info(f"Profile updated for user {user_id}.")

    async def delete_user_profile(self, user_id: str):
        """Delete a user's profile data."""
        await self.data_storage.delete(f"profile_{user_id}")
        logger.info(f"Profile deleted for user {user_id}.")

    async def get_all_users(self) -> List[str]:
        """Return the IDs of all users that have profile data."""
        all_data = await self.data_storage.get_all_data()
        # removeprefix (not str.replace) so a user id that itself contains
        # "profile_" is not mangled.
        user_ids = [key.removeprefix("profile_") for key in all_data if key.startswith("profile_")]
        logger.debug(f"Found {len(user_ids)} users with profiles.")
        return user_ids
489
+
490
class FeatureFlagManager:
    """Manages feature flags to enable/disable features dynamically.

    Flags are persisted in DataStorage under ``feature_flag_<name>`` (global)
    and ``feature_flag_<name>_<user_id>`` (per-user override).
    """

    def __init__(self, data_storage: DataStorage, default_flags: Optional[Dict[str, bool]] = None):
        self.data_storage = data_storage
        self.default_flags = default_flags or {}
        self._lock = asyncio.Lock()
        logger.info("FeatureFlagManager initialized.")

    async def is_feature_enabled(self, feature_name: str, user_id: Optional[str] = None) -> bool:
        """Check whether a feature is enabled.

        Resolution order: per-user override, then global flag, then the
        constructor defaults. (Previously the global flag was consulted
        first, which made per-user overrides unreachable whenever any
        global value was stored.)
        """
        async with self._lock:
            if user_id:
                user_flag = await self.data_storage.get(f"feature_flag_{feature_name}_{user_id}")
                if user_flag is not None:
                    return user_flag

            global_flag = await self.data_storage.get(f"feature_flag_{feature_name}")
            if global_flag is not None:
                return global_flag

            return self.default_flags.get(feature_name, False)

    async def enable_feature(self, feature_name: str):
        """Enable a feature globally."""
        await self.data_storage.set(f"feature_flag_{feature_name}", True)
        logger.info(f"Feature '{feature_name}' globally enabled.")

    async def disable_feature(self, feature_name: str):
        """Disable a feature globally."""
        await self.data_storage.set(f"feature_flag_{feature_name}", False)
        logger.info(f"Feature '{feature_name}' globally disabled.")

    async def enable_feature_for_user(self, feature_name: str, user_id: str):
        """Enable a feature for a specific user."""
        await self.data_storage.set(f"feature_flag_{feature_name}_{user_id}", True)
        logger.info(f"Feature '{feature_name}' enabled for user {user_id}.")

    async def disable_feature_for_user(self, feature_name: str, user_id: str):
        """Disable a feature for a specific user."""
        await self.data_storage.set(f"feature_flag_{feature_name}_{user_id}", False)
        logger.info(f"Feature '{feature_name}' disabled for user {user_id}.")
535
+
536
class MessageQueue:
    """Manages a queue for outgoing messages, useful for rate limiting or batching."""

    def __init__(self, max_size: int = 1000):
        self.queue: deque[Tuple[str, str]] = deque(maxlen=max_size)
        self._lock = asyncio.Lock()
        # Set while the queue holds at least one message; get() waits on it.
        # (The original implementation kept its event set while the queue was
        # EMPTY, so get() returned None on an empty queue and blocked forever
        # once a message was actually enqueued.)
        self._has_items = asyncio.Event()
        logger.info(f"MessageQueue initialized with max size {max_size}.")

    async def put(self, user_id: str, message_text: str):
        """Add a message to the queue; drop it (with a warning) when full."""
        async with self._lock:
            if len(self.queue) < self.queue.maxlen:
                self.queue.append((user_id, message_text))
                self._has_items.set()
                logger.debug(f"Message added to queue for user {user_id}.")
            else:
                logger.warning(f"MessageQueue is full. Could not add message for user {user_id}.")

    async def get(self) -> Optional[Tuple[str, str]]:
        """Get a message from the queue. Waits if the queue is empty."""
        while True:
            await self._has_items.wait()
            async with self._lock:
                if not self.queue:
                    # Lost a race with another consumer; wait again.
                    self._has_items.clear()
                    continue
                user_id, message_text = self.queue.popleft()
                if not self.queue:
                    self._has_items.clear()
                logger.debug(f"Message retrieved from queue for user {user_id}.")
                return user_id, message_text

    def is_empty(self) -> bool:
        """Return True when no messages are queued."""
        return not self.queue

    def qsize(self) -> int:
        """Return the current number of messages in the queue."""
        return len(self.queue)
574
+
575
class TextGenerator:
    """A simple text generator, can be expanded with more sophisticated models."""

    def __init__(self, model_path: Optional[str] = None):
        # No model is actually loaded; the path only selects the log message
        # and is echoed in the generated output.
        self.model_path = model_path
        if model_path:
            logger.info(f"TextGenerator initialized with model from: {model_path}")
        else:
            logger.info("TextGenerator initialized with basic functionality (no model loaded).")

    async def generate_text(self, prompt: str, max_length: int = 100) -> str:
        """Produce placeholder text for *prompt*, truncated to *max_length*."""
        # Simulated latency; a real model call would go here.
        await asyncio.sleep(0.1)
        produced = f"Generated text based on: '{prompt[:50]}...' (Model: {self.model_path or 'basic'})"
        logger.info(f"Text generated for prompt: '{prompt[:50]}...'")
        return produced[:max_length]
592
+
593
class WorkflowEngine:
    """Orchestrates a sequence of steps (tasks) for a given process.

    Each step is an async callable that receives the context dict and returns
    the (possibly replaced) context for the next step.
    """

    def __init__(self):
        self.workflows: Dict[str, List[Callable[..., Awaitable[Any]]]] = {}
        logger.info("WorkflowEngine initialized.")

    def add_workflow(self, name: str, steps: List[Callable[..., Awaitable[Any]]]):
        """Register *steps* under *name*, replacing any previous definition."""
        self.workflows[name] = steps
        logger.info(f"Workflow '{name}' added with {len(steps)} steps.")

    async def run_workflow(self, name: str, initial_context: Dict[str, Any]) -> Dict[str, Any]:
        """Run workflow *name* and return the final context.

        Raises ValueError for an unknown workflow and RuntimeError (chained to
        the original exception) when a step fails.
        """
        steps = self.workflows.get(name)
        if steps is None:
            logger.error(f"Workflow '{name}' not found.")
            raise ValueError(f"Workflow '{name}' not found.")

        # Work on a copy so the caller's dict is never mutated by the steps.
        context = initial_context.copy()
        logger.info(f"Running workflow '{name}' with initial context: {context}")

        for step_index, step_func in enumerate(steps):
            try:
                logger.debug(f"Executing step {step_index + 1} of workflow '{name}': '{step_func.__name__}'.")
                context = await step_func(context)
                logger.debug(f"Step {step_index + 1} completed. Current context: {context}")
            except Exception as e:
                logger.error(f"Error in step {step_index + 1} ('{step_func.__name__}') of workflow '{name}': {e}", exc_info=True)
                raise RuntimeError(f"Error in workflow '{name}' step {step_index + 1}: {e}") from e

        logger.info(f"Workflow '{name}' completed successfully. Final context: {context}")
        return context
626
+
627
class AnalyticsTracker:
    """Tracks events and metrics for analysis.

    Events are persisted in DataStorage under ``analytics_event_`` keys.
    """

    def __init__(self, data_storage: DataStorage):
        self.data_storage = data_storage
        self._lock = asyncio.Lock()
        logger.info("AnalyticsTracker initialized.")

    async def track_event(self, event_name: str, properties: Optional[Dict[str, Any]] = None):
        """Persist one event record with an ISO timestamp and *properties*."""
        stamp = datetime.datetime.now().isoformat()
        record = {"event": event_name, "timestamp": stamp, "properties": properties or {}}
        async with self._lock:
            # Timestamp plus a short random suffix keeps keys collision-free.
            key = f"analytics_event_{stamp}_{uuid.uuid4().hex[:8]}"
            await self.data_storage.set(key, record)
            logger.info(f"Tracked event: '{event_name}' with properties: {properties}")

    async def get_events_by_name(self, event_name: str) -> List[Dict[str, Any]]:
        """Return every stored event whose name equals *event_name*."""
        all_data = await self.data_storage.get_all_data()
        matching_events = [
            payload for key, payload in all_data.items()
            if key.startswith("analytics_event_")
            and isinstance(payload, dict)
            and payload.get("event") == event_name
        ]
        logger.debug(f"Retrieved {len(matching_events)} events for '{event_name}'.")
        return matching_events

    async def get_all_events(self) -> List[Dict[str, Any]]:
        """Return every stored analytics event."""
        all_data = await self.data_storage.get_all_data()
        found = [payload for key, payload in all_data.items()
                 if key.startswith("analytics_event_") and isinstance(payload, dict)]
        logger.debug(f"Retrieved a total of {len(found)} analytics events.")
        return found
660
+
661
class NotificationService:
    """Handles sending notifications to users (e.g., push notifications, emails)."""

    def __init__(self, bot_instance):
        # Any object exposing an async send_message(user_id, text) method.
        self.bot = bot_instance
        self._lock = asyncio.Lock()
        logger.info("NotificationService initialized.")

    async def send_notification(self, user_id: str, message: str):
        """Send one notification to *user_id*; failures are logged, not raised."""
        try:
            await self.bot.send_message(user_id, f"Notification: {message}")
        except Exception as e:
            logger.error(f"Failed to send notification to user {user_id}: {e}", exc_info=True)
        else:
            logger.info(f"Notification sent to user {user_id}: '{message}'.")

    async def broadcast_message(self, user_ids: List[str], message: str):
        """Send *message* to every user in *user_ids* concurrently."""
        await asyncio.gather(
            *(self.send_notification(uid, message) for uid in user_ids),
            return_exceptions=True,
        )
        logger.info(f"Broadcasted message to {len(user_ids)} users.")
687
+
688
+ class ConfigurationManager:
689
+ """Loads and manages application configuration from files or environment variables."""
690
+ def __init__(self, config_file: str = "config.json"):
691
+ self.config_file = config_file
692
+ self.config: Dict[str, Any] = {}
693
+ self._lock = asyncio.Lock()
694
+ self._load_config()
695
+ logger.info("ConfigurationManager initialized.")
696
+
697
+ def _load_config(self):
698
+ """Loads configuration from a JSON file."""
699
+ try:
700
+ with open(self.config_file, "r", encoding="utf-8") as f:
701
+ self.config = json.load(f)
702
+ logger.info(f"Configuration loaded from '{self.config_file}'.")
703
+ except FileNotFoundError:
704
+ logger.warning(f"Configuration file '{self.config_file}' not found. Using empty configuration.")
705
+ self.config = {}
706
+ except json.JSONDecodeError:
707
+ logger.error(f"Error decoding JSON from configuration file '{self.config_file}'.")
708
+ self.config = {}
709
+ except IOError as e:
710
+ logger.error(f"Error reading configuration file '{self.config_file}': {e}")
711
+ self.config = {}
712
+
713
+ async def get(self, key: str, default: Optional[Any] = None) -> Any:
714
+ """Gets a configuration value by key."""
715
+ async with self._lock:
716
+ return self.config.get(key, default)
717
+
718
+ async def set(self, key: str, value: Any):
719
+ """Sets a configuration value and saves it to the file."""
720
+ async with self._lock:
721
+ self.config[key] = value
722
+ await self._async_save_config()
723
+ logger.info(f"Configuration updated for key '{key}'.")
724
+
725
+ async def _async_save_config(self):
726
+ """Saves the current configuration to the file asynchronously."""
727
+ loop = asyncio.get_running_loop()
728
+ await loop.run_in_executor(None, self._save_config)
729
+
730
+ def _save_config(self):
731
+ """Saves the current configuration to the file synchronously."""
732
+ try:
733
+ with open(self.config_file, "w", encoding="utf-8") as f:
734
+ json.dump(self.config, f, ensure_ascii=False, indent=2)
735
+ logger.debug(f"Configuration saved to '{self.config_file}'.")
736
+ except IOError as e:
737
+ logger.error(f"Failed to save configuration to '{self.config_file}': {e}")
738
+
739
class TaskScheduler:
    """A more robust scheduler with capabilities for recurring tasks and monitoring.

    Live tasks are tracked in ``scheduled_tasks`` keyed by a generated ID so
    they can be cancelled or inspected later.  Completed one-time tasks remove
    their own entry; recurring tasks stay registered until cancelled.
    """
    def __init__(self):
        self.scheduled_tasks: Dict[str, asyncio.Task] = {}
        self._task_id_counter = 0
        # Guards both the registry and the ID counter.
        self._lock = asyncio.Lock()
        logger.info("TaskScheduler initialized.")

    async def _generate_task_id(self) -> str:
        """Generates a unique task ID."""
        async with self._lock:
            self._task_id_counter += 1
            return f"task_{self._task_id_counter}"

    async def schedule_once(self, delay_seconds: int, func: Callable[..., Awaitable[Any]], *args, **kwargs) -> str:
        """Schedules a task to run once after a delay and returns its ID."""
        task_id = await self._generate_task_id()

        async def task_wrapper():
            await asyncio.sleep(delay_seconds)
            try:
                logger.info(f"Executing one-time task '{func.__name__}' (ID: {task_id}) after {delay_seconds}s.")
                await func(*args, **kwargs)
            except asyncio.CancelledError:
                logger.info(f"One-time task '{func.__name__}' (ID: {task_id}) was cancelled.")
            except Exception as e:
                logger.error(f"Error in one-time task '{func.__name__}' (ID: {task_id}): {e}", exc_info=True)
            finally:
                # BUGFIX: the original called ``await self.cancel_task(task_id)``
                # here, which cancels and awaits the *currently running* task
                # itself -- CPython raises "Task cannot await on itself" and the
                # registry entry leaked.  Just drop our own bookkeeping entry.
                async with self._lock:
                    self.scheduled_tasks.pop(task_id, None)

        task = asyncio.create_task(task_wrapper(), name=f"OneTimeTask_{func.__name__}")
        async with self._lock:
            self.scheduled_tasks[task_id] = task
        logger.info(f"Task '{func.__name__}' (ID: {task_id}) scheduled to run once in {delay_seconds} seconds.")
        return task_id

    async def schedule_recurring(self, interval_seconds: int, func: Callable[..., Awaitable[Any]], *args, **kwargs) -> str:
        """Schedules a task to run recurringly at a given interval and returns its ID."""
        task_id = await self._generate_task_id()

        async def task_wrapper():
            while True:
                try:
                    await asyncio.sleep(interval_seconds)
                    logger.info(f"Executing recurring task '{func.__name__}' (ID: {task_id}) every {interval_seconds}s.")
                    await func(*args, **kwargs)
                except asyncio.CancelledError:
                    logger.info(f"Recurring task '{func.__name__}' (ID: {task_id}) was cancelled.")
                    break
                except Exception as e:
                    # Recurring tasks log and keep looping; a single failed run
                    # must not stop the schedule.
                    logger.error(f"Error in recurring task '{func.__name__}' (ID: {task_id}): {e}", exc_info=True)

        task = asyncio.create_task(task_wrapper(), name=f"RecurringTask_{func.__name__}")
        async with self._lock:
            self.scheduled_tasks[task_id] = task
        logger.info(f"Task '{func.__name__}' (ID: {task_id}) scheduled to run every {interval_seconds} seconds.")
        return task_id

    async def cancel_task(self, task_id: str) -> bool:
        """Cancels a scheduled task by its ID.  Returns True if it existed."""
        # BUGFIX: pop the entry under the lock but await the task *outside* it.
        # A one-time task's cleanup (its ``finally``) re-acquires the lock, so
        # awaiting the task while still holding it would deadlock.
        async with self._lock:
            task = self.scheduled_tasks.pop(task_id, None)
        if task is None:
            logger.warning(f"Task with ID '{task_id}' not found for cancellation.")
            return False
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            pass
        logger.info(f"Task with ID '{task_id}' cancelled.")
        return True

    async def get_task_status(self, task_id: str) -> str:
        """Gets the status of a scheduled task: 'running', 'completed', 'cancelled' or 'not_found'."""
        async with self._lock:
            if task_id in self.scheduled_tasks:
                task = self.scheduled_tasks[task_id]
                if task.done():
                    return "completed" if not task.cancelled() else "cancelled"
                return "running"
            return "not_found"

    async def get_all_task_ids(self) -> List[str]:
        """Returns a list of all scheduled task IDs."""
        async with self._lock:
            return list(self.scheduled_tasks.keys())
class TokenBucket:
    """Implements a token bucket for rate limiting."""

    def __init__(self, capacity: int, fill_rate: float):
        self.capacity = capacity          # maximum tokens the bucket can hold
        self.fill_rate = fill_rate        # tokens added per second
        self.tokens = capacity            # bucket starts full
        self.last_refill_time = datetime.datetime.now()
        self._lock = asyncio.Lock()
        logger.info(f"TokenBucket initialized with capacity={capacity}, fill_rate={fill_rate}/s.")

    async def _refill_tokens(self):
        """Refills tokens based on the time elapsed since last refill."""
        current = datetime.datetime.now()
        elapsed = (current - self.last_refill_time).total_seconds()
        self.tokens = min(self.capacity, self.tokens + elapsed * self.fill_rate)
        self.last_refill_time = current

    async def consume(self, tokens_needed: float = 1.0) -> bool:
        """Consumes tokens from the bucket. Returns True if successful, False otherwise."""
        async with self._lock:
            await self._refill_tokens()
            if self.tokens < tokens_needed:
                logger.warning(f"Not enough tokens ({self.tokens:.2f}) to consume {tokens_needed}.")
                return False
            self.tokens -= tokens_needed
            logger.debug(f"Consumed {tokens_needed} tokens. Remaining: {self.tokens:.2f}")
            return True
class AwaitableQueue:
    """A queue that allows items to be put and retrieved, with ability to signal completion."""

    def __init__(self):
        self.queue: asyncio.Queue[Any] = asyncio.Queue()
        # NOTE(review): items are used as dict keys here, so they must be
        # hashable; duplicate items share/overwrite one completion signal.
        self.completion_signals: Dict[Any, asyncio.Event] = {}
        self._lock = asyncio.Lock()
        logger.info("AwaitableQueue initialized.")

    async def put(self, item: Any, completion_event: Optional[asyncio.Event] = None):
        """Puts an item into the queue. If completion_event is provided, it will be set when the item is retrieved."""
        await self.queue.put(item)
        if completion_event is not None:
            async with self._lock:
                self.completion_signals[item] = completion_event
        logger.debug(f"Item put into AwaitableQueue.")

    async def get(self) -> Any:
        """Retrieves an item from the queue."""
        item = await self.queue.get()
        async with self._lock:
            signal = self.completion_signals.pop(item, None)
        if signal is not None:
            signal.set()
            logger.debug(f"Item retrieved from AwaitableQueue and completion signal set.")
        else:
            logger.debug(f"Item retrieved from AwaitableQueue.")
        return item

    def task_done(self):
        """Indicates that a formerly enqueued task is complete."""
        self.queue.task_done()

    async def join(self):
        """Blocks until all items in the queue have been gotten and processed."""
        await self.queue.join()
class DataValidator:
    """Validates data against predefined schemas or rules."""

    def __init__(self):
        # schema name -> {key: expected type} mapping
        self.schemas: Dict[str, Dict] = {}
        logger.info("DataValidator initialized.")

    def register_schema(self, schema_name: str, schema: Dict):
        """Registers a validation schema."""
        self.schemas[schema_name] = schema
        logger.info(f"Schema '{schema_name}' registered.")

    async def validate(self, data: Any, schema_name: str) -> bool:
        """Validates data against a registered schema.

        Raises ValueError if the schema is not registered.  Checks that every
        schema key is present in ``data`` and has the expected type; stops at
        the first failure.
        """
        if schema_name not in self.schemas:
            logger.error(f"Schema '{schema_name}' not found for validation.")
            raise ValueError(f"Schema '{schema_name}' not found.")

        schema = self.schemas[schema_name]
        valid = isinstance(schema, dict) and isinstance(data, dict)
        if not valid:
            logger.warning("Validation failed: Schema or data is not in expected dictionary format.")
        else:
            for key, expected_type in schema.items():
                if key not in data:
                    valid = False
                    logger.warning(f"Validation failed: Key '{key}' missing in data.")
                    break
                if not isinstance(data[key], expected_type):
                    valid = False
                    logger.warning(f"Validation failed: Type mismatch for key '{key}'. Expected {expected_type}, got {type(data[key])}.")
                    break

        logger.info(f"Data validation against schema '{schema_name}': {'Success' if valid else 'Failed'}.")
        return valid
class IdempotencyManager:
    """Ensures operations are performed only once by tracking request IDs."""

    def __init__(self, storage: DataStorage, expiry_seconds: int = 600):
        self.storage = storage
        self.expiry_seconds = expiry_seconds
        logger.info(f"IdempotencyManager initialized with expiry {expiry_seconds}s.")

    async def is_unique(self, request_id: str) -> bool:
        """Checks if a request ID has been seen before."""
        seen_id = await self.storage.get(f"idempotency_{request_id}")
        return seen_id is None

    async def mark_as_processed(self, request_id: str):
        """Marks a request ID as processed and sets an expiry.

        BUGFIX: the original stored the marker without any TTL, so
        ``expiry_seconds`` was never honoured and processed IDs accumulated
        forever.  Pass the expiry in milliseconds via ``px=`` the same way
        DistributedLock does with this storage backend.
        """
        await self.storage.set(f"idempotency_{request_id}", True, px=int(self.expiry_seconds * 1000))
        logger.info(f"Marked request ID '{request_id}' as processed.")

    async def protect(self, request_id: str, func: Callable[..., Awaitable[Any]], *args, **kwargs):
        """Executes a function only if the request ID is unique.

        Returns the function's result, or None when the ID was already seen.
        """
        if not await self.is_unique(request_id):
            logger.warning(f"Request ID '{request_id}' is not unique. Operation skipped.")
            return None
        result = await func(*args, **kwargs)
        await self.mark_as_processed(request_id)
        return result
class FileDownloader:
    """Handles downloading files from URLs."""

    def __init__(self, download_dir: str = "downloads"):
        self.download_dir = download_dir
        os.makedirs(self.download_dir, exist_ok=True)
        logger.info(f"FileDownloader initialized. Download directory: '{self.download_dir}'.")

    async def download_file(self, url: str, filename: Optional[str] = None) -> str:
        """Downloads a file from a URL to the specified directory.

        Returns the local path of the downloaded file.  When no filename is
        given, the last URL path segment is used.
        """
        import aiohttp
        import os

        target = filename if filename is not None else url.split('/')[-1]
        filepath = os.path.join(self.download_dir, target)

        async with aiohttp.ClientSession() as session:
            async with session.get(url) as response:
                response.raise_for_status()
                # Stream the body to disk in 1 KiB chunks.
                with open(filepath, 'wb') as out:
                    chunk = await response.content.read(1024)
                    while chunk:
                        out.write(chunk)
                        chunk = await response.content.read(1024)
        logger.info(f"File downloaded from {url} to {filepath}.")
        return filepath
class TaskDistributor:
    """Distributes tasks to multiple workers (e.g., for parallel processing)."""

    def __init__(self, num_workers: int):
        self.num_workers = num_workers
        # Each queue entry is (coroutine function, positional args, keyword args).
        self.task_queue: asyncio.Queue[Tuple[Callable[..., Awaitable[Any]], Tuple, Dict[str, Any]]] = asyncio.Queue()
        self.workers: List[asyncio.Task] = []
        self._workers_running = False
        logger.info(f"TaskDistributor initialized with {num_workers} workers.")

    async def _worker(self, worker_id: int):
        """The function that each worker runs."""
        logger.info(f"Worker {worker_id} started.")
        while True:
            try:
                func, args, kwargs = await self.task_queue.get()
                logger.debug(f"Worker {worker_id} picked up task: {func.__name__}.")
                await func(*args, **kwargs)
                self.task_queue.task_done()
                logger.debug(f"Worker {worker_id} finished task: {func.__name__}.")
            except asyncio.CancelledError:
                logger.info(f"Worker {worker_id} received cancellation signal.")
                return
            except Exception as exc:
                # A failed task must not kill the worker loop.
                logger.error(f"Worker {worker_id} encountered an error: {exc}", exc_info=True)

    async def start(self):
        """Starts the worker processes."""
        if self._workers_running:
            return
        self.workers = [asyncio.create_task(self._worker(idx)) for idx in range(self.num_workers)]
        self._workers_running = True
        logger.info("TaskDistributor workers started.")

    async def stop(self):
        """Stops all worker processes gracefully."""
        if not self._workers_running:
            return
        for worker in self.workers:
            worker.cancel()
        await asyncio.gather(*self.workers, return_exceptions=True)
        self._workers_running = False
        logger.info("TaskDistributor workers stopped.")

    async def add_task(self, func: Callable[..., Awaitable[Any]], *args, **kwargs):
        """Adds a task to the queue for workers to process."""
        await self.task_queue.put((func, args, kwargs))
        logger.debug(f"Task '{func.__name__}' added to TaskDistributor queue.")

    async def wait_completion(self):
        """Waits for all tasks currently in the queue to be processed."""
        await self.task_queue.join()
        logger.info("All tasks in TaskDistributor queue have been processed.")
class EventBus:
    """A simple publish-subscribe system."""

    def __init__(self):
        # event name -> list of async handlers
        self._subscribers: Dict[str, List[Callable[..., Awaitable[None]]]] = defaultdict(list)
        self._lock = asyncio.Lock()
        logger.info("EventBus initialized.")

    async def subscribe(self, event_name: str, handler: Callable[..., Awaitable[None]]):
        """Subscribes a handler to a specific event."""
        async with self._lock:
            self._subscribers[event_name].append(handler)
            logger.info(f"Handler '{handler.__name__}' subscribed to event '{event_name}'.")

    async def unsubscribe(self, event_name: str, handler: Callable[..., Awaitable[None]]):
        """Unsubscribes a handler from an event."""
        async with self._lock:
            registered = event_name in self._subscribers and handler in self._subscribers[event_name]
            if registered:
                self._subscribers[event_name].remove(handler)
                logger.info(f"Handler '{handler.__name__}' unsubscribed from event '{event_name}'.")
            else:
                logger.warning(f"Handler '{handler.__name__}' not found for event '{event_name}' during unsubscription.")

    async def publish(self, event_name: str, *args, **kwargs):
        """Publishes an event, notifying all subscribed handlers concurrently."""
        if event_name not in self._subscribers:
            logger.debug(f"No subscribers for event '{event_name}'.")
            return

        handlers = self._subscribers[event_name]
        logger.info(f"Publishing event '{event_name}' to {len(handlers)} subscribers.")
        pending = [asyncio.create_task(h(*args, **kwargs)) for h in handlers]
        # Handler exceptions are collected rather than propagated.
        await asyncio.gather(*pending, return_exceptions=True)
        logger.debug(f"Event '{event_name}' publishing completed.")
class FileScanner:
    """Scans directories for files based on patterns."""

    def __init__(self, base_dir: str):
        self.base_dir = base_dir
        os.makedirs(self.base_dir, exist_ok=True)
        logger.info(f"FileScanner initialized for directory: '{self.base_dir}'.")

    async def find_files(self, pattern: str, recursive: bool = False) -> List[str]:
        """Finds files matching a pattern (glob-style) without blocking the loop."""
        import glob
        search_path = os.path.join(self.base_dir, "**" if recursive else "", pattern)
        logger.info(f"Scanning for files with pattern '{pattern}' in '{self.base_dir}' (recursive: {recursive}).")

        loop = asyncio.get_running_loop()
        # BUGFIX: run_in_executor() forwards positional arguments only, so the
        # original ``run_in_executor(None, glob.glob, search_path,
        # recursive=recursive)`` raised TypeError.  Bind the keyword argument
        # in a closure instead.
        found_files = await loop.run_in_executor(None, lambda: glob.glob(search_path, recursive=recursive))

        logger.info(f"Found {len(found_files)} files matching the pattern.")
        return found_files

    async def read_file_content(self, filepath: str) -> Optional[str]:
        """Reads the content of a given file; returns None if missing or unreadable."""
        if not os.path.isfile(filepath):
            logger.warning(f"File not found for reading: {filepath}")
            return None

        def _read() -> str:
            # Open *and* read inside the worker thread; the original opened the
            # file on the event-loop thread (blocking) and only offloaded read().
            with open(filepath, "r", encoding="utf-8") as f:
                return f.read()

        try:
            loop = asyncio.get_running_loop()
            content = await loop.run_in_executor(None, _read)
            logger.debug(f"Read content of file: {filepath}.")
            return content
        except IOError as e:
            logger.error(f"Error reading file '{filepath}': {e}", exc_info=True)
            return None
class WorkerPool:
    """Manages a pool of worker coroutines that process tasks from a queue."""

    def __init__(self, num_workers: int, task_queue: asyncio.Queue):
        self.num_workers = num_workers
        # Queue entries are expected to be zero-argument awaitable callables.
        self.task_queue = task_queue
        self.workers: List[asyncio.Task] = []
        self._is_running = False
        logger.info(f"WorkerPool initialized with {num_workers} workers.")

    async def _worker_coro(self, worker_id: int):
        """The coroutine each worker runs."""
        logger.info(f"Worker {worker_id} started.")
        while True:
            try:
                payload = await self.task_queue.get()
                logger.debug(f"Worker {worker_id} got task from queue.")
                if callable(payload):
                    await payload()
                else:
                    logger.warning(f"Worker {worker_id} received non-callable task payload: {type(payload)}.")
                self.task_queue.task_done()
            except asyncio.CancelledError:
                logger.info(f"Worker {worker_id} cancelled.")
                break
            except Exception as err:
                # Keep the worker alive after a task failure.
                logger.error(f"Worker {worker_id} error: {err}", exc_info=True)

    async def start(self):
        """Starts the worker pool."""
        if self._is_running:
            return
        self.workers = [asyncio.create_task(self._worker_coro(n)) for n in range(self.num_workers)]
        self._is_running = True
        logger.info("WorkerPool started.")

    async def stop(self):
        """Stops the worker pool gracefully."""
        if not self._is_running:
            return
        for worker in self.workers:
            worker.cancel()
        await asyncio.gather(*self.workers, return_exceptions=True)
        self._is_running = False
        logger.info("WorkerPool stopped.")
class DistributedLock:
    """A simple distributed lock implementation (requires a shared backend like Redis or DataStorage).

    NOTE(review): ``self._lock`` is an in-process asyncio.Lock, so it only
    serializes acquire/release attempts within this process; cross-process
    mutual exclusion relies entirely on the storage backend's ``nx`` semantics
    -- confirm DataStorage.set(nx=True) is atomic.
    """
    def __init__(self, storage: DataStorage, lock_name: str, timeout: int = 10):
        self.storage = storage
        # All lock keys share a "lock_" prefix in the backing store.
        self.lock_name = f"lock_{lock_name}"
        self.timeout = timeout
        self._lock = asyncio.Lock()
        logger.info(f"DistributedLock initialized for '{lock_name}' with timeout {timeout}s.")

    async def acquire(self, blocking: bool = True, timeout: Optional[float] = None) -> bool:
        """Acquires the lock. Returns True if acquired, False otherwise."""
        # NOTE(review): effective_timeout doubles as both the acquisition wait
        # budget and the lock's TTL (px, milliseconds) -- confirm that is intended.
        effective_timeout = timeout if timeout is not None else self.timeout

        async with self._lock:
            # Wall-clock timing; a system clock jump can distort the timeout.
            start_time = datetime.datetime.now()
            while True:
                # nx=True: set only if the key does not already exist (try-lock).
                acquired = await self.storage.set(self.lock_name, "locked", nx=True, px=int(effective_timeout * 1000))
                if acquired:
                    logger.debug(f"Lock '{self.lock_name}' acquired.")
                    return True

                if not blocking:
                    logger.debug(f"Lock '{self.lock_name}' could not be acquired (non-blocking).")
                    return False

                if (datetime.datetime.now() - start_time).total_seconds() > effective_timeout:
                    logger.warning(f"Timeout while trying to acquire lock '{self.lock_name}'.")
                    return False

                # Poll roughly every 100 ms until acquired or timed out.
                await asyncio.sleep(0.1)

    async def release(self) -> bool:
        """Releases the lock.

        NOTE(review): deletes the key unconditionally -- there is no ownership
        token, so a process that lost the lock (TTL expiry) can release another
        holder's lock.  Always returns True.
        """
        async with self._lock:
            await self.storage.delete(self.lock_name)
            logger.debug(f"Lock '{self.lock_name}' released.")
            return True

    async def __aenter__(self):
        """Context manager entry point for acquiring the lock."""
        if not await self.acquire():
            raise RuntimeError(f"Failed to acquire lock '{self.lock_name}'.")
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit point for releasing the lock."""
        await self.release()
class ModelRegistry:
    """Manages a registry of machine learning models."""

    def __init__(self, storage: DataStorage):
        self.storage = storage
        self._registry: Dict[str, Any] = {}      # name -> path string or instance
        self._loaded_models: Dict[str, Any] = {} # name -> ready-to-use model
        self._lock = asyncio.Lock()
        logger.info("ModelRegistry initialized.")

    async def register_model(self, model_name: str, model_path_or_instance: Any):
        """Registers a model by name; non-string sources are treated as live instances."""
        async with self._lock:
            self._registry[model_name] = model_path_or_instance
            if not isinstance(model_path_or_instance, str):
                self._loaded_models[model_name] = model_path_or_instance
            logger.info(f"Model '{model_name}' registered.")

    async def load_model(self, model_name: str) -> Optional[Any]:
        """Loads a model by name. Caches loaded models."""
        async with self._lock:
            if model_name in self._loaded_models:
                logger.debug(f"Returning cached loaded model: '{model_name}'.")
                return self._loaded_models[model_name]

            if model_name not in self._registry:
                logger.error(f"Model '{model_name}' not found in registry.")
                return None

            source = self._registry[model_name]
            if not isinstance(source, str):
                # Registered object is already a live model.
                self._loaded_models[model_name] = source
                logger.info(f"Model '{model_name}' is an instance and already loaded.")
                return source

            try:
                # NOTE(review): placeholder "load" -- no real deserialization here.
                loaded = f"Loaded_model_from_{source}"
                self._loaded_models[model_name] = loaded
                logger.info(f"Model '{model_name}' loaded from '{source}'.")
                return loaded
            except Exception as e:
                logger.error(f"Failed to load model '{model_name}' from '{source}': {e}", exc_info=True)
                return None

    async def predict(self, model_name: str, data: Any) -> Any:
        """Makes a prediction using a loaded model; None when the model is unavailable."""
        model = await self.load_model(model_name)
        if model:
            # NOTE(review): placeholder inference -- returns a descriptive string.
            outcome = f"Prediction for {model_name} with data {data}"
            logger.debug(f"Prediction made for model '{model_name}'.")
            return outcome
        return None
class MetricsReporter:
    """Collects and reports application metrics."""

    def __init__(self, storage: DataStorage):
        self.storage = storage
        self._metrics: Dict[str, float] = {}  # metric name -> current value
        self._lock = asyncio.Lock()
        logger.info("MetricsReporter initialized.")

    async def increment_counter(self, metric_name: str, value: float = 1.0):
        """Increments a counter metric."""
        async with self._lock:
            updated = self._metrics.get(metric_name, 0.0) + value
            self._metrics[metric_name] = updated
            logger.debug(f"Metric '{metric_name}' incremented by {value}. New value: {self._metrics[metric_name]}.")

    async def set_gauge(self, metric_name: str, value: float):
        """Sets a gauge metric to a specific value."""
        async with self._lock:
            self._metrics[metric_name] = value
            logger.debug(f"Metric '{metric_name}' set to {value}.")

    async def get_metrics(self) -> Dict[str, float]:
        """Returns a snapshot of all current metrics."""
        async with self._lock:
            return self._metrics.copy()

    async def report_metrics(self):
        """Saves the current metrics to storage (e.g., for later analysis)."""
        snapshot = await self.get_metrics()
        timestamp = datetime.datetime.now().isoformat()
        # One storage entry per report, keyed by ISO timestamp.
        await self.storage.set(f"metrics_report_{timestamp}", snapshot)
        logger.info(f"Metrics reported at {timestamp}.")
class StateMachine:
    """Manages complex state transitions."""

    def __init__(self, initial_state: str):
        self.current_state = initial_state
        # (from_state, to_state) -> coroutine action executed during the transition.
        self.transitions: Dict[Tuple[str, str], Callable[..., Awaitable[None]]] = {}
        self._lock = asyncio.Lock()
        logger.info(f"StateMachine initialized with initial state: '{initial_state}'.")

    def add_transition(self, from_state: str, to_state: str, action: Callable[..., Awaitable[None]]):
        """Adds a transition rule from one state to another, with an optional action."""
        self.transitions[(from_state, to_state)] = action
        logger.info(f"Transition added: '{from_state}' -> '{to_state}' with action '{action.__name__}'.")

    async def transition(self, event: str, *args, **kwargs) -> bool:
        """Triggers a state transition based on an event.

        NOTE(review): the event is matched against the *target* state name of a
        transition leaving the current state -- confirm this event model is
        intended.
        """
        async with self._lock:
            # First registered transition out of the current state whose target
            # matches the event (dict insertion order decides ties).
            hit = next(
                ((src, dst) for src, dst in self.transitions
                 if src == self.current_state and dst == event),
                None,
            )

            if hit is None:
                logger.warning(f"No valid transition found from state '{self.current_state}' on event '{event}'.")
                return False

            src, dst = hit
            logger.info(f"Transitioning from '{self.current_state}' to '{dst}' on event '{event}'.")
            handler = self.transitions[(src, dst)]
            if handler:
                await handler(*args, **kwargs)
            self.current_state = dst
            logger.info(f"Current state is now: '{self.current_state}'.")
            return True

    async def get_state(self) -> str:
        """Gets the current state."""
        async with self._lock:
            return self.current_state
class RoleBasedAccessControl:
    """Manages user roles and permissions."""

    def __init__(self, storage: DataStorage):
        self.storage = storage
        self._roles: Dict[str, List[str]] = {}        # user_id -> role names
        self._permissions: Dict[str, List[str]] = {}  # role -> permission names
        self._lock = asyncio.Lock()
        logger.info("RoleBasedAccessControl initialized.")

    async def add_role_to_user(self, user_id: str, role: str):
        """Assigns a role to a user."""
        async with self._lock:
            user_roles = self._roles.setdefault(user_id, [])
            if role not in user_roles:
                user_roles.append(role)
                await self._save_roles()
                logger.info(f"Role '{role}' assigned to user {user_id}.")

    async def remove_role_from_user(self, user_id: str, role: str):
        """Removes a role from a user."""
        async with self._lock:
            if role in self._roles.get(user_id, []):
                self._roles[user_id].remove(role)
                await self._save_roles()
                logger.info(f"Role '{role}' removed from user {user_id}.")

    async def define_role_permissions(self, role: str, permissions: List[str]):
        """Defines the permissions for a specific role."""
        async with self._lock:
            self._permissions[role] = permissions
            await self._save_permissions()
            logger.info(f"Permissions defined for role '{role}': {permissions}.")

    async def user_has_permission(self, user_id: str, required_permission: str) -> bool:
        """Checks if a user has a specific permission through any of their roles."""
        async with self._lock:
            for role in self._roles.get(user_id, []):
                if required_permission in self._permissions.get(role, []):
                    logger.debug(f"User {user_id} has permission '{required_permission}' via role '{role}'.")
                    return True
            logger.debug(f"User {user_id} does NOT have permission '{required_permission}'.")
            return False

    async def _save_roles(self):
        """Saves the current role assignments."""
        await self.storage.set("rbac_roles", self._roles)

    async def _save_permissions(self):
        """Saves the current role permissions."""
        await self.storage.set("rbac_permissions", self._permissions)

    async def load_from_storage(self):
        """Loads roles and permissions from storage."""
        async with self._lock:
            roles_data = await self.storage.get("rbac_roles")
            if roles_data:
                self._roles = roles_data
                logger.info("RBAC roles loaded from storage.")
            perms_data = await self.storage.get("rbac_permissions")
            if perms_data:
                self._permissions = perms_data
                logger.info("RBAC permissions loaded from storage.")
class MockMessage:
    """Lightweight stand-in for an incoming message, used in tests/examples."""

    def __init__(self, text: str = "", user_id: str = "test_user", chat_id: str = "test_chat"):
        self.text = text
        # Anonymous one-off objects exposing just an ``id`` attribute.
        make = lambda cls_name, ident: type(cls_name, (object,), {'id': ident})()
        self.from_user = make('User', user_id)
        self.chat = make('Chat', chat_id)

    async def reply(self, text: str):
        """Prints the reply instead of sending it, then briefly yields to the loop."""
        print(f"Mock Reply to {self.from_user.id}: {text}")
        await asyncio.sleep(0.01)