matplobbot-shared 0.1.19-py3-none-any.whl → 0.1.22-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of matplobbot-shared might be problematic.

@@ -1,13 +1,13 @@
  Metadata-Version: 2.4
  Name: matplobbot-shared
- Version: 0.1.19
+ Version: 0.1.22
  Summary: Shared library for the Matplobbot ecosystem (database, services, i18n).
  Author: Ackrome
  Author-email: ivansergeyevich@gmail.com
  Requires-Python: >=3.11
- Requires-Dist: asyncpg>=0.28.0
- Requires-Dist: aiohttp>=3.9.0
- Requires-Dist: certifi>=2023.7.22
+ Requires-Dist: asyncpg
+ Requires-Dist: aiohttp
+ Requires-Dist: certifi
  Dynamic: author
  Dynamic: author-email
  Dynamic: requires-dist
@@ -0,0 +1,10 @@
+ shared_lib/__init__.py,sha256=Wxuw1wbvCOsKqs6WfIQ05cx_vndhPs6rH2krMatFRqA,45
+ shared_lib/database.py,sha256=ZAsi33HNxc3exhTCMgqvaEoqDqLq28k4c1H5GFGGcY4,13066
+ shared_lib/i18n.py,sha256=VBWQWVF-k_HDiidYo_RUPyUCM7oL897z5hOw9jvOoYY,1762
+ shared_lib/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ shared_lib/services/schedule_service.py,sha256=tKXgwZQWeaagQeqG_tKP2gLflWo_6kxKZOhW9PFvbK4,1835
+ shared_lib/services/university_api.py,sha256=Ui-zjfKOHCCf2Imh8CNtVOWegwuep7IB8gO9IKNUrrE,1898
+ matplobbot_shared-0.1.22.dist-info/METADATA,sha256=Us0ghoLjukX3b5e0ng0zCKnmJ5N2zC2Ffm4G6JmQSao,395
+ matplobbot_shared-0.1.22.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ matplobbot_shared-0.1.22.dist-info/top_level.txt,sha256=L8mrC50YWCe19jmh_zrUZFvXSkhsnES5K6y027G1838,11
+ matplobbot_shared-0.1.22.dist-info/RECORD,,
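Each entry in the new RECORD has the form path,sha256=<digest>,<size in bytes>, where the digest is the urlsafe-base64 encoding of the file's SHA-256 hash with the trailing "=" padding stripped. A small Python sketch for checking one entry against an unpacked wheel (the helper name is ours; the file path comes from the listing above):

    import base64
    import hashlib
    from pathlib import Path

    def record_hash(path: str) -> str:
        # RECORD-style hash: urlsafe base64 of the SHA-256 digest, '=' padding stripped
        digest = hashlib.sha256(Path(path).read_bytes()).digest()
        return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

    # Run inside an unpacked matplobbot_shared-0.1.22 wheel; this should match the RECORD value above
    print(record_hash("shared_lib/database.py"))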
@@ -0,0 +1 @@
+ shared_lib
shared_lib/__init__.py ADDED
@@ -0,0 +1 @@
+ # This file makes shared_lib a Python package
shared_lib/database.py ADDED
@@ -0,0 +1,285 @@
+ import asyncpg
+ import datetime
+ import logging
+ import json
+ import os
+
+ logger = logging.getLogger(__name__)
+
+ # --- PostgreSQL Database Configuration ---
+ POSTGRES_USER = os.getenv("POSTGRES_USER", "user")
+ POSTGRES_PASSWORD = os.getenv("POSTGRES_PASSWORD", "password")
+ POSTGRES_HOST = os.getenv("POSTGRES_HOST", "localhost")
+ POSTGRES_PORT = os.getenv("POSTGRES_PORT", "5432")
+ POSTGRES_DB = os.getenv("POSTGRES_DB", "matplobbot_db")
+
+ DATABASE_URL = f"postgresql://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DB}"
+
+ # Global connection pool
+ pool = None
+
+ async def init_db_pool():
+     global pool
+     if pool is None:
+         try:
+             pool = await asyncpg.create_pool(DATABASE_URL, min_size=5, max_size=20)
+             logger.info("Shared DB Pool: Database connection pool created successfully.")
+         except Exception as e:
+             logger.error(f"Failed to create database connection pool: {e}", exc_info=True)
+             raise
+
+ async def close_db_pool():
+     global pool
+     if pool:
+         await pool.close()
+         logger.info("Shared DB Pool: Database connection pool closed.")
+
+ def get_db_connection_obj():
+     if pool is None:
+         # In FastAPI context, this would be an HTTPException
+         raise ConnectionError("Database connection pool is not initialized.")
+     return pool.acquire()
+
+ # --- User Settings Defaults ---
+ DEFAULT_SETTINGS = {
+     'show_docstring': True,
+     'latex_padding': 15,
+     'md_display_mode': 'md_file',
+     'latex_dpi': 300,
+     'language': 'en',
+ }
+
+ async def init_db():
+     """Initializes the database and creates tables if they don't exist."""
+     if pool is None:
+         await init_db_pool()
+
+     async with pool.acquire() as connection:
+         async with connection.transaction():
+             await connection.execute('''
+                 CREATE TABLE IF NOT EXISTS users (
+                     user_id BIGINT PRIMARY KEY,
+                     username TEXT,
+                     full_name TEXT NOT NULL,
+                     avatar_pic_url TEXT,
+                     settings JSONB DEFAULT '{}'::jsonb,
+                     onboarding_completed BOOLEAN DEFAULT FALSE
+                 )
+             ''')
+             await connection.execute('''
+                 CREATE TABLE IF NOT EXISTS user_actions (
+                     id SERIAL PRIMARY KEY,
+                     user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
+                     action_type TEXT NOT NULL,
+                     action_details TEXT,
+                     timestamp TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
+                 )
+             ''')
+             await connection.execute('''
+                 CREATE TABLE IF NOT EXISTS user_favorites (
+                     id SERIAL PRIMARY KEY,
+                     user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
+                     code_path TEXT NOT NULL,
+                     UNIQUE(user_id, code_path)
+                 )
+             ''')
+             await connection.execute('''
+                 CREATE TABLE IF NOT EXISTS latex_cache (
+                     formula_hash TEXT PRIMARY KEY,
+                     image_url TEXT NOT NULL,
+                     created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
+                 )
+             ''')
+             await connection.execute('''
+                 CREATE TABLE IF NOT EXISTS user_github_repos (
+                     id SERIAL PRIMARY KEY,
+                     user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
+                     repo_path TEXT NOT NULL,
+                     UNIQUE(user_id, repo_path)
+                 )
+             ''')
+             await connection.execute('''
+                 CREATE TABLE IF NOT EXISTS user_schedule_subscriptions (
+                     id SERIAL PRIMARY KEY,
+                     user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
+                     entity_type TEXT NOT NULL,
+                     entity_id TEXT NOT NULL,
+                     entity_name TEXT NOT NULL,
+                     notification_time TIME NOT NULL,
+                     is_active BOOLEAN DEFAULT TRUE,
+                     UNIQUE(user_id, entity_type, entity_id)
+                 )
+             ''')
+     logger.info("Database tables initialized.")
+
+ async def log_user_action(user_id: int, username: str | None, full_name: str, avatar_pic_url: str | None, action_type: str, action_details: str | None):
+     async with pool.acquire() as connection:
+         try:
+             await connection.execute('''
+                 INSERT INTO users (user_id, username, full_name, avatar_pic_url)
+                 VALUES ($1, $2, $3, $4)
+                 ON CONFLICT(user_id) DO UPDATE SET
+                     username = EXCLUDED.username,
+                     full_name = EXCLUDED.full_name,
+                     avatar_pic_url = EXCLUDED.avatar_pic_url;
+             ''', user_id, username, full_name, avatar_pic_url)
+             await connection.execute('''
+                 INSERT INTO user_actions (user_id, action_type, action_details)
+                 VALUES ($1, $2, $3);
+             ''', user_id, action_type, action_details)
+         except Exception as e:
+             logger.error(f"Error logging user action to DB: {e}", exc_info=True)
+
+ async def get_user_settings(user_id: int) -> dict:
+     async with pool.acquire() as connection:
+         settings_json = await connection.fetchval("SELECT settings FROM users WHERE user_id = $1", user_id)
+         db_settings = json.loads(settings_json) if settings_json else {}
+         merged_settings = DEFAULT_SETTINGS.copy()
+         merged_settings.update(db_settings)
+         return merged_settings
+
+ async def update_user_settings_db(user_id: int, settings: dict):
+     async with pool.acquire() as connection:
+         await connection.execute("UPDATE users SET settings = $1 WHERE user_id = $2", json.dumps(settings), user_id)
+
+ # --- Favorites ---
+ async def add_favorite(user_id: int, code_path: str):
+     async with pool.acquire() as connection:
+         try:
+             await connection.execute("INSERT INTO user_favorites (user_id, code_path) VALUES ($1, $2)", user_id, code_path)
+             return True
+         except asyncpg.UniqueViolationError:
+             return False
+
+ async def remove_favorite(user_id: int, code_path: str):
+     async with pool.acquire() as connection:
+         await connection.execute("DELETE FROM user_favorites WHERE user_id = $1 AND code_path = $2", user_id, code_path)
+
+ async def get_favorites(user_id: int) -> list:
+     async with pool.acquire() as connection:
+         rows = await connection.fetch("SELECT code_path FROM user_favorites WHERE user_id = $1", user_id)
+         return [row['code_path'] for row in rows]
+
+ # --- LaTeX Cache ---
+ async def clear_latex_cache():
+     async with pool.acquire() as connection:
+         await connection.execute("TRUNCATE TABLE latex_cache")
+
+ # --- GitHub Repos ---
+ async def add_user_repo(user_id: int, repo_path: str) -> bool:
+     async with pool.acquire() as connection:
+         try:
+             await connection.execute("INSERT INTO user_github_repos (user_id, repo_path) VALUES ($1, $2)", user_id, repo_path)
+             return True
+         except asyncpg.UniqueViolationError:
+             return False
+
+ async def get_user_repos(user_id: int) -> list[str]:
+     async with pool.acquire() as connection:
+         rows = await connection.fetch("SELECT repo_path FROM user_github_repos WHERE user_id = $1 ORDER BY added_at ASC", user_id)
+         return [row['repo_path'] for row in rows]
+
+ async def remove_user_repo(user_id: int, repo_path: str):
+     async with pool.acquire() as connection:
+         await connection.execute("DELETE FROM user_github_repos WHERE user_id = $1 AND repo_path = $2", user_id, repo_path)
+
+ async def update_user_repo(user_id: int, old_repo_path: str, new_repo_path: str):
+     async with pool.acquire() as connection:
+         await connection.execute("UPDATE user_github_repos SET repo_path = $1 WHERE user_id = $2 AND repo_path = $3", new_repo_path, user_id, old_repo_path)
+
+ # --- Onboarding ---
+ async def is_onboarding_completed(user_id: int) -> bool:
+     async with pool.acquire() as connection:
+         completed = await connection.fetchval("SELECT onboarding_completed FROM users WHERE user_id = $1", user_id)
+         return completed or False
+
+ async def set_onboarding_completed(user_id: int):
+     async with pool.acquire() as connection:
+         await connection.execute("UPDATE users SET onboarding_completed = TRUE WHERE user_id = $1", user_id)
+
+ # --- Schedule Subscriptions ---
+ async def add_schedule_subscription(user_id: int, entity_type: str, entity_id: str, entity_name: str, notification_time: datetime.time) -> bool:
+     async with pool.acquire() as connection:
+         try:
+             await connection.execute('''
+                 INSERT INTO user_schedule_subscriptions (user_id, entity_type, entity_id, entity_name, notification_time)
+                 VALUES ($1, $2, $3, $4, $5)
+                 ON CONFLICT (user_id, entity_type, entity_id) DO UPDATE SET
+                     entity_name = EXCLUDED.entity_name,
+                     notification_time = EXCLUDED.notification_time,
+                     is_active = TRUE;
+             ''', user_id, entity_type, entity_id, entity_name, notification_time)
+             return True
+         except Exception as e:
+             logger.error(f"Failed to add schedule subscription for user {user_id}: {e}", exc_info=True)
+             return False
+
+ async def get_subscriptions_for_notification(notification_time: str) -> list:
+     async with pool.acquire() as connection:
+         rows = await connection.fetch("""
+             SELECT user_id, entity_type, entity_id, entity_name
+             FROM user_schedule_subscriptions
+             WHERE is_active = TRUE AND TO_CHAR(notification_time, 'HH24:MI') = $1
+         """, notification_time)
+         return [dict(row) for row in rows]
+
+ # --- FastAPI Specific Queries ---
+ async def get_leaderboard_data_from_db(db_conn):
+     query = """
+         SELECT u.user_id, u.full_name, COALESCE(u.username, 'N/A') AS username, u.avatar_pic_url, COUNT(ua.id)::int AS actions_count
+         FROM users u JOIN user_actions ua ON u.user_id = ua.user_id
+         GROUP BY u.user_id ORDER BY actions_count DESC LIMIT 100;
+     """
+     rows = await db_conn.fetch(query)
+     return [dict(row) for row in rows]
+
+ async def get_popular_commands_data_from_db(db_conn):
+     query = """
+         SELECT action_details as command, COUNT(id) as command_count FROM user_actions
+         WHERE action_type = 'command' GROUP BY action_details ORDER BY command_count DESC LIMIT 10;
+     """
+     rows = await db_conn.fetch(query)
+     return [{"command": row['command'], "count": row['command_count']} for row in rows]
+
+ async def get_popular_messages_data_from_db(db_conn):
+     query = """
+         SELECT CASE WHEN LENGTH(action_details) > 30 THEN SUBSTR(action_details, 1, 27) || '...' ELSE action_details END as message_snippet,
+                COUNT(id) as message_count FROM user_actions
+         WHERE action_type = 'text_message' AND action_details IS NOT NULL AND action_details != ''
+         GROUP BY message_snippet ORDER BY message_count DESC LIMIT 10;
+     """
+     rows = await db_conn.fetch(query)
+     return [{"message": row['message_snippet'], "count": row['message_count']} for row in rows]
+
+ async def get_action_types_distribution_from_db(db_conn):
+     query = "SELECT action_type, COUNT(id) as type_count FROM user_actions GROUP BY action_type ORDER BY type_count DESC;"
+     rows = await db_conn.fetch(query)
+     return [{"action_type": row['action_type'], "count": row['type_count']} for row in rows]
+
+ async def get_activity_over_time_data_from_db(db_conn, period='day'):
+     date_format = {'day': 'YYYY-MM-DD', 'week': 'IYYY-IW', 'month': 'YYYY-MM'}.get(period, 'YYYY-MM-DD')
+     query = f"SELECT TO_CHAR(timestamp, '{date_format}') as period_start, COUNT(id) as actions_count FROM user_actions GROUP BY period_start ORDER BY period_start ASC;"
+     rows = await db_conn.fetch(query)
+     return [{"period": row['period_start'], "count": row['actions_count']} for row in rows]
+
+ async def get_user_profile_data_from_db(db_conn, user_id: int, page: int = 1, page_size: int = 50):
+     offset = (page - 1) * page_size
+     query = """
+         WITH UserActions AS (
+             SELECT id, action_type, action_details, TO_CHAR(timestamp, 'YYYY-MM-DD HH24:MI:SS') AS timestamp
+             FROM user_actions WHERE user_id = $1
+         )
+         SELECT u.user_id, u.full_name, COALESCE(u.username, 'N/A') AS username, u.avatar_pic_url,
+                (SELECT COUNT(*) FROM UserActions) as total_actions,
+                ua.id as action_id, ua.action_type, ua.action_details, ua.timestamp
+         FROM users u LEFT JOIN UserActions ua ON 1=1
+         WHERE u.user_id = $2 ORDER BY ua.timestamp DESC LIMIT $3 OFFSET $4;
+     """
+     rows = await db_conn.fetch(query, user_id, user_id, page_size, offset)
+     if not rows: return None
+
+     first_row = dict(rows[0])
+     user_details = {k: first_row[k] for k in ["user_id", "full_name", "username", "avatar_pic_url"]}
+     actions = [dict(r) for r in rows if r["action_id"] is not None]
+
+     return {"user_details": user_details, "actions": actions, "total_actions": first_row["total_actions"]}
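Taken together, the new shared_lib/database.py exposes a module-level asyncpg pool plus async helpers built on top of it. A minimal usage sketch, not part of the package itself, assuming the POSTGRES_* environment variables above point at a reachable database:

    import asyncio

    from shared_lib import database

    async def main():
        await database.init_db()  # creates the pool and the tables if they don't exist
        await database.log_user_action(
            user_id=1, username="demo", full_name="Demo User",
            avatar_pic_url=None, action_type="command", action_details="/start",
        )
        settings = await database.get_user_settings(1)  # DEFAULT_SETTINGS merged with the stored JSONB
        print(settings["language"])
        await database.close_db_pool()

    asyncio.run(main())

The FastAPI-oriented helpers at the bottom of the module take an explicit connection instead; that pairs with get_db_connection_obj(), which returns pool.acquire() for use as an async context manager.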
shared_lib/i18n.py ADDED
@@ -0,0 +1,47 @@
+ import json
+ from pathlib import Path
+ import logging
+
+ from .database import get_user_settings
+
+ logger = logging.getLogger(__name__)
+
+
+ class Translator:
+     def __init__(self, locales_dir: Path, default_lang: str = "en"):
+         self.locales_dir = locales_dir
+         self.default_lang = default_lang
+         self.translations = {}
+         self._load_translations()
+
+     def _load_translations(self):
+         """Loads all .json language files from the locales directory."""
+         for lang_file in self.locales_dir.glob("*.json"):
+             lang_code = lang_file.stem
+             try:
+                 with open(lang_file, 'r', encoding='utf-8') as f:
+                     self.translations[lang_code] = json.load(f)
+                 logger.info(f"Successfully loaded language: {lang_code}")
+             except (json.JSONDecodeError, IOError) as e:
+                 logger.error(f"Failed to load language file {lang_file}: {e}")
+
+     async def get_user_language(self, user_id: int) -> str:
+         """Fetches the user's language from settings, falling back to default."""
+         settings = await get_user_settings(user_id)
+         return settings.get('language', self.default_lang)
+
+     def gettext(self, lang: str, key: str, **kwargs) -> str:
+         """
+         Gets a translated string for a given key and language.
+         Falls back to the default language if the key is not found.
+         """
+         text = self.translations.get(lang, {}).get(key)
+         if text is None:
+             # Fallback to default language
+             text = self.translations.get(self.default_lang, {}).get(key, f"_{key}_")
+
+         return text.format(**kwargs)
+
+
+ # Create a single instance of the translator
+ translator = Translator(locales_dir=Path(__file__).parent / "locales")
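The Translator added in shared_lib/i18n.py resolves a key for a given language, falling back first to the default language and then to a "_key_" placeholder. A minimal sketch of how it might be called; the locales directory and the "greeting" key are hypothetical, not taken from the package:

    from pathlib import Path

    from shared_lib.i18n import Translator

    # Hypothetical locales/en.json containing {"greeting": "Hello, {name}!"}
    t = Translator(locales_dir=Path("locales"), default_lang="en")
    print(t.gettext("en", "greeting", name="Ada"))  # -> "Hello, Ada!"
    print(t.gettext("ru", "greeting", name="Ada"))  # falls back to the "en" entry when "ru" lacks the key

The module also creates a shared translator instance at import time, pointed at a locales directory next to i18n.py; no such files appear in the RECORD above, so that instance would start with an empty translation table.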
@@ -1,7 +0,0 @@
- services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- services/schedule_service.py,sha256=tKXgwZQWeaagQeqG_tKP2gLflWo_6kxKZOhW9PFvbK4,1835
- services/university_api.py,sha256=Ui-zjfKOHCCf2Imh8CNtVOWegwuep7IB8gO9IKNUrrE,1898
- matplobbot_shared-0.1.19.dist-info/METADATA,sha256=SEBKSuEVLU0nIYMGY_ajZZFxD8tPFvIYnn5eMOLq0ig,421
- matplobbot_shared-0.1.19.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- matplobbot_shared-0.1.19.dist-info/top_level.txt,sha256=Xau8qVy0H_tHbGDNnS7i0vFz4WqB9lBwZHg0pGziYuM,9
- matplobbot_shared-0.1.19.dist-info/RECORD,,
@@ -1 +0,0 @@
- services
File without changes
File without changes
File without changes