arionxiv 1.0.32__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- arionxiv/__init__.py +40 -0
- arionxiv/__main__.py +10 -0
- arionxiv/arxiv_operations/__init__.py +0 -0
- arionxiv/arxiv_operations/client.py +225 -0
- arionxiv/arxiv_operations/fetcher.py +173 -0
- arionxiv/arxiv_operations/searcher.py +122 -0
- arionxiv/arxiv_operations/utils.py +293 -0
- arionxiv/cli/__init__.py +4 -0
- arionxiv/cli/commands/__init__.py +1 -0
- arionxiv/cli/commands/analyze.py +587 -0
- arionxiv/cli/commands/auth.py +365 -0
- arionxiv/cli/commands/chat.py +714 -0
- arionxiv/cli/commands/daily.py +482 -0
- arionxiv/cli/commands/fetch.py +217 -0
- arionxiv/cli/commands/library.py +295 -0
- arionxiv/cli/commands/preferences.py +426 -0
- arionxiv/cli/commands/search.py +254 -0
- arionxiv/cli/commands/settings_unified.py +1407 -0
- arionxiv/cli/commands/trending.py +41 -0
- arionxiv/cli/commands/welcome.py +168 -0
- arionxiv/cli/main.py +407 -0
- arionxiv/cli/ui/__init__.py +1 -0
- arionxiv/cli/ui/global_theme_manager.py +173 -0
- arionxiv/cli/ui/logo.py +127 -0
- arionxiv/cli/ui/splash.py +89 -0
- arionxiv/cli/ui/theme.py +32 -0
- arionxiv/cli/ui/theme_system.py +391 -0
- arionxiv/cli/utils/__init__.py +54 -0
- arionxiv/cli/utils/animations.py +522 -0
- arionxiv/cli/utils/api_client.py +583 -0
- arionxiv/cli/utils/api_config.py +505 -0
- arionxiv/cli/utils/command_suggestions.py +147 -0
- arionxiv/cli/utils/db_config_manager.py +254 -0
- arionxiv/github_actions_runner.py +206 -0
- arionxiv/main.py +23 -0
- arionxiv/prompts/__init__.py +9 -0
- arionxiv/prompts/prompts.py +247 -0
- arionxiv/rag_techniques/__init__.py +8 -0
- arionxiv/rag_techniques/basic_rag.py +1531 -0
- arionxiv/scheduler_daemon.py +139 -0
- arionxiv/server.py +1000 -0
- arionxiv/server_main.py +24 -0
- arionxiv/services/__init__.py +73 -0
- arionxiv/services/llm_client.py +30 -0
- arionxiv/services/llm_inference/__init__.py +58 -0
- arionxiv/services/llm_inference/groq_client.py +469 -0
- arionxiv/services/llm_inference/llm_utils.py +250 -0
- arionxiv/services/llm_inference/openrouter_client.py +564 -0
- arionxiv/services/unified_analysis_service.py +872 -0
- arionxiv/services/unified_auth_service.py +457 -0
- arionxiv/services/unified_config_service.py +456 -0
- arionxiv/services/unified_daily_dose_service.py +823 -0
- arionxiv/services/unified_database_service.py +1633 -0
- arionxiv/services/unified_llm_service.py +366 -0
- arionxiv/services/unified_paper_service.py +604 -0
- arionxiv/services/unified_pdf_service.py +522 -0
- arionxiv/services/unified_prompt_service.py +344 -0
- arionxiv/services/unified_scheduler_service.py +589 -0
- arionxiv/services/unified_user_service.py +954 -0
- arionxiv/utils/__init__.py +51 -0
- arionxiv/utils/api_helpers.py +200 -0
- arionxiv/utils/file_cleanup.py +150 -0
- arionxiv/utils/ip_helper.py +96 -0
- arionxiv-1.0.32.dist-info/METADATA +336 -0
- arionxiv-1.0.32.dist-info/RECORD +69 -0
- arionxiv-1.0.32.dist-info/WHEEL +5 -0
- arionxiv-1.0.32.dist-info/entry_points.txt +4 -0
- arionxiv-1.0.32.dist-info/licenses/LICENSE +21 -0
- arionxiv-1.0.32.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,954 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Unified User Service for ArionXiv
|
|
3
|
+
Consolidates user_service.py, session_manager.py, and preferences_service.py
|
|
4
|
+
Provides comprehensive user management, session handling, and preferences
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
import os
|
|
9
|
+
import secrets
|
|
10
|
+
import asyncio
|
|
11
|
+
import threading
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Dict, Any, List, Optional
|
|
14
|
+
from datetime import datetime, timedelta
|
|
15
|
+
import logging
|
|
16
|
+
|
|
17
|
+
from bson import ObjectId, errors as bson_errors
|
|
18
|
+
|
|
19
|
+
from .unified_database_service import unified_database_service
|
|
20
|
+
from arionxiv.cli.ui.global_theme_manager import global_theme_manager
|
|
21
|
+
|
|
22
|
+
logger = logging.getLogger(__name__)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class UnifiedUserService:
|
|
26
|
+
"""
|
|
27
|
+
Comprehensive user service that handles:
|
|
28
|
+
1. User account management (user_service.py functionality)
|
|
29
|
+
2. Session management for CLI authentication (session_manager.py functionality)
|
|
30
|
+
3. Paper preferences and relevance scoring (preferences_service.py functionality)
|
|
31
|
+
"""
|
|
32
|
+
|
|
33
|
+
def __init__(self):
    """Set up session storage paths and the arXiv category catalog.

    Side effect: creates ``~/.arionxiv`` if it does not already exist.
    """
    # Session management: a single JSON session file under the user's home
    self.session_dir = Path.home() / ".arionxiv"
    self.session_file = self.session_dir / "session.json"
    # Default session lifetime; used by create_session/extend_session
    self.session_duration_days = 30

    # Ensure session directory exists
    self.session_dir.mkdir(exist_ok=True)

    # ArXiv categories for preferences.
    # Curated subset of the arXiv taxonomy mapping code -> display name,
    # used when presenting/validating user category preferences.
    self.arxiv_categories = {
        # Computer Science
        "cs.AI": "Artificial Intelligence",
        "cs.CL": "Computation and Language",
        "cs.CV": "Computer Vision and Pattern Recognition",
        "cs.LG": "Machine Learning",
        "cs.NE": "Neural and Evolutionary Computing",
        "cs.RO": "Robotics",
        "cs.CR": "Cryptography and Security",
        "cs.DC": "Distributed, Parallel, and Cluster Computing",
        "cs.DS": "Data Structures and Algorithms",
        "cs.IR": "Information Retrieval",
        "cs.IT": "Information Theory",
        "cs.SE": "Software Engineering",
        "cs.SY": "Systems and Control",

        # Mathematics
        "math.ST": "Statistics Theory",
        "math.PR": "Probability",
        "math.OC": "Optimization and Control",
        "math.NA": "Numerical Analysis",

        # Physics
        "physics.data-an": "Data Analysis, Statistics and Probability",
        "physics.comp-ph": "Computational Physics",

        # Statistics
        "stat.ML": "Machine Learning",
        "stat.AP": "Applications",
        "stat.CO": "Computation",
        "stat.ME": "Methodology",

        # Quantitative Biology
        "q-bio.QM": "Quantitative Methods",
        "q-bio.NC": "Neurons and Cognition",

        # Economics
        "econ.EM": "Econometrics",
        "econ.TH": "Theoretical Economics"
    }

    logger.info("UnifiedUserService initialized")
|
|
85
|
+
|
|
86
|
+
# ====================
|
|
87
|
+
# INTERNAL HELPERS
|
|
88
|
+
# ====================
|
|
89
|
+
|
|
90
|
+
def _to_object_id(self, user_id: str) -> Optional[ObjectId]:
    """Coerce *user_id* into a BSON ObjectId; return None on any failure."""
    if not user_id:
        return None
    candidate = str(user_id)
    try:
        oid = ObjectId(candidate)
    except (bson_errors.InvalidId, TypeError):
        return None
    return oid
|
|
98
|
+
|
|
99
|
+
def _build_user_lookup_filter(self, user_id: str) -> Dict[str, Any]:
    """Build a Mongo filter locating a user record.

    Prefers a primary-key lookup when *user_id* parses as an ObjectId;
    otherwise matches the alternate 'id' or 'email' fields.
    """
    oid = self._to_object_id(user_id)
    if oid is not None:
        return {'_id': oid}
    # ObjectId conversion failed -- fall back to alternate identifiers
    return {'$or': [{'id': user_id}, {'email': user_id}]}
|
|
106
|
+
|
|
107
|
+
# ====================
|
|
108
|
+
# USER MANAGEMENT (from user_service.py)
|
|
109
|
+
# ====================
|
|
110
|
+
|
|
111
|
+
async def create_or_get_user(self, user_data: Dict[str, Any]) -> Dict[str, Any]:
    """Create a new user or return the existing one keyed by email.

    Falls back to an in-memory "demo" user whenever the database is
    unavailable or an unexpected error occurs, so callers always
    receive a usable user dict.

    Returns:
        {"success": True, "user": dict} on success, or
        {"success": False, "message": str} when email is missing or
        creation fails.
    """
    try:
        # Check if database is available
        if unified_database_service.db is None:
            logger.warning("Database not available, returning demo user")
            return {
                "success": True,
                "user": {
                    "id": "demo-user",
                    "name": user_data.get("name", "Demo User"),
                    "email": user_data.get("email", "demo@arionxiv.com"),
                    "created_at": datetime.utcnow().isoformat()
                }
            }

        email = user_data.get("email")
        if not email:
            return {"success": False, "message": "Email is required"}

        # Check if user exists
        existing_user = await unified_database_service.get_user(email)
        if existing_user:
            # Update last login
            await self._update_last_login(str(existing_user["_id"]))
            existing_user["_id"] = str(existing_user["_id"])
            return {"success": True, "user": existing_user}

        # Create new user with default preferences and zeroed stats
        new_user_data = {
            "email": email,
            "name": user_data.get("name", ""),
            "picture": user_data.get("picture", ""),
            "preferences": {
                "categories": ["cs.AI", "cs.LG", "cs.CV"],  # Default categories
                "keywords": [],
                "authors": [],
                "exclude_keywords": [],
                "min_relevance_score": 0.2,
                "max_papers_per_day": 10,
                "daily_digest": True,
                "email_notifications": True,
                "theme_color": "blue"
            },
            "stats": {
                "papers_read": 0,
                "papers_bookmarked": 0,
                "analysis_count": 0
            },
            "last_login": datetime.utcnow()
        }

        success = await unified_database_service.create_user(new_user_data)
        if success:
            # FIX: stdlib logging rejects arbitrary kwargs (structlog
            # style `email=email` raised TypeError); use lazy %-args.
            logger.info("New user created: %s", email)
            # Get the created user
            created_user = await unified_database_service.get_user(email)
            if created_user:
                created_user["_id"] = str(created_user["_id"])
                return {"success": True, "user": created_user}

        return {"success": False, "message": "Failed to create user"}

    except Exception as e:
        logger.error("Failed to create or get user: %s", e)
        # Return demo user as fallback
        return {
            "success": True,
            "user": {
                "id": "demo-user-fallback",
                "name": user_data.get("name", "Demo User"),
                "email": user_data.get("email", "demo@arionxiv.com"),
                "created_at": datetime.utcnow().isoformat()
            }
        }
|
|
186
|
+
|
|
187
|
+
async def get_user_by_id(self, user_id: str) -> Dict[str, Any]:
    """Look up a user document by identifier.

    Uses _build_user_lookup_filter, so *user_id* may be an ObjectId
    string, an 'id' field value, or an email. Returns a demo user when
    the database is unavailable or lookup raises.
    """
    try:
        # Check if database is available
        if unified_database_service.db is None:
            logger.warning("Database not available, returning demo user")
            return {
                "success": True,
                "user": {
                    "_id": user_id,
                    "id": user_id,
                    "name": "Demo User",
                    "email": "demo@arionxiv.com",
                    "preferences": {
                        "categories": ["cs.AI", "cs.LG"],
                        "keywords": [],
                        "maxDailyPapers": 10,
                        "analysisDepth": "standard",
                        "emailNotifications": True,
                        "language": "en"
                    }
                }
            }

        query = self._build_user_lookup_filter(user_id)
        user = await unified_database_service.db.users.find_one(query)
        if user:
            # Stringify ObjectId so the result is JSON-serializable
            user["_id"] = str(user["_id"])
            return {"success": True, "user": user}

        return {"success": False, "message": "User not found"}

    except Exception as e:
        # FIX: stdlib logging rejects `error=` kwarg (TypeError); use %-args
        logger.error("Failed to get user by ID: %s", e)
        # Return fallback demo user
        return {
            "success": True,
            "user": {
                "_id": user_id,
                "id": user_id,
                "name": "Demo User",
                "email": "demo@arionxiv.com",
                "preferences": {
                    "categories": ["cs.AI", "cs.LG"],
                    "keywords": [],
                    "maxDailyPapers": 10,
                    "analysisDepth": "standard",
                    "emailNotifications": True,
                    "language": "en"
                }
            }
        }
|
|
239
|
+
|
|
240
|
+
async def update_user_preferences(self, user_id: str, preferences: Dict[str, Any]) -> Dict[str, Any]:
    """Update user preferences - tries API first, then local DB.

    *preferences* may be the raw preference dict or wrapped as
    {"preferences": {...}}; both shapes are accepted.
    """
    try:
        # Try API first for hosted users (no local MongoDB)
        try:
            from ..cli.utils.api_client import api_client
            if api_client.is_authenticated():
                # Flatten preferences for API
                settings_to_update = preferences.get("preferences", preferences)
                result = await api_client.update_settings(settings_to_update)
                if result.get("success"):
                    return {"success": True, "message": "Preferences updated via API"}
        except Exception as api_err:
            logger.debug(f"API preferences update failed, trying local DB: {api_err}")

        # Fall back to local database
        if unified_database_service.db is None:
            logger.warning("Database not available for updating preferences")
            return {"success": True, "message": "Preferences updated (offline mode - may not persist)"}

        update_data = {
            "preferences": preferences.get("preferences", preferences),
            "updated_at": datetime.utcnow()
        }

        query = self._build_user_lookup_filter(user_id)
        result = await unified_database_service.db.users.update_one(
            query,
            {"$set": update_data}
        )

        if result.modified_count > 0:
            # FIX: stdlib logging rejects `user_id=` kwarg; use lazy %-args
            logger.info("User preferences updated successfully (user_id=%s)", user_id)
            return {"success": True, "message": "Preferences updated"}

        # If no document was modified, distinguish "missing user" from
        # "already up to date"
        user_count = await unified_database_service.db.users.count_documents(query)
        if user_count == 0:
            logger.warning("User not found for preference update (user_id=%s)", user_id)
            return {"success": False, "message": "User not found"}
        else:
            logger.info("No changes made to user preferences (user_id=%s)", user_id)
            return {"success": True, "message": "No changes made (preferences already up to date)"}

    except Exception as e:
        logger.error("Failed to update preferences (user_id=%s): %s", user_id, e)
        return {"success": False, "message": str(e)}
|
|
287
|
+
|
|
288
|
+
async def mark_paper_viewed(self, user_id: str, paper_id: str) -> Dict[str, Any]:
    """Record a 'view' interaction for (user, paper) and bump read stats.

    Upserts on (user_id, paper_id) so repeated views reuse one
    interaction document while incrementing its view_count.
    """
    try:
        # Check if database is available
        if unified_database_service.db is None:
            logger.warning("Database not available for marking paper viewed")
            return {"success": True, "message": "Paper marked as viewed (offline mode)"}

        interaction_data = {
            "user_id": user_id,
            "paper_id": paper_id,
            "action": "view",
            "timestamp": datetime.utcnow()
        }

        # Use upsert to avoid duplicates
        await unified_database_service.db.user_papers.update_one(
            {"user_id": user_id, "paper_id": paper_id},
            {"$set": interaction_data, "$inc": {"view_count": 1}},
            upsert=True
        )

        # Update user stats
        await self._increment_user_stat(user_id, "papers_read")

        return {"success": True, "message": "Paper marked as viewed"}
    except Exception as e:
        # FIX: stdlib logging rejects `error=` kwarg (TypeError); use %-args
        logger.error("Failed to mark paper as viewed: %s", e)
        return {"success": False, "message": str(e)}
|
|
317
|
+
|
|
318
|
+
async def bookmark_paper(self, user_id: str, paper_id: str) -> Dict[str, Any]:
    """Bookmark a paper for the user (idempotent-ish: rejects repeats).

    Note: on unexpected errors this deliberately reports success
    ("fallback mode") so CLI flows don't break on transient DB issues.
    """
    try:
        # Check if database is available
        if unified_database_service.db is None:
            logger.warning("Database not available for bookmarking")
            return {"success": True, "message": "Paper bookmarked (offline mode)"}

        # Check if already bookmarked
        existing = await unified_database_service.db.user_papers.find_one({
            "user_id": user_id,
            "paper_id": paper_id,
            "bookmarked": True
        })

        if existing:
            return {"success": False, "message": "Paper already bookmarked"}

        bookmark_data = {
            "user_id": user_id,
            "paper_id": paper_id,
            "action": "bookmark",
            "bookmarked": True,
            "timestamp": datetime.utcnow()
        }

        await unified_database_service.db.user_papers.update_one(
            {"user_id": user_id, "paper_id": paper_id},
            {"$set": bookmark_data},
            upsert=True
        )

        # Update user stats
        await self._increment_user_stat(user_id, "papers_bookmarked")

        return {"success": True, "message": "Paper bookmarked successfully"}
    except Exception as e:
        # FIX: stdlib logging rejects `error=` kwarg (TypeError); use %-args
        logger.error("Failed to bookmark paper: %s", e)
        return {"success": True, "message": "Paper bookmarked (fallback mode)"}
|
|
357
|
+
|
|
358
|
+
async def get_user_bookmarks(self, user_id: str) -> Dict[str, Any]:
    """Return the user's bookmarked papers, newest bookmark first.

    Joins user_papers against the papers collection
    (paper_id -> arxiv_id) and returns the joined paper documents.
    """
    try:
        # FIX: guard against a missing DB (the aggregate call would
        # raise AttributeError on None); report an empty library,
        # consistent with the offline-mode fallbacks elsewhere here.
        if unified_database_service.db is None:
            logger.warning("Database not available for fetching bookmarks")
            return {"success": True, "bookmarks": []}

        pipeline = [
            {"$match": {"user_id": user_id, "bookmarked": True}},
            {
                "$lookup": {
                    "from": "papers",
                    "localField": "paper_id",
                    "foreignField": "arxiv_id",
                    "as": "paper_details"
                }
            },
            # Drop interactions whose paper is missing from `papers`
            {"$unwind": "$paper_details"},
            {"$sort": {"timestamp": -1}}
        ]

        bookmarks = []
        async for bookmark in unified_database_service.db.user_papers.aggregate(pipeline):
            bookmark["paper_details"]["_id"] = str(bookmark["paper_details"]["_id"])
            bookmarks.append(bookmark["paper_details"])

        return {"success": True, "bookmarks": bookmarks}
    except Exception as e:
        # FIX: stdlib logging rejects `error=` kwarg (TypeError); use %-args
        logger.error("Failed to get user bookmarks: %s", e)
        return {"success": False, "message": str(e)}
|
|
384
|
+
|
|
385
|
+
async def get_user_stats(self, user_id: str) -> Dict[str, Any]:
    """Return the user's stats, refreshed with live interaction counts.

    Starts from the stored `stats` sub-document and overwrites
    papers_bookmarked / papers_read with real-time counts from the
    user_papers collection.
    """
    try:
        user_result = await self.get_user_by_id(user_id)
        if not user_result["success"]:
            return {"success": False, "message": "User not found"}

        stats = user_result["user"].get("stats", {})

        # Add real-time stats
        bookmark_count = await unified_database_service.db.user_papers.count_documents({
            "user_id": user_id,
            "bookmarked": True
        })

        view_count = await unified_database_service.db.user_papers.count_documents({
            "user_id": user_id,
            "action": "view"
        })

        stats.update({
            "papers_bookmarked": bookmark_count,
            "papers_read": view_count
        })

        return {"success": True, "stats": stats}
    except Exception as e:
        # FIX: stdlib logging rejects `error=` kwarg (TypeError); use %-args
        logger.error("Failed to get user stats: %s", e)
        return {"success": False, "message": str(e)}
|
|
414
|
+
|
|
415
|
+
async def _update_last_login(self, user_id: str):
    """Best-effort update of the user's last_login timestamp.

    Requires *user_id* to be a valid ObjectId string; otherwise logs
    and returns silently. Never raises.
    """
    try:
        object_id = self._to_object_id(user_id)
        if not object_id:
            # FIX: stdlib logging rejects `user_id=` kwarg; use %-args
            logger.warning("Cannot update last login for non-ObjectId user_id: %s", user_id)
            return
        await unified_database_service.db.users.update_one(
            {"_id": object_id},
            {"$set": {"last_login": datetime.utcnow()}}
        )
    except Exception as e:
        logger.warning("Failed to update last login: %s", e)
|
|
428
|
+
|
|
429
|
+
async def _increment_user_stat(self, user_id: str, stat_name: str):
    """Best-effort +1 on the user's `stats.<stat_name>` counter.

    Requires *user_id* to be a valid ObjectId string; otherwise logs
    and returns silently. Never raises.
    """
    try:
        object_id = self._to_object_id(user_id)
        if not object_id:
            # FIX: stdlib logging rejects arbitrary kwargs; use %-args
            logger.warning("Cannot increment stat %s for non-ObjectId user_id: %s", stat_name, user_id)
            return
        await unified_database_service.db.users.update_one(
            {"_id": object_id},
            {"$inc": {f"stats.{stat_name}": 1}}
        )
    except Exception as e:
        logger.warning("Failed to increment user stat: %s", e)
|
|
442
|
+
|
|
443
|
+
# ====================
|
|
444
|
+
# SESSION MANAGEMENT (from session_manager.py)
|
|
445
|
+
# ====================
|
|
446
|
+
|
|
447
|
+
def create_session(self, user_data: Dict[str, Any]) -> str:
    """Create a new CLI session for user and initialize their theme.

    Writes a session JSON (owner-readable only) under ~/.arionxiv,
    valid for self.session_duration_days.

    Args:
        user_data: must contain "id", "email", "user_name";
            "full_name" is optional.

    Returns:
        The generated session token, or "" on failure.
    """
    try:
        session_token = secrets.token_urlsafe(32)

        session_data = {
            "user_id": user_data["id"],
            "email": user_data["email"],
            "user_name": user_data["user_name"],
            "full_name": user_data.get("full_name", ""),
            "session_token": session_token,
            "created_at": datetime.utcnow().isoformat(),
            "expires_at": (datetime.utcnow() + timedelta(days=self.session_duration_days)).isoformat(),
            "last_activity": datetime.utcnow().isoformat()
        }

        # Save session to file
        with open(self.session_file, 'w') as f:
            json.dump(session_data, f, indent=2)

        # Set file permissions (readable only by user)
        os.chmod(self.session_file, 0o600)

        # Initialize user theme (runs in a background thread)
        self._initialize_user_theme(user_data["id"])

        # FIX: stdlib logging rejects `user_id=` kwarg (TypeError); use %-args
        logger.info("Session created (user_id=%s)", user_data["id"])
        return session_token

    except Exception as e:
        logger.error("Failed to create session: %s", e)
        return ""
|
|
479
|
+
|
|
480
|
+
def get_current_session(self) -> Optional[Dict[str, Any]]:
|
|
481
|
+
"""Get current active session"""
|
|
482
|
+
try:
|
|
483
|
+
if not self.session_file.exists():
|
|
484
|
+
return None
|
|
485
|
+
|
|
486
|
+
with open(self.session_file, 'r') as f:
|
|
487
|
+
session_data = json.load(f)
|
|
488
|
+
|
|
489
|
+
# Check if session is expired
|
|
490
|
+
expires_at = datetime.fromisoformat(session_data["expires_at"])
|
|
491
|
+
if datetime.utcnow() > expires_at:
|
|
492
|
+
self.clear_session()
|
|
493
|
+
return None
|
|
494
|
+
|
|
495
|
+
# Update last activity
|
|
496
|
+
session_data["last_activity"] = datetime.utcnow().isoformat()
|
|
497
|
+
with open(self.session_file, 'w') as f:
|
|
498
|
+
json.dump(session_data, f, indent=2)
|
|
499
|
+
|
|
500
|
+
# Initialize user theme if not already done
|
|
501
|
+
try:
|
|
502
|
+
from arionxiv.cli.ui.global_theme_manager import global_theme_manager
|
|
503
|
+
if not global_theme_manager.is_initialized():
|
|
504
|
+
self._initialize_user_theme(session_data["user_id"])
|
|
505
|
+
except Exception as e:
|
|
506
|
+
logger.debug(f"Theme initialization skipped: {e}")
|
|
507
|
+
|
|
508
|
+
return session_data
|
|
509
|
+
|
|
510
|
+
except Exception as e:
|
|
511
|
+
logger.error("Failed to get current session", error=str(e))
|
|
512
|
+
return None
|
|
513
|
+
|
|
514
|
+
def is_authenticated(self) -> bool:
|
|
515
|
+
"""Check if user is currently authenticated"""
|
|
516
|
+
session = self.get_current_session()
|
|
517
|
+
return session is not None
|
|
518
|
+
|
|
519
|
+
def get_current_user(self) -> Optional[Dict[str, Any]]:
|
|
520
|
+
"""Get current authenticated user info"""
|
|
521
|
+
session = self.get_current_session()
|
|
522
|
+
if session:
|
|
523
|
+
return {
|
|
524
|
+
"id": session["user_id"],
|
|
525
|
+
"email": session["email"],
|
|
526
|
+
"user_name": session["user_name"],
|
|
527
|
+
"full_name": session["full_name"]
|
|
528
|
+
}
|
|
529
|
+
return None
|
|
530
|
+
|
|
531
|
+
def clear_session(self):
|
|
532
|
+
"""Clear current session (logout)"""
|
|
533
|
+
try:
|
|
534
|
+
if self.session_file.exists():
|
|
535
|
+
self.session_file.unlink()
|
|
536
|
+
logger.info("Session cleared")
|
|
537
|
+
except Exception as e:
|
|
538
|
+
logger.error("Failed to clear session", error=str(e))
|
|
539
|
+
|
|
540
|
+
def extend_session(self, days: int = None):
|
|
541
|
+
"""Extend current session expiry"""
|
|
542
|
+
try:
|
|
543
|
+
if not self.session_file.exists():
|
|
544
|
+
return False
|
|
545
|
+
|
|
546
|
+
with open(self.session_file, 'r') as f:
|
|
547
|
+
session_data = json.load(f)
|
|
548
|
+
|
|
549
|
+
if days is None:
|
|
550
|
+
days = self.session_duration_days
|
|
551
|
+
|
|
552
|
+
session_data["expires_at"] = (datetime.utcnow() + timedelta(days=days)).isoformat()
|
|
553
|
+
session_data["last_activity"] = datetime.utcnow().isoformat()
|
|
554
|
+
|
|
555
|
+
with open(self.session_file, 'w') as f:
|
|
556
|
+
json.dump(session_data, f, indent=2)
|
|
557
|
+
|
|
558
|
+
return True
|
|
559
|
+
|
|
560
|
+
except Exception as e:
|
|
561
|
+
logger.error("Failed to extend session", error=str(e))
|
|
562
|
+
return False
|
|
563
|
+
|
|
564
|
+
def get_session_info(self) -> Optional[Dict[str, Any]]:
|
|
565
|
+
"""Get session information for display"""
|
|
566
|
+
session = self.get_current_session()
|
|
567
|
+
if session:
|
|
568
|
+
created_at = datetime.fromisoformat(session["created_at"])
|
|
569
|
+
expires_at = datetime.fromisoformat(session["expires_at"])
|
|
570
|
+
last_activity = datetime.fromisoformat(session["last_activity"])
|
|
571
|
+
|
|
572
|
+
return {
|
|
573
|
+
"user": {
|
|
574
|
+
"user_name": session["user_name"],
|
|
575
|
+
"email": session["email"],
|
|
576
|
+
"full_name": session["full_name"]
|
|
577
|
+
},
|
|
578
|
+
"session": {
|
|
579
|
+
"created": created_at.strftime("%Y-%m-%d %H:%M"),
|
|
580
|
+
"expires": expires_at.strftime("%Y-%m-%d %H:%M"),
|
|
581
|
+
"last_activity": last_activity.strftime("%Y-%m-%d %H:%M"),
|
|
582
|
+
"days_remaining": (expires_at - datetime.utcnow()).days
|
|
583
|
+
}
|
|
584
|
+
}
|
|
585
|
+
return None
|
|
586
|
+
|
|
587
|
+
def _initialize_user_theme(self, user_id: str):
    """Load the user's theme preference without blocking the caller.

    Spawns a daemon thread with its own event loop so session creation
    never waits on a database round-trip. All failures are logged and
    swallowed.
    """
    try:
        def init_theme():
            try:
                loop = asyncio.new_event_loop()
                asyncio.set_event_loop(loop)

                # Import here to avoid circular imports
                from arionxiv.cli.ui.global_theme_manager import global_theme_manager
                theme = loop.run_until_complete(global_theme_manager.initialize_user_theme(user_id))
                # FIX: stdlib logging rejects `user_id=` kwarg (TypeError); use %-args
                logger.info("User theme initialized: %s (user_id=%s)", theme, user_id)
                loop.close()
            except Exception as e:
                logger.error(f"Failed to initialize user theme: {e}")

        # Initialize in background thread to avoid blocking
        theme_thread = threading.Thread(target=init_theme, daemon=True)
        theme_thread.start()

    except Exception as e:
        logger.error(f"Error starting theme initialization: {e}")
|
|
609
|
+
|
|
610
|
+
# ====================
|
|
611
|
+
# PREFERENCES MANAGEMENT (from preferences_service.py)
|
|
612
|
+
# ====================
|
|
613
|
+
|
|
614
|
+
async def get_user_preferences(self, user_id: str) -> Dict[str, Any]:
    """Get user's paper preferences - tries API first, then local DB.

    Always reports success; on any failure the service-wide default
    preferences are returned so callers never have to special-case.
    """
    try:
        # Try API first for hosted users (no local MongoDB)
        try:
            from ..cli.utils.api_client import api_client
            if api_client.is_authenticated():
                result = await api_client.get_settings()
                if result.get("success"):
                    settings = result.get("settings", {})
                    return {
                        "success": True,
                        "preferences": {
                            "categories": settings.get("categories", ["cs.AI", "cs.LG", "cs.CV"]),
                            "keywords": settings.get("keywords", []),
                            "authors": settings.get("authors", []),
                            "exclude_keywords": settings.get("exclude_keywords", []),
                            "min_relevance_score": settings.get("min_relevance_score", 0.2),
                            "max_papers_per_day": settings.get("max_papers_per_day", 10),
                            "daily_dose": settings.get("daily_dose", {})
                        }
                    }
        except Exception as api_err:
            logger.debug(f"API preferences fetch failed, trying local DB: {api_err}")

        # Fall back to local database
        if unified_database_service.db is None:
            # Return defaults if no DB and API failed
            return {"success": True, "preferences": self._get_default_preferences()}

        # Get user and their preferences from local DB
        user_result = await self.get_user_by_id(user_id)
        if user_result["success"]:
            user = user_result["user"]
            preferences = user.get("preferences", {})

            # Return structured preferences with defaults
            return {
                "success": True,
                "preferences": {
                    "categories": preferences.get("categories", ["cs.AI", "cs.LG", "cs.CV"]),
                    "keywords": preferences.get("keywords", []),
                    "authors": preferences.get("authors", []),
                    "exclude_keywords": preferences.get("exclude_keywords", []),
                    "min_relevance_score": preferences.get("min_relevance_score", 0.2),
                    "max_papers_per_day": preferences.get("max_papers_per_day", 10)
                }
            }
        else:
            return {"success": True, "preferences": self._get_default_preferences()}

    except Exception as e:
        # FIX: stdlib logging rejects `error=` kwarg (TypeError); use %-args
        logger.error("Failed to get user preferences: %s", e)
        return {"success": True, "preferences": self._get_default_preferences()}
|
|
668
|
+
|
|
669
|
+
async def save_user_preferences(self, user_id: str, preferences: Dict[str, Any]) -> Dict[str, Any]:
    """Validate then persist the user's paper preferences.

    Delegates persistence to update_user_preferences (API first,
    local DB fallback).
    """
    try:
        # Validate preferences
        validated_prefs = self._validate_preferences(preferences)

        # Update user preferences
        return await self.update_user_preferences(user_id, {"preferences": validated_prefs})

    except Exception as e:
        # FIX: stdlib logging rejects `error=` kwarg (TypeError); use %-args
        logger.error("Failed to save user preferences: %s", e)
        return {"success": False, "message": str(e)}
|
|
681
|
+
|
|
682
|
+
async def update_user_preferences_partial(self, user_id: str, updates: Dict[str, Any]) -> Dict[str, Any]:
    """Apply targeted preference updates without replacing the full set.

    List-valued fields (``categories``, ``keywords``, ``authors``,
    ``exclude_keywords``) accept either a plain value (direct replacement)
    or a dict of the form ``{"add": [...], "remove": [...]}`` for
    incremental edits; duplicates are not added and missing items are
    silently skipped on removal.  All other fields are assigned directly.

    Args:
        user_id: Identifier of the user to update.
        updates: Mapping of preference field -> new value or add/remove spec.

    Returns:
        The result dict from ``save_user_preferences``, or an error dict.
    """
    list_fields = ("categories", "keywords", "authors", "exclude_keywords")
    try:
        # Start from the user's current (or default) preferences.
        fetched = await self.get_user_preferences(user_id)
        if not fetched["success"]:
            return fetched

        prefs = fetched["preferences"]

        for field, change in updates.items():
            if field in list_fields and isinstance(change, dict):
                # Incremental add/remove on a list-valued field.
                if "add" in change:
                    prefs.setdefault(field, [])
                    for entry in change["add"]:
                        if entry not in prefs[field]:
                            prefs[field].append(entry)
                if "remove" in change and field in prefs:
                    for entry in change["remove"]:
                        if entry in prefs[field]:
                            prefs[field].remove(entry)
            else:
                # Non-list field, or a list field replaced wholesale.
                prefs[field] = change

        # Persist the merged preference set.
        return await self.save_user_preferences(user_id, prefs)

    except Exception as e:
        logger.error("Failed to update user preferences", error=str(e))
        return {"success": False, "message": str(e)}
|
|
721
|
+
|
|
722
|
+
async def get_relevant_papers(self, user_id: str, days_back: int = 1) -> Dict[str, Any]:
    """Get papers relevant to user's preferences.

    Queries the papers collection for the last ``days_back`` days, filters
    at the DB level by preferred categories and excluded keywords, scores
    each candidate with ``_calculate_relevance_score``, drops papers below
    ``min_relevance_score``, and returns at most ``max_papers_per_day``
    papers sorted by descending relevance.

    Args:
        user_id: Identifier of the user whose preferences drive the search.
        days_back: Size of the lookback window in days (default 1).

    Returns:
        ``{"success": True, "papers": [...], "total_found": int,
        "date_range": {...}}`` on success, or
        ``{"success": False, "message": ...}`` on error.
    """
    try:
        # Ensure database is connected before querying
        if unified_database_service.db is None:
            await unified_database_service.connect_mongodb()

        # Get user preferences (falls back to defaults upstream)
        prefs_result = await self.get_user_preferences(user_id)
        if not prefs_result["success"]:
            return prefs_result

        preferences = prefs_result["preferences"]

        # Calculate date range for the lookback window
        end_date = datetime.utcnow()
        start_date = end_date - timedelta(days=days_back)

        # Build query based on preferences
        query = {
            "published_date": {
                "$gte": start_date,
                "$lte": end_date
            }
        }

        # Add category filter - this should be the PRIMARY filter
        if preferences["categories"]:
            query["categories"] = {"$in": preferences["categories"]}

        # Exclude papers with unwanted keywords at the query level so
        # they never reach scoring ($and of $not-regex conditions).
        if preferences["exclude_keywords"]:
            exclude_conditions = []
            for exclude_kw in preferences["exclude_keywords"]:
                exclude_conditions.extend([
                    {"title": {"$not": {"$regex": exclude_kw, "$options": "i"}}},
                    {"abstract": {"$not": {"$regex": exclude_kw, "$options": "i"}}}
                ])
            if exclude_conditions:
                query["$and"] = exclude_conditions

        # BUG FIX: previously the DB query was limited to
        # max_papers_per_day BEFORE relevance filtering, so candidates
        # scoring below min_relevance_score consumed slots and fewer than
        # the cap could be returned even when more relevant papers existed
        # in the window.  Fetch a larger candidate pool and trim after
        # scoring instead.
        candidate_limit = preferences["max_papers_per_day"] * 5
        papers = await unified_database_service.db.papers.find(query).limit(
            candidate_limit
        ).to_list(length=None)

        # Calculate relevance scores, keeping only papers at/above threshold
        scored_papers = []
        for paper in papers:
            score = self._calculate_relevance_score(paper, preferences)
            if score >= preferences["min_relevance_score"]:
                paper["relevance_score"] = score
                scored_papers.append(paper)

        # Sort by relevance score, best first
        scored_papers.sort(key=lambda x: x["relevance_score"], reverse=True)

        return {
            "success": True,
            "papers": scored_papers[:preferences["max_papers_per_day"]],
            "total_found": len(scored_papers),
            "date_range": {
                "start": start_date.isoformat(),
                "end": end_date.isoformat()
            }
        }

    except Exception as e:
        logger.error("Failed to get relevant papers", error=str(e))
        return {"success": False, "message": str(e)}
|
|
792
|
+
|
|
793
|
+
def _get_default_preferences(self) -> Dict[str, Any]:
|
|
794
|
+
"""Get default paper preferences"""
|
|
795
|
+
return {
|
|
796
|
+
"categories": ["cs.AI", "cs.LG", "cs.CV"],
|
|
797
|
+
"keywords": [],
|
|
798
|
+
"authors": [],
|
|
799
|
+
"exclude_keywords": [],
|
|
800
|
+
"min_relevance_score": 0.2,
|
|
801
|
+
"max_papers_per_day": 10,
|
|
802
|
+
"daily_dose_enabled": False,
|
|
803
|
+
"daily_dose_time": "08:00"
|
|
804
|
+
}
|
|
805
|
+
|
|
806
|
+
def _validate_preferences(self, preferences: Dict[str, Any]) -> Dict[str, Any]:
|
|
807
|
+
"""Validate and clean preferences"""
|
|
808
|
+
validated = {}
|
|
809
|
+
|
|
810
|
+
# Validate categories
|
|
811
|
+
categories = preferences.get("categories", [])
|
|
812
|
+
if isinstance(categories, list):
|
|
813
|
+
validated["categories"] = [cat for cat in categories if cat in self.arxiv_categories]
|
|
814
|
+
else:
|
|
815
|
+
validated["categories"] = []
|
|
816
|
+
|
|
817
|
+
# Validate keywords
|
|
818
|
+
keywords = preferences.get("keywords", [])
|
|
819
|
+
if isinstance(keywords, list):
|
|
820
|
+
validated["keywords"] = [kw.strip() for kw in keywords if kw.strip()]
|
|
821
|
+
else:
|
|
822
|
+
validated["keywords"] = []
|
|
823
|
+
|
|
824
|
+
# Validate authors
|
|
825
|
+
authors = preferences.get("authors", [])
|
|
826
|
+
if isinstance(authors, list):
|
|
827
|
+
validated["authors"] = [auth.strip() for auth in authors if auth.strip()]
|
|
828
|
+
else:
|
|
829
|
+
validated["authors"] = []
|
|
830
|
+
|
|
831
|
+
# Validate exclude keywords
|
|
832
|
+
exclude_keywords = preferences.get("exclude_keywords", [])
|
|
833
|
+
if isinstance(exclude_keywords, list):
|
|
834
|
+
validated["exclude_keywords"] = [kw.strip() for kw in exclude_keywords if kw.strip()]
|
|
835
|
+
else:
|
|
836
|
+
validated["exclude_keywords"] = []
|
|
837
|
+
|
|
838
|
+
# Validate min relevance score
|
|
839
|
+
min_score = preferences.get("min_relevance_score", 0.2)
|
|
840
|
+
if isinstance(min_score, (int, float)) and 0 <= min_score <= 1:
|
|
841
|
+
validated["min_relevance_score"] = min_score
|
|
842
|
+
else:
|
|
843
|
+
validated["min_relevance_score"] = 0.2
|
|
844
|
+
|
|
845
|
+
# Validate max papers per day
|
|
846
|
+
max_papers = preferences.get("max_papers_per_day", 10)
|
|
847
|
+
if isinstance(max_papers, int) and 1 <= max_papers <= 50:
|
|
848
|
+
validated["max_papers_per_day"] = max_papers
|
|
849
|
+
else:
|
|
850
|
+
validated["max_papers_per_day"] = 10
|
|
851
|
+
|
|
852
|
+
# Validate daily dose enabled
|
|
853
|
+
validated["daily_dose_enabled"] = bool(preferences.get("daily_dose_enabled", False))
|
|
854
|
+
|
|
855
|
+
# Validate daily dose time
|
|
856
|
+
daily_time = preferences.get("daily_dose_time", "08:00")
|
|
857
|
+
if isinstance(daily_time, str) and len(daily_time.split(":")) == 2:
|
|
858
|
+
try:
|
|
859
|
+
hour, minute = map(int, daily_time.split(":"))
|
|
860
|
+
if 0 <= hour <= 23 and 0 <= minute <= 59:
|
|
861
|
+
validated["daily_dose_time"] = daily_time
|
|
862
|
+
else:
|
|
863
|
+
validated["daily_dose_time"] = "08:00"
|
|
864
|
+
except ValueError:
|
|
865
|
+
validated["daily_dose_time"] = "08:00"
|
|
866
|
+
else:
|
|
867
|
+
validated["daily_dose_time"] = "08:00"
|
|
868
|
+
|
|
869
|
+
return validated
|
|
870
|
+
|
|
871
|
+
def _calculate_relevance_score(self, paper: Dict[str, Any], preferences: Dict[str, Any]) -> float:
|
|
872
|
+
"""Calculate relevance score for a paper based on user preferences"""
|
|
873
|
+
score = 0.0
|
|
874
|
+
|
|
875
|
+
title = paper.get("title", "").lower()
|
|
876
|
+
abstract = paper.get("abstract", "").lower()
|
|
877
|
+
categories = paper.get("categories", [])
|
|
878
|
+
authors = paper.get("authors", [])
|
|
879
|
+
|
|
880
|
+
# Category match is PRIMARY (70% of score)
|
|
881
|
+
if preferences["categories"]:
|
|
882
|
+
category_matches = len(set(categories) & set(preferences["categories"]))
|
|
883
|
+
if category_matches > 0:
|
|
884
|
+
category_score = 0.7 + (category_matches - 1) * 0.1
|
|
885
|
+
score += min(category_score, 0.8)
|
|
886
|
+
else:
|
|
887
|
+
score += 0.3
|
|
888
|
+
|
|
889
|
+
# Keyword match in title (15% of score)
|
|
890
|
+
if preferences["keywords"]:
|
|
891
|
+
title_matches = sum(1 for kw in preferences["keywords"] if kw.lower() in title)
|
|
892
|
+
if title_matches > 0:
|
|
893
|
+
title_score = min(title_matches / len(preferences["keywords"]) * 0.15, 0.15)
|
|
894
|
+
score += title_score
|
|
895
|
+
|
|
896
|
+
# Keyword match in abstract (10% of score)
|
|
897
|
+
if preferences["keywords"]:
|
|
898
|
+
abstract_matches = sum(1 for kw in preferences["keywords"] if kw.lower() in abstract)
|
|
899
|
+
if abstract_matches > 0:
|
|
900
|
+
abstract_score = min(abstract_matches / len(preferences["keywords"]) * 0.10, 0.10)
|
|
901
|
+
score += abstract_score
|
|
902
|
+
|
|
903
|
+
# Author match (5% of score)
|
|
904
|
+
if preferences["authors"]:
|
|
905
|
+
author_matches = sum(1 for auth in preferences["authors"]
|
|
906
|
+
if any(auth.lower() in author.lower() for author in authors))
|
|
907
|
+
if author_matches > 0:
|
|
908
|
+
author_score = min(author_matches / len(preferences["authors"]) * 0.05, 0.05)
|
|
909
|
+
score += author_score
|
|
910
|
+
|
|
911
|
+
# Penalty for exclude keywords
|
|
912
|
+
if preferences["exclude_keywords"]:
|
|
913
|
+
for exclude_kw in preferences["exclude_keywords"]:
|
|
914
|
+
if exclude_kw.lower() in title or exclude_kw.lower() in abstract:
|
|
915
|
+
score *= 0.3 # 70% penalty
|
|
916
|
+
|
|
917
|
+
return min(score, 1.0)
|
|
918
|
+
|
|
919
|
+
def get_available_categories(self) -> Dict[str, str]:
    """Get available arXiv categories.

    Returns:
        The mapping stored on ``self.arxiv_categories`` (category code ->
        description, e.g. "cs.AI" -> its human-readable name).  The live
        dict is returned, not a copy.
    """
    return self.arxiv_categories
|
|
922
|
+
|
|
923
|
+
|
|
924
|
+
# Global instances
# Single module-level service instance shared by all importers.
unified_user_service = UnifiedUserService()

# Backwards compatibility: earlier versions exposed separate user, session
# and preferences services; all three names now alias the unified service.
user_service = unified_user_service
session_manager = unified_user_service
preferences_service = unified_user_service

# Export commonly used functions as module-level bound methods so callers
# can import them directly without going through the service instance.
create_or_get_user = unified_user_service.create_or_get_user
get_user_by_id = unified_user_service.get_user_by_id
create_session = unified_user_service.create_session
get_current_session = unified_user_service.get_current_session
is_authenticated = unified_user_service.is_authenticated
get_user_preferences = unified_user_service.get_user_preferences
save_user_preferences = unified_user_service.save_user_preferences

# Explicit public API for `from ... import *` and documentation tools.
__all__ = [
    'UnifiedUserService',
    'unified_user_service',
    'user_service',
    'session_manager',
    'preferences_service',
    'create_or_get_user',
    'get_user_by_id',
    'create_session',
    'get_current_session',
    'is_authenticated',
    'get_user_preferences',
    'save_user_preferences'
]
|