arionxiv-1.0.32-py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. arionxiv/__init__.py +40 -0
  2. arionxiv/__main__.py +10 -0
  3. arionxiv/arxiv_operations/__init__.py +0 -0
  4. arionxiv/arxiv_operations/client.py +225 -0
  5. arionxiv/arxiv_operations/fetcher.py +173 -0
  6. arionxiv/arxiv_operations/searcher.py +122 -0
  7. arionxiv/arxiv_operations/utils.py +293 -0
  8. arionxiv/cli/__init__.py +4 -0
  9. arionxiv/cli/commands/__init__.py +1 -0
  10. arionxiv/cli/commands/analyze.py +587 -0
  11. arionxiv/cli/commands/auth.py +365 -0
  12. arionxiv/cli/commands/chat.py +714 -0
  13. arionxiv/cli/commands/daily.py +482 -0
  14. arionxiv/cli/commands/fetch.py +217 -0
  15. arionxiv/cli/commands/library.py +295 -0
  16. arionxiv/cli/commands/preferences.py +426 -0
  17. arionxiv/cli/commands/search.py +254 -0
  18. arionxiv/cli/commands/settings_unified.py +1407 -0
  19. arionxiv/cli/commands/trending.py +41 -0
  20. arionxiv/cli/commands/welcome.py +168 -0
  21. arionxiv/cli/main.py +407 -0
  22. arionxiv/cli/ui/__init__.py +1 -0
  23. arionxiv/cli/ui/global_theme_manager.py +173 -0
  24. arionxiv/cli/ui/logo.py +127 -0
  25. arionxiv/cli/ui/splash.py +89 -0
  26. arionxiv/cli/ui/theme.py +32 -0
  27. arionxiv/cli/ui/theme_system.py +391 -0
  28. arionxiv/cli/utils/__init__.py +54 -0
  29. arionxiv/cli/utils/animations.py +522 -0
  30. arionxiv/cli/utils/api_client.py +583 -0
  31. arionxiv/cli/utils/api_config.py +505 -0
  32. arionxiv/cli/utils/command_suggestions.py +147 -0
  33. arionxiv/cli/utils/db_config_manager.py +254 -0
  34. arionxiv/github_actions_runner.py +206 -0
  35. arionxiv/main.py +23 -0
  36. arionxiv/prompts/__init__.py +9 -0
  37. arionxiv/prompts/prompts.py +247 -0
  38. arionxiv/rag_techniques/__init__.py +8 -0
  39. arionxiv/rag_techniques/basic_rag.py +1531 -0
  40. arionxiv/scheduler_daemon.py +139 -0
  41. arionxiv/server.py +1000 -0
  42. arionxiv/server_main.py +24 -0
  43. arionxiv/services/__init__.py +73 -0
  44. arionxiv/services/llm_client.py +30 -0
  45. arionxiv/services/llm_inference/__init__.py +58 -0
  46. arionxiv/services/llm_inference/groq_client.py +469 -0
  47. arionxiv/services/llm_inference/llm_utils.py +250 -0
  48. arionxiv/services/llm_inference/openrouter_client.py +564 -0
  49. arionxiv/services/unified_analysis_service.py +872 -0
  50. arionxiv/services/unified_auth_service.py +457 -0
  51. arionxiv/services/unified_config_service.py +456 -0
  52. arionxiv/services/unified_daily_dose_service.py +823 -0
  53. arionxiv/services/unified_database_service.py +1633 -0
  54. arionxiv/services/unified_llm_service.py +366 -0
  55. arionxiv/services/unified_paper_service.py +604 -0
  56. arionxiv/services/unified_pdf_service.py +522 -0
  57. arionxiv/services/unified_prompt_service.py +344 -0
  58. arionxiv/services/unified_scheduler_service.py +589 -0
  59. arionxiv/services/unified_user_service.py +954 -0
  60. arionxiv/utils/__init__.py +51 -0
  61. arionxiv/utils/api_helpers.py +200 -0
  62. arionxiv/utils/file_cleanup.py +150 -0
  63. arionxiv/utils/ip_helper.py +96 -0
  64. arionxiv-1.0.32.dist-info/METADATA +336 -0
  65. arionxiv-1.0.32.dist-info/RECORD +69 -0
  66. arionxiv-1.0.32.dist-info/WHEEL +5 -0
  67. arionxiv-1.0.32.dist-info/entry_points.txt +4 -0
  68. arionxiv-1.0.32.dist-info/licenses/LICENSE +21 -0
  69. arionxiv-1.0.32.dist-info/top_level.txt +1 -0
arionxiv/services/unified_scheduler_service.py
@@ -0,0 +1,589 @@
+"""
+Unified Scheduler Service for ArionXiv - Consolidates scheduler.py and daily_dose_scheduler.py
+Handles both daily analysis automation and user-specific daily dose scheduling
+"""
+
+import asyncio
+import logging
+from datetime import datetime, timedelta
+from typing import Dict, Any, List
+from pathlib import Path
+import sys
+
+from apscheduler.schedulers.asyncio import AsyncIOScheduler
+from apscheduler.triggers.cron import CronTrigger
+from apscheduler.jobstores.memory import MemoryJobStore
+
+from .unified_config_service import unified_config_service
+from .unified_paper_service import unified_paper_service
+from .unified_analysis_service import unified_analysis_service
+from .unified_database_service import unified_database_service
+
+# Import daily dose service lazily to avoid circular imports
+def get_daily_dose_service():
+    from .unified_daily_dose_service import unified_daily_dose_service
+    return unified_daily_dose_service
+
+logger = logging.getLogger(__name__)
+
+class UnifiedSchedulerService:
+    """
+    Unified scheduler service that handles:
+    1. Daily analysis pipeline (scheduler.py functionality)
+    2. User-specific daily dose scheduling (daily_dose_scheduler.py functionality)
+    """
+
+    def __init__(self):
+        # Configure job store
+        jobstores = {
+            'default': MemoryJobStore()
+        }
+
+        # Configure scheduler
+        self.scheduler = AsyncIOScheduler(
+            jobstores=jobstores,
+            timezone=unified_config_service.get_cron_schedule()["timezone"]
+        )
+
+        self.is_running = False
+
+    # ================================
+    # CORE SCHEDULER MANAGEMENT
+    # ================================
+
+    async def start(self):
+        """Start the scheduler service"""
+        if not self.is_running:
+            try:
+                # Schedule daily analysis pipeline
+                await self._schedule_daily_analysis()
+
+                # Start the scheduler
+                self.scheduler.start()
+                self.is_running = True
+
+                logger.info("Unified scheduler service started successfully")
+
+            except Exception as e:
+                logger.error(f"Failed to start scheduler service: {e}")
+                raise
+
+    async def stop(self):
+        """Stop the scheduler service"""
+        if self.is_running:
+            try:
+                self.scheduler.shutdown()
+                self.is_running = False
+                logger.info("Scheduler service stopped")
+            except Exception as e:
+                logger.error(f"Error stopping scheduler: {e}")
+
+    async def get_status(self) -> Dict[str, Any]:
+        """Get scheduler status and job information"""
+        try:
+            jobs = self.scheduler.get_jobs()
+
+            return {
+                'status': 'running' if self.is_running else 'stopped',
+                'timezone': str(self.scheduler.timezone),
+                'job_count': len(jobs),
+                'jobs': [
+                    {
+                        'id': job.id,
+                        'name': job.name,
+                        'next_run': job.next_run_time.isoformat() if job.next_run_time else None,
+                        'trigger': str(job.trigger)
+                    }
+                    for job in jobs
+                ]
+            }
+        except Exception as e:
+            logger.error(f"Error getting scheduler status: {e}")
+            return {'status': 'error', 'error': str(e)}
+
+    # ================================
+    # DAILY ANALYSIS PIPELINE (from scheduler.py)
+    # ================================
+
+    async def _schedule_daily_analysis(self):
+        """Schedule the daily analysis pipeline"""
+        try:
+            cron_config = unified_config_service.get_cron_schedule()
+
+            # Create cron trigger
+            trigger = CronTrigger(
+                hour=cron_config["hour"],
+                minute=cron_config["minute"],
+                timezone=cron_config["timezone"]
+            )
+
+            # Schedule the job
+            self.scheduler.add_job(
+                func=self.run_daily_analysis_pipeline,
+                trigger=trigger,
+                id='daily_analysis_pipeline',
+                name='Daily ArXiv Analysis Pipeline',
+                replace_existing=True,
+                max_instances=1
+            )
+
+            logger.info(f"Daily analysis scheduled for {cron_config['hour']:02d}:{cron_config['minute']:02d} {cron_config['timezone']}")
+
+        except Exception as e:
+            logger.error(f"Failed to schedule daily analysis: {e}")
+            raise
+
+    async def run_daily_analysis_pipeline(self):
+        """Execute the complete daily analysis pipeline"""
+        start_time = datetime.now()
+        logger.info("Starting daily analysis pipeline")
+
+        try:
+            # Step 1: Fetch latest papers from ArXiv
+            logger.info("Fetching latest papers from ArXiv...")
+            papers = await unified_paper_service.fetch_daily_papers()
+
+            if not papers:
+                logger.warning("No papers fetched from ArXiv")
+                return
+
+            logger.info(f"Fetched {len(papers)} papers from ArXiv")
+
+            # Step 2: Get all users for analysis
+            users = await unified_database_service.get_all_active_users()
+            logger.info(f"Processing daily analysis for {len(users)} users")
+
+            # Step 3: Process each user's daily analysis
+            successful_analyses = 0
+            failed_analyses = 0
+
+            for user in users:
+                try:
+                    user_id = str(user['_id'])
+
+                    # Cleanup previous daily analysis for this user
+                    await unified_analysis_service.cleanup_previous_daily_analysis(user_id)
+
+                    # Run analysis for this user
+                    result = await unified_analysis_service.analyze_papers_for_user(
+                        user_id=user_id,
+                        papers=papers,
+                        analysis_type='daily_automated'
+                    )
+
+                    if result['success']:
+                        successful_analyses += 1
+                        logger.info(f"Daily analysis completed for user {user_id}")
+                    else:
+                        failed_analyses += 1
+                        logger.error(f"Daily analysis failed for user {user_id}: {result.get('error', 'Unknown error')}")
+
+                except Exception as e:
+                    failed_analyses += 1
+                    logger.error(f"Error processing daily analysis for user {user.get('_id', 'unknown')}: {e}")
+
+            # Step 4: Store pipeline execution stats
+            execution_time = (datetime.now() - start_time).total_seconds()
+
+            stats = {
+                'execution_date': start_time,
+                'papers_fetched': len(papers),
+                'users_processed': len(users),
+                'successful_analyses': successful_analyses,
+                'failed_analyses': failed_analyses,
+                'execution_time_seconds': execution_time,
+                'status': 'completed'
+            }
+
+            await unified_database_service.store_pipeline_stats(stats)
+
+            logger.info(f"Daily analysis pipeline completed in {execution_time:.2f} seconds")
+            logger.info(f"Success: {successful_analyses}, Failed: {failed_analyses}")
+
+        except Exception as e:
+            execution_time = (datetime.now() - start_time).total_seconds()
+            logger.error(f"Daily analysis pipeline failed after {execution_time:.2f} seconds: {e}")
+
+            # Store failure stats
+            stats = {
+                'execution_date': start_time,
+                'execution_time_seconds': execution_time,
+                'status': 'failed',
+                'error': str(e)
+            }
+
+            await unified_database_service.store_pipeline_stats(stats)
+            raise
+
+    async def trigger_manual_analysis(self) -> Dict[str, Any]:
+        """Manually trigger the daily analysis pipeline"""
+        try:
+            logger.info("Manual daily analysis pipeline triggered")
+            await self.run_daily_analysis_pipeline()
+            return {'success': True, 'message': 'Manual analysis completed successfully'}
+        except Exception as e:
+            logger.error(f"Manual analysis failed: {e}")
+            return {'success': False, 'error': str(e)}
+
+    # ================================
+    # USER DAILY DOSE SCHEDULING (from daily_dose_scheduler.py)
+    # ================================
+
+    async def schedule_user_daily_dose(self, user_id: str, time_str: str, days_back: int = 1) -> Dict[str, Any]:
+        """Schedule daily dose for a specific user"""
+        try:
+            # Parse time
+            hour, minute = map(int, time_str.split(':'))
+
+            # Validate time
+            if not (0 <= hour <= 23 and 0 <= minute <= 59):
+                return {
+                    'success': False,
+                    'error': 'Invalid time format. Hour must be 0-23, minute must be 0-59'
+                }
+
+            # Create job ID
+            job_id = f'daily_dose_{user_id}'
+
+            # Create cron trigger for this user
+            trigger = CronTrigger(
+                hour=hour,
+                minute=minute,
+                timezone=unified_config_service.get_cron_schedule()["timezone"]
+            )
+
+            # Schedule the job
+            self.scheduler.add_job(
+                func=self._execute_user_daily_dose,
+                trigger=trigger,
+                args=[user_id, days_back],
+                id=job_id,
+                name=f'Daily Dose for User {user_id}',
+                replace_existing=True,
+                max_instances=1
+            )
+
+            logger.info(f"Daily dose scheduled for user {user_id} at {time_str}")
+
+            return {
+                'success': True,
+                'message': f'Daily dose scheduled for {time_str}',
+                'user_id': user_id,
+                'time': time_str,
+                'job_id': job_id
+            }
+
+        except ValueError as e:
+            return {
+                'success': False,
+                'error': f'Invalid time format: {e}. Use HH:MM format'
+            }
+        except Exception as e:
+            logger.error(f"Error scheduling daily dose for user {user_id}: {e}")
+            return {
+                'success': False,
+                'error': str(e)
+            }
+
+    async def trigger_user_daily_dose(self, user_id: str, days_back: int = 1) -> Dict[str, Any]:
+        """Trigger daily dose execution immediately for a specific user"""
+        try:
+            result = await self._execute_user_daily_dose(user_id, days_back)
+            if result.get("success"):
+                return {
+                    'success': True,
+                    'message': f'Daily dose executed successfully for user {user_id}',
+                    'user_id': user_id,
+                    'papers_count': result.get('papers_count', 0),
+                    'analysis_id': result.get('analysis_id')
+                }
+            else:
+                return {
+                    'success': False,
+                    'message': result.get('message', 'Unknown error'),
+                    'user_id': user_id
+                }
+        except Exception as e:
+            logger.error(f"Failed to execute daily dose for user {user_id}: {str(e)}")
+            return {
+                'success': False,
+                'error': f'Failed to execute daily dose: {str(e)}',
+                'user_id': user_id
+            }
+
+    async def _execute_user_daily_dose(self, user_id: str, days_back: int):
+        """Execute daily dose for a specific user using the daily dose service."""
+        try:
+            logger.info(f"Executing daily dose for user {user_id}")
+
+            # Use the daily dose service for execution
+            daily_dose_service = get_daily_dose_service()
+            result = await daily_dose_service.execute_daily_dose(user_id)
+
+            if result["success"]:
+                logger.info(f"Daily dose completed for user {user_id}: {result.get('papers_count', 0)} papers analyzed")
+            else:
+                logger.error(f"Daily dose failed for user {user_id}: {result.get('message')}")
+
+            return result
+
+        except Exception as e:
+            logger.error(f"Error executing daily dose for user {user_id}: {e}")
+            return {
+                "success": False,
+                "message": str(e),
+                "papers_count": 0
+            }
+
+    async def _fetch_personalized_papers(self, preferences: Dict[str, Any], start_date: datetime, end_date: datetime) -> List[Dict[str, Any]]:
+        """Fetch papers based on user preferences"""
+        try:
+            # Get preferred categories
+            categories = preferences.get('categories', [])
+            keywords = preferences.get('keywords', [])
+            authors = preferences.get('authors', [])
+            exclude_keywords = preferences.get('exclude_keywords', [])
+            max_papers = preferences.get('max_papers_per_day', 10)
+
+            # Build search query
+            query_parts = []
+
+            # Add categories to query
+            if categories:
+                category_query = ' OR '.join([f'cat:{cat}' for cat in categories])
+                query_parts.append(f'({category_query})')
+
+            # Add keywords to query
+            if keywords:
+                keyword_query = ' OR '.join(keywords)
+                query_parts.append(f'({keyword_query})')
+
+            # Add authors to query
+            if authors:
+                author_query = ' OR '.join([f'au:{author}' for author in authors])
+                query_parts.append(f'({author_query})')
+
+            # Combine query parts
+            query = ' AND '.join(query_parts) if query_parts else 'cat:cs.*'
+
+            # Fetch papers from ArXiv
+            papers = await unified_paper_service.search_papers(
+                query=query,
+                max_results=max_papers * 2,  # Fetch more to allow for filtering
+                sort_by='submittedDate',
+                sort_order='descending'
+            )
+
+            # Filter by date range
+            filtered_papers = []
+            for paper in papers:
+                paper_date = datetime.fromisoformat(paper.get('published', '').replace('Z', '+00:00'))
+                if start_date <= paper_date <= end_date:
+                    # Check exclude keywords
+                    if exclude_keywords:
+                        title_and_abstract = f"{paper.get('title', '')} {paper.get('summary', '')}".lower()
+                        if any(exclude_kw.lower() in title_and_abstract for exclude_kw in exclude_keywords):
+                            continue
+
+                    filtered_papers.append(paper)
+
+                    if len(filtered_papers) >= max_papers:
+                        break
+
+            return filtered_papers
+
+        except Exception as e:
+            logger.error(f"Error fetching personalized papers: {e}")
+            return []
+
+    async def cancel_user_daily_dose(self, user_id: str) -> Dict[str, Any]:
+        """Cancel daily dose for a specific user"""
+        try:
+            job_id = f'daily_dose_{user_id}'
+
+            # Remove the job
+            self.scheduler.remove_job(job_id)
+
+            logger.info(f"Daily dose cancelled for user {user_id}")
+
+            return {
+                'success': True,
+                'message': f'Daily dose cancelled for user {user_id}',
+                'user_id': user_id
+            }
+
+        except Exception as e:
+            logger.error(f"Error cancelling daily dose for user {user_id}: {e}")
+            return {
+                'success': False,
+                'error': str(e)
+            }
+
+    async def get_user_daily_dose_status(self, user_id: str) -> Dict[str, Any]:
+        """Get daily dose status for a specific user"""
+        try:
+            job_id = f'daily_dose_{user_id}'
+
+            try:
+                job = self.scheduler.get_job(job_id)
+                if job:
+                    return {
+                        'success': True,
+                        'scheduled': True,
+                        'next_run': job.next_run_time.isoformat() if job.next_run_time else None,
+                        'trigger': str(job.trigger)
+                    }
+                else:
+                    return {
+                        'success': True,
+                        'scheduled': False,
+                        'message': 'No daily dose scheduled for this user'
+                    }
+            except Exception:
+                return {
+                    'success': True,
+                    'scheduled': False,
+                    'message': 'No daily dose scheduled for this user'
+                }
+
+        except Exception as e:
+            logger.error(f"Error getting daily dose status for user {user_id}: {e}")
+            return {
+                'success': False,
+                'error': str(e)
+            }
+
+    # ================================
+    # STATISTICS AND MONITORING
+    # ================================
+
+    async def get_pipeline_stats(self, days: int = 7) -> Dict[str, Any]:
+        """Get pipeline execution statistics"""
+        try:
+            stats = await unified_database_service.get_pipeline_stats(days)
+            return {
+                'success': True,
+                'stats': stats
+            }
+        except Exception as e:
+            logger.error(f"Error getting pipeline stats: {e}")
+            return {
+                'success': False,
+                'error': str(e)
+            }
+
+    async def get_user_dose_history(self, user_id: str, days: int = 30) -> Dict[str, Any]:
+        """Get user's daily dose execution history"""
+        try:
+            # Get user's daily analyses from the last N days
+            end_date = datetime.now()
+            start_date = end_date - timedelta(days=days)
+
+            analyses = await unified_database_service.get_user_analyses(
+                user_id=user_id,
+                start_date=start_date,
+                end_date=end_date,
+                analysis_type='daily_dose'
+            )
+
+            return {
+                'success': True,
+                'user_id': user_id,
+                'history': analyses,
+                'count': len(analyses)
+            }
+
+        except Exception as e:
+            logger.error(f"Error getting dose history for user {user_id}: {e}")
+            return {
+                'success': False,
+                'error': str(e)
+            }
+
+    # ================================
+    # UTILITY FUNCTIONS
+    # ================================
+
+    async def reschedule_daily_analysis(self, hour: int, minute: int) -> Dict[str, Any]:
+        """Reschedule the daily analysis pipeline"""
+        try:
+            # Remove existing job
+            try:
+                self.scheduler.remove_job('daily_analysis_pipeline')
+            except Exception:
+                pass  # Job might not exist
+
+            # Create new trigger
+            trigger = CronTrigger(
+                hour=hour,
+                minute=minute,
+                timezone=unified_config_service.get_cron_schedule()["timezone"]
+            )
+
+            # Schedule new job
+            self.scheduler.add_job(
+                func=self.run_daily_analysis_pipeline,
+                trigger=trigger,
+                id='daily_analysis_pipeline',
+                name='Daily ArXiv Analysis Pipeline',
+                replace_existing=True,
+                max_instances=1
+            )
+
+            logger.info(f"Daily analysis rescheduled for {hour:02d}:{minute:02d}")
+
+            return {
+                'success': True,
+                'message': f'Daily analysis rescheduled for {hour:02d}:{minute:02d}'
+            }
+
+        except Exception as e:
+            logger.error(f"Error rescheduling daily analysis: {e}")
+            return {
+                'success': False,
+                'error': str(e)
+            }
+
+# ================================
+# SINGLETON INSTANCE
+# ================================
+
+# Create singleton instance
+unified_scheduler = UnifiedSchedulerService()
+
+# Export commonly used functions for backwards compatibility
+async def start_scheduler():
+    """Start the unified scheduler service"""
+    return await unified_scheduler.start()
+
+async def stop_scheduler():
+    """Stop the unified scheduler service"""
+    return await unified_scheduler.stop()
+
+async def run_daily_analysis_pipeline():
+    """Run the daily analysis pipeline manually"""
+    return await unified_scheduler.trigger_manual_analysis()
+
+async def schedule_user_daily_dose(user_id: str, time_str: str, days_back: int = 1):
+    """Schedule daily dose for a user"""
+    return await unified_scheduler.schedule_user_daily_dose(user_id, time_str, days_back)

+async def trigger_user_daily_dose(user_id: str, days_back: int = 1):
+    """Trigger daily dose execution immediately for a user"""
+    return await unified_scheduler.trigger_user_daily_dose(user_id, days_back)
+
+# For backwards compatibility
+daily_scheduler = unified_scheduler
+daily_dose_scheduler = unified_scheduler
+
+# Export all public functions
+__all__ = [
+    'UnifiedSchedulerService',
+    'unified_scheduler',
+    'daily_scheduler',
+    'daily_dose_scheduler',
+    'start_scheduler',
+    'stop_scheduler',
+    'run_daily_analysis_pipeline',
+    'schedule_user_daily_dose'
+]
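
For orientation, this module is meant to be driven through the unified_scheduler singleton and the backwards-compatibility wrappers exported at the bottom of the file. The following is a minimal usage sketch, not taken from the package's documentation: it assumes unified_config_service can resolve a valid cron schedule and timezone at import time, that the backing database and paper services are configured, and "user123" is a placeholder user id.

import asyncio

from arionxiv.services.unified_scheduler_service import (
    unified_scheduler,
    start_scheduler,
    stop_scheduler,
    schedule_user_daily_dose,
)

async def main():
    # Registers the daily analysis cron job and starts APScheduler
    await start_scheduler()

    # Add a per-user "daily dose" job firing every day at 08:30
    result = await schedule_user_daily_dose("user123", "08:30")
    print(result)  # e.g. {'success': True, 'job_id': 'daily_dose_user123', ...}

    # Inspect the scheduler and its registered jobs
    status = await unified_scheduler.get_status()
    print(status["job_count"], [job["id"] for job in status["jobs"]])

    # The event loop must stay alive for cron triggers to fire;
    # here we wait briefly, then shut down cleanly.
    await asyncio.sleep(60)
    await stop_scheduler()

asyncio.run(main())

Note that AsyncIOScheduler binds to the running asyncio event loop, so start_scheduler() must be awaited from within one (as above), and jobs only fire while that loop keeps running.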