pomera-ai-commander 0.1.0 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (191)
  1. package/LICENSE +21 -21
  2. package/README.md +105 -680
  3. package/bin/pomera-ai-commander.js +62 -62
  4. package/core/__init__.py +65 -65
  5. package/core/app_context.py +482 -482
  6. package/core/async_text_processor.py +421 -421
  7. package/core/backup_manager.py +655 -655
  8. package/core/backup_recovery_manager.py +1033 -1033
  9. package/core/content_hash_cache.py +508 -508
  10. package/core/context_menu.py +313 -313
  11. package/core/data_validator.py +1066 -1066
  12. package/core/database_connection_manager.py +744 -744
  13. package/core/database_curl_settings_manager.py +608 -608
  14. package/core/database_promera_ai_settings_manager.py +446 -446
  15. package/core/database_schema.py +411 -411
  16. package/core/database_schema_manager.py +395 -395
  17. package/core/database_settings_manager.py +1507 -1507
  18. package/core/database_settings_manager_interface.py +456 -456
  19. package/core/dialog_manager.py +734 -734
  20. package/core/efficient_line_numbers.py +510 -510
  21. package/core/error_handler.py +746 -746
  22. package/core/error_service.py +431 -431
  23. package/core/event_consolidator.py +511 -511
  24. package/core/mcp/__init__.py +43 -43
  25. package/core/mcp/protocol.py +288 -288
  26. package/core/mcp/schema.py +251 -251
  27. package/core/mcp/server_stdio.py +299 -299
  28. package/core/mcp/tool_registry.py +2372 -2345
  29. package/core/memory_efficient_text_widget.py +711 -711
  30. package/core/migration_manager.py +914 -914
  31. package/core/migration_test_suite.py +1085 -1085
  32. package/core/migration_validator.py +1143 -1143
  33. package/core/optimized_find_replace.py +714 -714
  34. package/core/optimized_pattern_engine.py +424 -424
  35. package/core/optimized_search_highlighter.py +552 -552
  36. package/core/performance_monitor.py +674 -674
  37. package/core/persistence_manager.py +712 -712
  38. package/core/progressive_stats_calculator.py +632 -632
  39. package/core/regex_pattern_cache.py +529 -529
  40. package/core/regex_pattern_library.py +350 -350
  41. package/core/search_operation_manager.py +434 -434
  42. package/core/settings_defaults_registry.py +1087 -1087
  43. package/core/settings_integrity_validator.py +1111 -1111
  44. package/core/settings_serializer.py +557 -557
  45. package/core/settings_validator.py +1823 -1823
  46. package/core/smart_stats_calculator.py +709 -709
  47. package/core/statistics_update_manager.py +619 -619
  48. package/core/stats_config_manager.py +858 -858
  49. package/core/streaming_text_handler.py +723 -723
  50. package/core/task_scheduler.py +596 -596
  51. package/core/update_pattern_library.py +168 -168
  52. package/core/visibility_monitor.py +596 -596
  53. package/core/widget_cache.py +498 -498
  54. package/mcp.json +51 -61
  55. package/package.json +61 -57
  56. package/pomera.py +7482 -7482
  57. package/pomera_mcp_server.py +183 -144
  58. package/requirements.txt +32 -0
  59. package/tools/__init__.py +4 -4
  60. package/tools/ai_tools.py +2891 -2891
  61. package/tools/ascii_art_generator.py +352 -352
  62. package/tools/base64_tools.py +183 -183
  63. package/tools/base_tool.py +511 -511
  64. package/tools/case_tool.py +308 -308
  65. package/tools/column_tools.py +395 -395
  66. package/tools/cron_tool.py +884 -884
  67. package/tools/curl_history.py +600 -600
  68. package/tools/curl_processor.py +1207 -1207
  69. package/tools/curl_settings.py +502 -502
  70. package/tools/curl_tool.py +5467 -5467
  71. package/tools/diff_viewer.py +1071 -1071
  72. package/tools/email_extraction_tool.py +248 -248
  73. package/tools/email_header_analyzer.py +425 -425
  74. package/tools/extraction_tools.py +250 -250
  75. package/tools/find_replace.py +1750 -1750
  76. package/tools/folder_file_reporter.py +1463 -1463
  77. package/tools/folder_file_reporter_adapter.py +480 -480
  78. package/tools/generator_tools.py +1216 -1216
  79. package/tools/hash_generator.py +255 -255
  80. package/tools/html_tool.py +656 -656
  81. package/tools/jsonxml_tool.py +729 -729
  82. package/tools/line_tools.py +419 -419
  83. package/tools/markdown_tools.py +561 -561
  84. package/tools/mcp_widget.py +1417 -1417
  85. package/tools/notes_widget.py +973 -973
  86. package/tools/number_base_converter.py +372 -372
  87. package/tools/regex_extractor.py +571 -571
  88. package/tools/slug_generator.py +310 -310
  89. package/tools/sorter_tools.py +458 -458
  90. package/tools/string_escape_tool.py +392 -392
  91. package/tools/text_statistics_tool.py +365 -365
  92. package/tools/text_wrapper.py +430 -430
  93. package/tools/timestamp_converter.py +421 -421
  94. package/tools/tool_loader.py +710 -710
  95. package/tools/translator_tools.py +522 -522
  96. package/tools/url_link_extractor.py +261 -261
  97. package/tools/url_parser.py +204 -204
  98. package/tools/whitespace_tools.py +355 -355
  99. package/tools/word_frequency_counter.py +146 -146
  100. package/core/__pycache__/__init__.cpython-313.pyc +0 -0
  101. package/core/__pycache__/app_context.cpython-313.pyc +0 -0
  102. package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
  103. package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
  104. package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
  105. package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
  106. package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
  107. package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
  108. package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
  109. package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
  110. package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
  111. package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
  112. package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
  113. package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
  114. package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
  115. package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
  116. package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
  117. package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
  118. package/core/__pycache__/error_service.cpython-313.pyc +0 -0
  119. package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
  120. package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
  121. package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
  122. package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
  123. package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
  124. package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
  125. package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
  126. package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
  127. package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
  128. package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
  129. package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
  130. package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
  131. package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
  132. package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
  133. package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
  134. package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
  135. package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
  136. package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
  137. package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
  138. package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
  139. package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
  140. package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
  141. package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
  142. package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
  143. package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
  144. package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
  145. package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
  146. package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
  147. package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
  148. package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
  149. package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
  150. package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
  151. package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
  152. package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
  153. package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
  154. package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
  155. package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
  156. package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
  157. package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
  158. package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
  159. package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
  160. package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
  161. package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
  162. package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
  163. package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
  164. package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
  165. package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
  166. package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
  167. package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
  168. package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
  169. package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
  170. package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
  171. package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
  172. package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
  173. package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
  174. package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
  175. package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
  176. package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
  177. package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
  178. package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
  179. package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
  180. package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
  181. package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
  182. package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
  183. package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
  184. package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
  185. package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
  186. package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
  187. package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
  188. package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
  189. package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
  190. package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
  191. package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
package/core/task_scheduler.py (old and new file text is identical)
@@ -1,596 +1,596 @@
"""
Task Scheduler - Centralized background task management.

Consolidates multiple background threads into a single managed scheduler
to reduce overhead and improve resource management.

Author: Pomera AI Commander Team
"""

import threading
import time
import logging
from typing import Dict, Callable, Optional, Any, List
from dataclasses import dataclass, field
from concurrent.futures import ThreadPoolExecutor, Future
from enum import Enum
import heapq
from datetime import datetime


class TaskPriority(Enum):
    """Task priority levels (lower value = higher priority)."""
    CRITICAL = 0
    HIGH = 1
    NORMAL = 2
    LOW = 3


class TaskState(Enum):
    """Task execution states."""
    PENDING = "pending"
    RUNNING = "running"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"


@dataclass(order=True)
class ScheduledTask:
    """
    A scheduled task with timing and priority.

    Ordering is by (next_run, priority) for heap queue.
    """
    next_run: float = field(compare=True)
    priority: int = field(compare=True)
    task_id: str = field(compare=False)
    func: Callable = field(compare=False)
    interval_seconds: float = field(compare=False, default=0)
    is_recurring: bool = field(compare=False, default=False)
    args: tuple = field(compare=False, default_factory=tuple)
    kwargs: dict = field(compare=False, default_factory=dict)
    state: TaskState = field(compare=False, default=TaskState.PENDING)
    last_run: Optional[float] = field(compare=False, default=None)
    run_count: int = field(compare=False, default=0)
    error_count: int = field(compare=False, default=0)
    last_error: Optional[str] = field(compare=False, default=None)


@dataclass
class TaskResult:
    """Result of a task execution."""
    task_id: str
    success: bool
    result: Any = None
    error: Optional[str] = None
    execution_time: float = 0.0
    timestamp: datetime = field(default_factory=datetime.now)


class TaskScheduler:
    """
    Centralized task scheduler for background operations.

    Consolidates:
    - Periodic cleanup tasks (stats cache, backup manager)
    - Async text processing
    - Backup operations
    - Any other background work

    Benefits:
    - Single thread pool instead of multiple daemon threads
    - Proper shutdown handling
    - Task prioritization
    - Better resource utilization
    - Task monitoring and statistics

    Usage:
        scheduler = TaskScheduler()
        scheduler.start()

        # One-time task
        scheduler.schedule_once('cleanup', cleanup_func, delay_seconds=60)

        # Recurring task
        scheduler.schedule_recurring('backup', backup_func, interval_seconds=300)

        # Later...
        scheduler.stop()
    """

    def __init__(self,
                 max_workers: int = 4,
                 logger: Optional[logging.Logger] = None,
                 name: str = "TaskScheduler"):
        """
        Initialize the task scheduler.

        Args:
            max_workers: Maximum concurrent worker threads
            logger: Logger instance (creates one if not provided)
            name: Name for the scheduler (used in thread names)
        """
        self.name = name
        self.logger = logger or logging.getLogger(__name__)
        self.max_workers = max_workers

        # Thread pool for task execution
        self._executor: Optional[ThreadPoolExecutor] = None

        # Scheduled tasks
        self._task_queue: List[ScheduledTask] = []  # heap queue
        self._tasks: Dict[str, ScheduledTask] = {}
        self._task_lock = threading.RLock()

        # Scheduler thread
        self._scheduler_thread: Optional[threading.Thread] = None
        self._stop_event = threading.Event()
        self._running = False

        # Active futures
        self._active_futures: Dict[str, Future] = {}

        # Task results history
        self._results_history: List[TaskResult] = []
        self._max_results_history = 100

        # Statistics
        self._stats = {
            'tasks_executed': 0,
            'tasks_failed': 0,
            'tasks_cancelled': 0,
            'total_execution_time': 0.0,
            'started_at': None,
            'stopped_at': None
        }

        # Callbacks
        self._on_task_complete: Optional[Callable[[TaskResult], None]] = None
        self._on_task_error: Optional[Callable[[str, Exception], None]] = None

    def start(self) -> None:
        """Start the scheduler."""
        if self._running:
            self.logger.warning(f"{self.name} is already running")
            return

        self._stop_event.clear()
        self._running = True
        self._stats['started_at'] = datetime.now()
        self._stats['stopped_at'] = None

        # Create thread pool
        self._executor = ThreadPoolExecutor(
            max_workers=self.max_workers,
            thread_name_prefix=f"{self.name}-Worker"
        )

        # Start scheduler thread
        self._scheduler_thread = threading.Thread(
            target=self._scheduler_loop,
            daemon=True,
            name=f"{self.name}-Main"
        )
        self._scheduler_thread.start()

        self.logger.info(f"{self.name} started with {self.max_workers} workers")

    def stop(self, wait: bool = True, timeout: float = 10.0) -> None:
        """
        Stop the scheduler.

        Args:
            wait: Whether to wait for running tasks to complete
            timeout: Maximum time to wait for shutdown
        """
        if not self._running:
            return

        self.logger.info(f"Stopping {self.name}...")
        self._stop_event.set()
        self._running = False
        self._stats['stopped_at'] = datetime.now()

        # Wait for scheduler thread
        if self._scheduler_thread and self._scheduler_thread.is_alive():
            self._scheduler_thread.join(timeout=timeout)

        # Shutdown thread pool
        if self._executor:
            self._executor.shutdown(wait=wait, cancel_futures=not wait)
            self._executor = None

        self.logger.info(f"{self.name} stopped")

    def schedule_once(self,
                      task_id: str,
                      func: Callable,
                      delay_seconds: float = 0,
                      priority: TaskPriority = TaskPriority.NORMAL,
                      *args, **kwargs) -> str:
        """
        Schedule a one-time task.

        Args:
            task_id: Unique identifier for the task
            func: Function to execute
            delay_seconds: Delay before execution
            priority: Task priority
            *args, **kwargs: Arguments to pass to func

        Returns:
            Task ID
        """
        task = ScheduledTask(
            next_run=time.time() + delay_seconds,
            priority=priority.value,
            task_id=task_id,
            func=func,
            interval_seconds=0,
            is_recurring=False,
            args=args,
            kwargs=kwargs
        )

        self._add_task(task)
        self.logger.debug(f"Scheduled one-time task: {task_id} (delay: {delay_seconds}s)")
        return task_id

    def schedule_recurring(self,
                           task_id: str,
                           func: Callable,
                           interval_seconds: float,
                           priority: TaskPriority = TaskPriority.NORMAL,
                           initial_delay: float = 0,
                           *args, **kwargs) -> str:
        """
        Schedule a recurring task.

        Args:
            task_id: Unique identifier for the task
            func: Function to execute
            interval_seconds: Interval between executions
            priority: Task priority
            initial_delay: Delay before first execution
            *args, **kwargs: Arguments to pass to func

        Returns:
            Task ID
        """
        task = ScheduledTask(
            next_run=time.time() + initial_delay,
            priority=priority.value,
            task_id=task_id,
            func=func,
            interval_seconds=interval_seconds,
            is_recurring=True,
            args=args,
            kwargs=kwargs
        )

        self._add_task(task)
        self.logger.debug(
            f"Scheduled recurring task: {task_id} "
            f"(interval: {interval_seconds}s, initial delay: {initial_delay}s)"
        )
        return task_id

    def cancel_task(self, task_id: str) -> bool:
        """
        Cancel a scheduled task.

        Args:
            task_id: ID of task to cancel

        Returns:
            True if task was found and cancelled
        """
        with self._task_lock:
            if task_id in self._tasks:
                self._tasks[task_id].state = TaskState.CANCELLED
                del self._tasks[task_id]
                self._stats['tasks_cancelled'] += 1
                self.logger.debug(f"Cancelled task: {task_id}")
                return True

            # Also try to cancel active future
            if task_id in self._active_futures:
                future = self._active_futures[task_id]
                if future.cancel():
                    self._stats['tasks_cancelled'] += 1
                    self.logger.debug(f"Cancelled running task: {task_id}")
                    return True

            return False

    def pause_task(self, task_id: str) -> bool:
        """
        Pause a recurring task (skips next executions until resumed).

        Args:
            task_id: ID of task to pause

        Returns:
            True if task was found and paused
        """
        with self._task_lock:
            if task_id in self._tasks:
                # Move next_run far into the future
                self._tasks[task_id].next_run = float('inf')
                self.logger.debug(f"Paused task: {task_id}")
                return True
            return False

    def resume_task(self, task_id: str) -> bool:
        """
        Resume a paused task.

        Args:
            task_id: ID of task to resume

        Returns:
            True if task was found and resumed
        """
        with self._task_lock:
            if task_id in self._tasks:
                task = self._tasks[task_id]
                task.next_run = time.time()
                # Re-add to queue
                heapq.heappush(self._task_queue, task)
                self.logger.debug(f"Resumed task: {task_id}")
                return True
            return False

    def run_now(self, task_id: str) -> bool:
        """
        Run a scheduled task immediately (doesn't affect schedule).

        Args:
            task_id: ID of task to run

        Returns:
            True if task was found and triggered
        """
        with self._task_lock:
            if task_id in self._tasks:
                task = self._tasks[task_id]
                # Create a copy for immediate execution
                self._execute_task(task)
                return True
            return False

    def get_task_info(self, task_id: str) -> Optional[Dict[str, Any]]:
        """
        Get information about a task.

        Args:
            task_id: ID of task

        Returns:
            Task information dictionary, or None if not found
        """
        with self._task_lock:
            if task_id not in self._tasks:
                return None

            task = self._tasks[task_id]
            return {
                'task_id': task.task_id,
                'state': task.state.value,
                'is_recurring': task.is_recurring,
                'interval_seconds': task.interval_seconds,
                'priority': TaskPriority(task.priority).name,
                'next_run': datetime.fromtimestamp(task.next_run).isoformat()
                            if task.next_run != float('inf') else 'paused',
                'last_run': datetime.fromtimestamp(task.last_run).isoformat()
                            if task.last_run else None,
                'run_count': task.run_count,
                'error_count': task.error_count,
                'last_error': task.last_error
            }

    def _add_task(self, task: ScheduledTask) -> None:
        """Add a task to the scheduler."""
        with self._task_lock:
            # Cancel existing task with same ID
            if task.task_id in self._tasks:
                self.cancel_task(task.task_id)

            self._tasks[task.task_id] = task
            heapq.heappush(self._task_queue, task)

    def _scheduler_loop(self) -> None:
        """Main scheduler loop."""
        while not self._stop_event.is_set():
            try:
                self._process_due_tasks()
                # Sleep briefly to avoid busy-waiting
                self._stop_event.wait(0.1)
            except Exception as e:
                self.logger.error(f"Scheduler loop error: {e}", exc_info=True)

    def _process_due_tasks(self) -> None:
        """Process all tasks that are due."""
        current_time = time.time()

        with self._task_lock:
            while self._task_queue:
                # Peek at next task
                task = self._task_queue[0]

                # Check if task was cancelled
                if task.task_id not in self._tasks:
                    heapq.heappop(self._task_queue)
                    continue

                # Check if task is paused
                if task.next_run == float('inf'):
                    heapq.heappop(self._task_queue)
                    continue

                # Check if task is due
                if task.next_run > current_time:
                    break

                # Pop and execute
                heapq.heappop(self._task_queue)
                self._execute_task(task)

    def _execute_task(self, task: ScheduledTask) -> None:
        """Execute a task in the thread pool."""
        if not self._executor or not self._running:
            return

        def task_wrapper():
            start_time = time.time()
            result = TaskResult(task_id=task.task_id, success=False)

            try:
                task.state = TaskState.RUNNING
                ret_value = task.func(*task.args, **task.kwargs)

                result.success = True
                result.result = ret_value
                task.state = TaskState.COMPLETED
                task.run_count += 1
                self._stats['tasks_executed'] += 1

            except Exception as e:
                result.success = False
                result.error = str(e)
                task.state = TaskState.FAILED
                task.error_count += 1
                task.last_error = str(e)
                self._stats['tasks_failed'] += 1
                self.logger.error(f"Task {task.task_id} failed: {e}")

                if self._on_task_error:
                    try:
                        self._on_task_error(task.task_id, e)
                    except Exception:
                        pass

            finally:
                execution_time = time.time() - start_time
                result.execution_time = execution_time
                task.last_run = time.time()
                self._stats['total_execution_time'] += execution_time

                # Store result
                self._results_history.append(result)
                if len(self._results_history) > self._max_results_history:
                    self._results_history = self._results_history[-self._max_results_history:]

                # Clean up future reference
                self._active_futures.pop(task.task_id, None)

                # Reschedule if recurring
                if task.is_recurring and task.task_id in self._tasks:
                    task.next_run = time.time() + task.interval_seconds
                    task.state = TaskState.PENDING
                    with self._task_lock:
                        heapq.heappush(self._task_queue, task)

                # Callback
                if self._on_task_complete:
                    try:
                        self._on_task_complete(result)
                    except Exception:
                        pass

        try:
            future = self._executor.submit(task_wrapper)
            self._active_futures[task.task_id] = future
        except Exception as e:
            self.logger.error(f"Failed to submit task {task.task_id}: {e}")

    def set_on_task_complete(self, callback: Callable[[TaskResult], None]) -> None:
        """Set callback for task completion."""
        self._on_task_complete = callback

    def set_on_task_error(self, callback: Callable[[str, Exception], None]) -> None:
        """Set callback for task errors."""
        self._on_task_error = callback

    def get_stats(self) -> Dict[str, Any]:
        """Get scheduler statistics."""
        with self._task_lock:
            return {
                **self._stats,
                'pending_tasks': len(self._tasks),
                'active_tasks': len(self._active_futures),
                'is_running': self._running,
                'uptime_seconds': (
                    (datetime.now() - self._stats['started_at']).total_seconds()
                    if self._stats['started_at'] and self._running else 0
                )
            }

    def get_pending_tasks(self) -> List[str]:
        """Get list of pending task IDs."""
        with self._task_lock:
            return list(self._tasks.keys())

    def get_recent_results(self, count: int = 10) -> List[TaskResult]:
        """Get recent task results."""
        return self._results_history[-count:]

    @property
    def is_running(self) -> bool:
        """Check if scheduler is running."""
        return self._running


# Global scheduler instance
_scheduler: Optional[TaskScheduler] = None


def get_task_scheduler() -> TaskScheduler:
    """
    Get the global task scheduler instance.

    Creates one if it doesn't exist.

    Returns:
        Global TaskScheduler instance
    """
    global _scheduler
    if _scheduler is None:
        _scheduler = TaskScheduler()
    return _scheduler


def init_task_scheduler(max_workers: int = 4,
                        logger: Optional[logging.Logger] = None,
                        auto_start: bool = True) -> TaskScheduler:
    """
    Initialize the global task scheduler.

    Args:
        max_workers: Maximum concurrent workers
        logger: Logger instance
        auto_start: Whether to start the scheduler immediately

    Returns:
        Initialized TaskScheduler
    """
    global _scheduler
    _scheduler = TaskScheduler(max_workers=max_workers, logger=logger)
    if auto_start:
        _scheduler.start()
    return _scheduler


def shutdown_task_scheduler(wait: bool = True) -> None:
    """
    Shutdown the global task scheduler.

    Args:
        wait: Whether to wait for running tasks
    """
    global _scheduler
    if _scheduler is not None:
        _scheduler.stop(wait=wait)
        _scheduler = None
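
The ScheduledTask docstring above states that heap ordering is by (next_run, priority). The short check below illustrates the tie-break: two tasks due at the same moment pop in priority order. It is a standalone sketch, not part of the package; the import path core.task_scheduler and the use of print as a placeholder func are assumptions.

# Sketch: ScheduledTask heap ordering (next_run first, then priority).
# Only next_run and priority take part in comparisons (compare=True fields).
import heapq
import time

from core.task_scheduler import ScheduledTask, TaskPriority  # import path assumed

now = time.time()
queue = []
heapq.heappush(queue, ScheduledTask(next_run=now + 5.0, priority=TaskPriority.LOW.value,
                                    task_id='later', func=print))
heapq.heappush(queue, ScheduledTask(next_run=now + 1.0, priority=TaskPriority.HIGH.value,
                                    task_id='soon', func=print))
heapq.heappush(queue, ScheduledTask(next_run=now + 1.0, priority=TaskPriority.CRITICAL.value,
                                    task_id='soon-critical', func=print))

print([heapq.heappop(queue).task_id for _ in range(3)])
# Expected: ['soon-critical', 'soon', 'later']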
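
For the module as a whole, the Usage block in the TaskScheduler docstring and the module-level helpers at the end of the file suggest the following minimal wiring. This is a hedged sketch rather than code shipped in the package: the core.task_scheduler import path, the job functions, and the timings are assumptions made for illustration.

# Sketch: wiring the global scheduler helpers together (names from the file above;
# import path, job bodies, and intervals are illustrative assumptions).
import logging
import time

from core.task_scheduler import (  # import path assumed from the package layout
    TaskPriority,
    init_task_scheduler,
    shutdown_task_scheduler,
)


def cleanup_stats_cache():
    # Placeholder job body; any zero-argument callable works here.
    print("cleaning stats cache")


def run_backup(target: str):
    # Extra keyword arguments passed to schedule_* are forwarded to the callable.
    print(f"backing up {target}")


logging.basicConfig(level=logging.DEBUG)
scheduler = init_task_scheduler(max_workers=2, auto_start=True)

# One-shot task, due roughly one second from now.
scheduler.schedule_once('cleanup', cleanup_stats_cache, delay_seconds=1.0)

# Recurring high-priority task every two seconds; 'target' lands in task.kwargs.
scheduler.schedule_recurring('backup', run_backup, interval_seconds=2.0,
                             priority=TaskPriority.HIGH, initial_delay=0.5,
                             target='settings.db')

time.sleep(6)
print(scheduler.get_task_info('backup'))
print(scheduler.get_stats())

shutdown_task_scheduler(wait=True)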