pomera-ai-commander 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +680 -0
- package/bin/pomera-ai-commander.js +62 -0
- package/core/__init__.py +66 -0
- package/core/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/__pycache__/app_context.cpython-313.pyc +0 -0
- package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
- package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
- package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
- package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/error_service.cpython-313.pyc +0 -0
- package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
- package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
- package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
- package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
- package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
- package/core/app_context.py +482 -0
- package/core/async_text_processor.py +422 -0
- package/core/backup_manager.py +656 -0
- package/core/backup_recovery_manager.py +1034 -0
- package/core/content_hash_cache.py +509 -0
- package/core/context_menu.py +313 -0
- package/core/data_validator.py +1067 -0
- package/core/database_connection_manager.py +745 -0
- package/core/database_curl_settings_manager.py +609 -0
- package/core/database_promera_ai_settings_manager.py +447 -0
- package/core/database_schema.py +412 -0
- package/core/database_schema_manager.py +396 -0
- package/core/database_settings_manager.py +1508 -0
- package/core/database_settings_manager_interface.py +457 -0
- package/core/dialog_manager.py +735 -0
- package/core/efficient_line_numbers.py +511 -0
- package/core/error_handler.py +747 -0
- package/core/error_service.py +431 -0
- package/core/event_consolidator.py +512 -0
- package/core/mcp/__init__.py +43 -0
- package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
- package/core/mcp/protocol.py +288 -0
- package/core/mcp/schema.py +251 -0
- package/core/mcp/server_stdio.py +299 -0
- package/core/mcp/tool_registry.py +2345 -0
- package/core/memory_efficient_text_widget.py +712 -0
- package/core/migration_manager.py +915 -0
- package/core/migration_test_suite.py +1086 -0
- package/core/migration_validator.py +1144 -0
- package/core/optimized_find_replace.py +715 -0
- package/core/optimized_pattern_engine.py +424 -0
- package/core/optimized_search_highlighter.py +553 -0
- package/core/performance_monitor.py +675 -0
- package/core/persistence_manager.py +713 -0
- package/core/progressive_stats_calculator.py +632 -0
- package/core/regex_pattern_cache.py +530 -0
- package/core/regex_pattern_library.py +351 -0
- package/core/search_operation_manager.py +435 -0
- package/core/settings_defaults_registry.py +1087 -0
- package/core/settings_integrity_validator.py +1112 -0
- package/core/settings_serializer.py +558 -0
- package/core/settings_validator.py +1824 -0
- package/core/smart_stats_calculator.py +710 -0
- package/core/statistics_update_manager.py +619 -0
- package/core/stats_config_manager.py +858 -0
- package/core/streaming_text_handler.py +723 -0
- package/core/task_scheduler.py +596 -0
- package/core/update_pattern_library.py +169 -0
- package/core/visibility_monitor.py +596 -0
- package/core/widget_cache.py +498 -0
- package/mcp.json +61 -0
- package/package.json +57 -0
- package/pomera.py +7483 -0
- package/pomera_mcp_server.py +144 -0
- package/tools/__init__.py +5 -0
- package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
- package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
- package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
- package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
- package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
- package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
- package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
- package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
- package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
- package/tools/ai_tools.py +2892 -0
- package/tools/ascii_art_generator.py +353 -0
- package/tools/base64_tools.py +184 -0
- package/tools/base_tool.py +511 -0
- package/tools/case_tool.py +309 -0
- package/tools/column_tools.py +396 -0
- package/tools/cron_tool.py +885 -0
- package/tools/curl_history.py +601 -0
- package/tools/curl_processor.py +1208 -0
- package/tools/curl_settings.py +503 -0
- package/tools/curl_tool.py +5467 -0
- package/tools/diff_viewer.py +1072 -0
- package/tools/email_extraction_tool.py +249 -0
- package/tools/email_header_analyzer.py +426 -0
- package/tools/extraction_tools.py +250 -0
- package/tools/find_replace.py +1751 -0
- package/tools/folder_file_reporter.py +1463 -0
- package/tools/folder_file_reporter_adapter.py +480 -0
- package/tools/generator_tools.py +1217 -0
- package/tools/hash_generator.py +256 -0
- package/tools/html_tool.py +657 -0
- package/tools/huggingface_helper.py +449 -0
- package/tools/jsonxml_tool.py +730 -0
- package/tools/line_tools.py +419 -0
- package/tools/list_comparator.py +720 -0
- package/tools/markdown_tools.py +562 -0
- package/tools/mcp_widget.py +1417 -0
- package/tools/notes_widget.py +973 -0
- package/tools/number_base_converter.py +373 -0
- package/tools/regex_extractor.py +572 -0
- package/tools/slug_generator.py +311 -0
- package/tools/sorter_tools.py +459 -0
- package/tools/string_escape_tool.py +393 -0
- package/tools/text_statistics_tool.py +366 -0
- package/tools/text_wrapper.py +431 -0
- package/tools/timestamp_converter.py +422 -0
- package/tools/tool_loader.py +710 -0
- package/tools/translator_tools.py +523 -0
- package/tools/url_link_extractor.py +262 -0
- package/tools/url_parser.py +205 -0
- package/tools/whitespace_tools.py +356 -0
- package/tools/word_frequency_counter.py +147 -0
@@ -0,0 +1,596 @@
+"""
+Task Scheduler - Centralized background task management.
+
+Consolidates multiple background threads into a single managed scheduler
+to reduce overhead and improve resource management.
+
+Author: Pomera AI Commander Team
+"""
+
+import threading
+import time
+import logging
+from typing import Dict, Callable, Optional, Any, List
+from dataclasses import dataclass, field
+from concurrent.futures import ThreadPoolExecutor, Future
+from enum import Enum
+import heapq
+from datetime import datetime
+
+
+class TaskPriority(Enum):
+    """Task priority levels (lower value = higher priority)."""
+    CRITICAL = 0
+    HIGH = 1
+    NORMAL = 2
+    LOW = 3
+
+
+class TaskState(Enum):
+    """Task execution states."""
+    PENDING = "pending"
+    RUNNING = "running"
+    COMPLETED = "completed"
+    FAILED = "failed"
+    CANCELLED = "cancelled"
+
+
+@dataclass(order=True)
+class ScheduledTask:
+    """
+    A scheduled task with timing and priority.
+
+    Ordering is by (next_run, priority) for heap queue.
+    """
+    next_run: float = field(compare=True)
+    priority: int = field(compare=True)
+    task_id: str = field(compare=False)
+    func: Callable = field(compare=False)
+    interval_seconds: float = field(compare=False, default=0)
+    is_recurring: bool = field(compare=False, default=False)
+    args: tuple = field(compare=False, default_factory=tuple)
+    kwargs: dict = field(compare=False, default_factory=dict)
+    state: TaskState = field(compare=False, default=TaskState.PENDING)
+    last_run: Optional[float] = field(compare=False, default=None)
+    run_count: int = field(compare=False, default=0)
+    error_count: int = field(compare=False, default=0)
+    last_error: Optional[str] = field(compare=False, default=None)
+
+
+@dataclass
+class TaskResult:
+    """Result of a task execution."""
+    task_id: str
+    success: bool
+    result: Any = None
+    error: Optional[str] = None
+    execution_time: float = 0.0
+    timestamp: datetime = field(default_factory=datetime.now)
+
+
+class TaskScheduler:
+    """
+    Centralized task scheduler for background operations.
+
+    Consolidates:
+    - Periodic cleanup tasks (stats cache, backup manager)
+    - Async text processing
+    - Backup operations
+    - Any other background work
+
+    Benefits:
+    - Single thread pool instead of multiple daemon threads
+    - Proper shutdown handling
+    - Task prioritization
+    - Better resource utilization
+    - Task monitoring and statistics
+
+    Usage:
+        scheduler = TaskScheduler()
+        scheduler.start()
+
+        # One-time task
+        scheduler.schedule_once('cleanup', cleanup_func, delay_seconds=60)
+
+        # Recurring task
+        scheduler.schedule_recurring('backup', backup_func, interval_seconds=300)
+
+        # Later...
+        scheduler.stop()
+    """
+
+    def __init__(self,
+                 max_workers: int = 4,
+                 logger: Optional[logging.Logger] = None,
+                 name: str = "TaskScheduler"):
+        """
+        Initialize the task scheduler.
+
+        Args:
+            max_workers: Maximum concurrent worker threads
+            logger: Logger instance (creates one if not provided)
+            name: Name for the scheduler (used in thread names)
+        """
+        self.name = name
+        self.logger = logger or logging.getLogger(__name__)
+        self.max_workers = max_workers
+
+        # Thread pool for task execution
+        self._executor: Optional[ThreadPoolExecutor] = None
+
+        # Scheduled tasks
+        self._task_queue: List[ScheduledTask] = []  # heap queue
+        self._tasks: Dict[str, ScheduledTask] = {}
+        self._task_lock = threading.RLock()
+
+        # Scheduler thread
+        self._scheduler_thread: Optional[threading.Thread] = None
+        self._stop_event = threading.Event()
+        self._running = False
+
+        # Active futures
+        self._active_futures: Dict[str, Future] = {}
+
+        # Task results history
+        self._results_history: List[TaskResult] = []
+        self._max_results_history = 100
+
+        # Statistics
+        self._stats = {
+            'tasks_executed': 0,
+            'tasks_failed': 0,
+            'tasks_cancelled': 0,
+            'total_execution_time': 0.0,
+            'started_at': None,
+            'stopped_at': None
+        }
+
+        # Callbacks
+        self._on_task_complete: Optional[Callable[[TaskResult], None]] = None
+        self._on_task_error: Optional[Callable[[str, Exception], None]] = None
+
+    def start(self) -> None:
+        """Start the scheduler."""
+        if self._running:
+            self.logger.warning(f"{self.name} is already running")
+            return
+
+        self._stop_event.clear()
+        self._running = True
+        self._stats['started_at'] = datetime.now()
+        self._stats['stopped_at'] = None
+
+        # Create thread pool
+        self._executor = ThreadPoolExecutor(
+            max_workers=self.max_workers,
+            thread_name_prefix=f"{self.name}-Worker"
+        )
+
+        # Start scheduler thread
+        self._scheduler_thread = threading.Thread(
+            target=self._scheduler_loop,
+            daemon=True,
+            name=f"{self.name}-Main"
+        )
+        self._scheduler_thread.start()
+
+        self.logger.info(f"{self.name} started with {self.max_workers} workers")
+
+    def stop(self, wait: bool = True, timeout: float = 10.0) -> None:
+        """
+        Stop the scheduler.
+
+        Args:
+            wait: Whether to wait for running tasks to complete
+            timeout: Maximum time to wait for shutdown
+        """
+        if not self._running:
+            return
+
+        self.logger.info(f"Stopping {self.name}...")
+        self._stop_event.set()
+        self._running = False
+        self._stats['stopped_at'] = datetime.now()
+
+        # Wait for scheduler thread
+        if self._scheduler_thread and self._scheduler_thread.is_alive():
+            self._scheduler_thread.join(timeout=timeout)
+
+        # Shutdown thread pool
+        if self._executor:
+            self._executor.shutdown(wait=wait, cancel_futures=not wait)
+            self._executor = None
+
+        self.logger.info(f"{self.name} stopped")
+
+    def schedule_once(self,
+                      task_id: str,
+                      func: Callable,
+                      delay_seconds: float = 0,
+                      priority: TaskPriority = TaskPriority.NORMAL,
+                      *args, **kwargs) -> str:
+        """
+        Schedule a one-time task.
+
+        Args:
+            task_id: Unique identifier for the task
+            func: Function to execute
+            delay_seconds: Delay before execution
+            priority: Task priority
+            *args, **kwargs: Arguments to pass to func
+
+        Returns:
+            Task ID
+        """
+        task = ScheduledTask(
+            next_run=time.time() + delay_seconds,
+            priority=priority.value,
+            task_id=task_id,
+            func=func,
+            interval_seconds=0,
+            is_recurring=False,
+            args=args,
+            kwargs=kwargs
+        )
+
+        self._add_task(task)
+        self.logger.debug(f"Scheduled one-time task: {task_id} (delay: {delay_seconds}s)")
+        return task_id
+
+    def schedule_recurring(self,
+                           task_id: str,
+                           func: Callable,
+                           interval_seconds: float,
+                           priority: TaskPriority = TaskPriority.NORMAL,
+                           initial_delay: float = 0,
+                           *args, **kwargs) -> str:
+        """
+        Schedule a recurring task.
+
+        Args:
+            task_id: Unique identifier for the task
+            func: Function to execute
+            interval_seconds: Interval between executions
+            priority: Task priority
+            initial_delay: Delay before first execution
+            *args, **kwargs: Arguments to pass to func
+
+        Returns:
+            Task ID
+        """
+        task = ScheduledTask(
+            next_run=time.time() + initial_delay,
+            priority=priority.value,
+            task_id=task_id,
+            func=func,
+            interval_seconds=interval_seconds,
+            is_recurring=True,
+            args=args,
+            kwargs=kwargs
+        )
+
+        self._add_task(task)
+        self.logger.debug(
+            f"Scheduled recurring task: {task_id} "
+            f"(interval: {interval_seconds}s, initial delay: {initial_delay}s)"
+        )
+        return task_id
+
+    def cancel_task(self, task_id: str) -> bool:
+        """
+        Cancel a scheduled task.
+
+        Args:
+            task_id: ID of task to cancel
+
+        Returns:
+            True if task was found and cancelled
+        """
+        with self._task_lock:
+            if task_id in self._tasks:
+                self._tasks[task_id].state = TaskState.CANCELLED
+                del self._tasks[task_id]
+                self._stats['tasks_cancelled'] += 1
+                self.logger.debug(f"Cancelled task: {task_id}")
+                return True
+
+            # Also try to cancel active future
+            if task_id in self._active_futures:
+                future = self._active_futures[task_id]
+                if future.cancel():
+                    self._stats['tasks_cancelled'] += 1
+                    self.logger.debug(f"Cancelled running task: {task_id}")
+                    return True
+
+        return False
+
+    def pause_task(self, task_id: str) -> bool:
+        """
+        Pause a recurring task (skips next executions until resumed).
+
+        Args:
+            task_id: ID of task to pause
+
+        Returns:
+            True if task was found and paused
+        """
+        with self._task_lock:
+            if task_id in self._tasks:
+                # Move next_run far into the future
+                self._tasks[task_id].next_run = float('inf')
+                self.logger.debug(f"Paused task: {task_id}")
+                return True
+        return False
+
+    def resume_task(self, task_id: str) -> bool:
+        """
+        Resume a paused task.
+
+        Args:
+            task_id: ID of task to resume
+
+        Returns:
+            True if task was found and resumed
+        """
+        with self._task_lock:
+            if task_id in self._tasks:
+                task = self._tasks[task_id]
+                task.next_run = time.time()
+                # Re-add to queue
+                heapq.heappush(self._task_queue, task)
+                self.logger.debug(f"Resumed task: {task_id}")
+                return True
+        return False
+
+    def run_now(self, task_id: str) -> bool:
+        """
+        Run a scheduled task immediately (doesn't affect schedule).
+
+        Args:
+            task_id: ID of task to run
+
+        Returns:
+            True if task was found and triggered
+        """
+        with self._task_lock:
+            if task_id in self._tasks:
+                task = self._tasks[task_id]
+                # Create a copy for immediate execution
+                self._execute_task(task)
+                return True
+        return False
+
+    def get_task_info(self, task_id: str) -> Optional[Dict[str, Any]]:
+        """
+        Get information about a task.
+
+        Args:
+            task_id: ID of task
+
+        Returns:
+            Task information dictionary, or None if not found
+        """
+        with self._task_lock:
+            if task_id not in self._tasks:
+                return None
+
+            task = self._tasks[task_id]
+            return {
+                'task_id': task.task_id,
+                'state': task.state.value,
+                'is_recurring': task.is_recurring,
+                'interval_seconds': task.interval_seconds,
+                'priority': TaskPriority(task.priority).name,
+                'next_run': datetime.fromtimestamp(task.next_run).isoformat()
+                    if task.next_run != float('inf') else 'paused',
+                'last_run': datetime.fromtimestamp(task.last_run).isoformat()
+                    if task.last_run else None,
+                'run_count': task.run_count,
+                'error_count': task.error_count,
+                'last_error': task.last_error
+            }
+
+    def _add_task(self, task: ScheduledTask) -> None:
+        """Add a task to the scheduler."""
+        with self._task_lock:
+            # Cancel existing task with same ID
+            if task.task_id in self._tasks:
+                self.cancel_task(task.task_id)
+
+            self._tasks[task.task_id] = task
+            heapq.heappush(self._task_queue, task)
+
+    def _scheduler_loop(self) -> None:
+        """Main scheduler loop."""
+        while not self._stop_event.is_set():
+            try:
+                self._process_due_tasks()
+                # Sleep briefly to avoid busy-waiting
+                self._stop_event.wait(0.1)
+            except Exception as e:
+                self.logger.error(f"Scheduler loop error: {e}", exc_info=True)
+
+    def _process_due_tasks(self) -> None:
+        """Process all tasks that are due."""
+        current_time = time.time()
+
+        with self._task_lock:
+            while self._task_queue:
+                # Peek at next task
+                task = self._task_queue[0]
+
+                # Check if task was cancelled
+                if task.task_id not in self._tasks:
+                    heapq.heappop(self._task_queue)
+                    continue
+
+                # Check if task is paused
+                if task.next_run == float('inf'):
+                    heapq.heappop(self._task_queue)
+                    continue
+
+                # Check if task is due
+                if task.next_run > current_time:
+                    break
+
+                # Pop and execute
+                heapq.heappop(self._task_queue)
+                self._execute_task(task)
+
+    def _execute_task(self, task: ScheduledTask) -> None:
+        """Execute a task in the thread pool."""
+        if not self._executor or not self._running:
+            return
+
+        def task_wrapper():
+            start_time = time.time()
+            result = TaskResult(task_id=task.task_id, success=False)
+
+            try:
+                task.state = TaskState.RUNNING
+                ret_value = task.func(*task.args, **task.kwargs)
+
+                result.success = True
+                result.result = ret_value
+                task.state = TaskState.COMPLETED
+                task.run_count += 1
+                self._stats['tasks_executed'] += 1
+
+            except Exception as e:
+                result.success = False
+                result.error = str(e)
+                task.state = TaskState.FAILED
+                task.error_count += 1
+                task.last_error = str(e)
+                self._stats['tasks_failed'] += 1
+                self.logger.error(f"Task {task.task_id} failed: {e}")
+
+                if self._on_task_error:
+                    try:
+                        self._on_task_error(task.task_id, e)
+                    except Exception:
+                        pass
+
+            finally:
+                execution_time = time.time() - start_time
+                result.execution_time = execution_time
+                task.last_run = time.time()
+                self._stats['total_execution_time'] += execution_time
+
+                # Store result
+                self._results_history.append(result)
+                if len(self._results_history) > self._max_results_history:
+                    self._results_history = self._results_history[-self._max_results_history:]
+
+                # Clean up future reference
+                self._active_futures.pop(task.task_id, None)
+
+                # Reschedule if recurring
+                if task.is_recurring and task.task_id in self._tasks:
+                    task.next_run = time.time() + task.interval_seconds
+                    task.state = TaskState.PENDING
+                    with self._task_lock:
+                        heapq.heappush(self._task_queue, task)
+
+                # Callback
+                if self._on_task_complete:
+                    try:
+                        self._on_task_complete(result)
+                    except Exception:
+                        pass
+
+        try:
+            future = self._executor.submit(task_wrapper)
+            self._active_futures[task.task_id] = future
+        except Exception as e:
+            self.logger.error(f"Failed to submit task {task.task_id}: {e}")
+
+    def set_on_task_complete(self, callback: Callable[[TaskResult], None]) -> None:
+        """Set callback for task completion."""
+        self._on_task_complete = callback
+
+    def set_on_task_error(self, callback: Callable[[str, Exception], None]) -> None:
+        """Set callback for task errors."""
+        self._on_task_error = callback
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get scheduler statistics."""
+        with self._task_lock:
+            return {
+                **self._stats,
+                'pending_tasks': len(self._tasks),
+                'active_tasks': len(self._active_futures),
+                'is_running': self._running,
+                'uptime_seconds': (
+                    (datetime.now() - self._stats['started_at']).total_seconds()
+                    if self._stats['started_at'] and self._running else 0
+                )
+            }
+
+    def get_pending_tasks(self) -> List[str]:
+        """Get list of pending task IDs."""
+        with self._task_lock:
+            return list(self._tasks.keys())
+
+    def get_recent_results(self, count: int = 10) -> List[TaskResult]:
+        """Get recent task results."""
+        return self._results_history[-count:]
+
+    @property
+    def is_running(self) -> bool:
+        """Check if scheduler is running."""
+        return self._running
+
+
+# Global scheduler instance
+_scheduler: Optional[TaskScheduler] = None
+
+
+def get_task_scheduler() -> TaskScheduler:
+    """
+    Get the global task scheduler instance.
+
+    Creates one if it doesn't exist.
+
+    Returns:
+        Global TaskScheduler instance
+    """
+    global _scheduler
+    if _scheduler is None:
+        _scheduler = TaskScheduler()
+    return _scheduler
+
+
+def init_task_scheduler(max_workers: int = 4,
+                        logger: Optional[logging.Logger] = None,
+                        auto_start: bool = True) -> TaskScheduler:
+    """
+    Initialize the global task scheduler.
+
+    Args:
+        max_workers: Maximum concurrent workers
+        logger: Logger instance
+        auto_start: Whether to start the scheduler immediately
+
+    Returns:
+        Initialized TaskScheduler
+    """
+    global _scheduler
+    _scheduler = TaskScheduler(max_workers=max_workers, logger=logger)
+    if auto_start:
+        _scheduler.start()
+    return _scheduler
+
+
+def shutdown_task_scheduler(wait: bool = True) -> None:
+    """
+    Shutdown the global task scheduler.
+
+    Args:
+        wait: Whether to wait for running tasks
+    """
+    global _scheduler
+    if _scheduler is not None:
+        _scheduler.stop(wait=wait)
+        _scheduler = None
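
For orientation only, here is a minimal usage sketch of the module-level helpers shown in the diff above. It is not part of the published package; the core.task_scheduler import path is an assumption based on the file layout (package/core/task_scheduler.py), and the lambda callables and task IDs are illustrative.

import logging
import time

from core.task_scheduler import (
    TaskPriority,
    get_task_scheduler,
    init_task_scheduler,
    shutdown_task_scheduler,
)

logging.basicConfig(level=logging.DEBUG)

# Create and start the shared scheduler (auto_start=True starts the worker pool).
scheduler = init_task_scheduler(max_workers=2)

# Recurring work: run every 5 minutes, first run 60 seconds from now.
scheduler.schedule_recurring(
    'cache_cleanup',
    lambda: print("cleaning cache"),
    interval_seconds=300,
    priority=TaskPriority.LOW,
    initial_delay=60,
)

# One-off work: fire once, 10 seconds from now.
scheduler.schedule_once('warm_up', lambda: print("warming up"), delay_seconds=10)

# Elsewhere in the application the same instance is reachable via the accessor.
assert get_task_scheduler() is scheduler
print(scheduler.get_stats())

time.sleep(12)  # give the one-off task a chance to fire before shutdown

# On exit, stop the pool; wait=True lets in-flight tasks finish.
shutdown_task_scheduler(wait=True)

Because the module keeps a single global _scheduler, get_task_scheduler() returns the same object that init_task_scheduler() created, so tools elsewhere in the package can schedule work without passing the scheduler around.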