mcli-framework 7.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mcli-framework might be problematic. Click here for more details.

Files changed (186)
  1. mcli/app/chat_cmd.py +42 -0
  2. mcli/app/commands_cmd.py +226 -0
  3. mcli/app/completion_cmd.py +216 -0
  4. mcli/app/completion_helpers.py +288 -0
  5. mcli/app/cron_test_cmd.py +697 -0
  6. mcli/app/logs_cmd.py +419 -0
  7. mcli/app/main.py +492 -0
  8. mcli/app/model/model.py +1060 -0
  9. mcli/app/model_cmd.py +227 -0
  10. mcli/app/redis_cmd.py +269 -0
  11. mcli/app/video/video.py +1114 -0
  12. mcli/app/visual_cmd.py +303 -0
  13. mcli/chat/chat.py +2409 -0
  14. mcli/chat/command_rag.py +514 -0
  15. mcli/chat/enhanced_chat.py +652 -0
  16. mcli/chat/system_controller.py +1010 -0
  17. mcli/chat/system_integration.py +1016 -0
  18. mcli/cli.py +25 -0
  19. mcli/config.toml +20 -0
  20. mcli/lib/api/api.py +586 -0
  21. mcli/lib/api/daemon_client.py +203 -0
  22. mcli/lib/api/daemon_client_local.py +44 -0
  23. mcli/lib/api/daemon_decorator.py +217 -0
  24. mcli/lib/api/mcli_decorators.py +1032 -0
  25. mcli/lib/auth/auth.py +85 -0
  26. mcli/lib/auth/aws_manager.py +85 -0
  27. mcli/lib/auth/azure_manager.py +91 -0
  28. mcli/lib/auth/credential_manager.py +192 -0
  29. mcli/lib/auth/gcp_manager.py +93 -0
  30. mcli/lib/auth/key_manager.py +117 -0
  31. mcli/lib/auth/mcli_manager.py +93 -0
  32. mcli/lib/auth/token_manager.py +75 -0
  33. mcli/lib/auth/token_util.py +1011 -0
  34. mcli/lib/config/config.py +47 -0
  35. mcli/lib/discovery/__init__.py +1 -0
  36. mcli/lib/discovery/command_discovery.py +274 -0
  37. mcli/lib/erd/erd.py +1345 -0
  38. mcli/lib/erd/generate_graph.py +453 -0
  39. mcli/lib/files/files.py +76 -0
  40. mcli/lib/fs/fs.py +109 -0
  41. mcli/lib/lib.py +29 -0
  42. mcli/lib/logger/logger.py +611 -0
  43. mcli/lib/performance/optimizer.py +409 -0
  44. mcli/lib/performance/rust_bridge.py +502 -0
  45. mcli/lib/performance/uvloop_config.py +154 -0
  46. mcli/lib/pickles/pickles.py +50 -0
  47. mcli/lib/search/cached_vectorizer.py +479 -0
  48. mcli/lib/services/data_pipeline.py +460 -0
  49. mcli/lib/services/lsh_client.py +441 -0
  50. mcli/lib/services/redis_service.py +387 -0
  51. mcli/lib/shell/shell.py +137 -0
  52. mcli/lib/toml/toml.py +33 -0
  53. mcli/lib/ui/styling.py +47 -0
  54. mcli/lib/ui/visual_effects.py +634 -0
  55. mcli/lib/watcher/watcher.py +185 -0
  56. mcli/ml/api/app.py +215 -0
  57. mcli/ml/api/middleware.py +224 -0
  58. mcli/ml/api/routers/admin_router.py +12 -0
  59. mcli/ml/api/routers/auth_router.py +244 -0
  60. mcli/ml/api/routers/backtest_router.py +12 -0
  61. mcli/ml/api/routers/data_router.py +12 -0
  62. mcli/ml/api/routers/model_router.py +302 -0
  63. mcli/ml/api/routers/monitoring_router.py +12 -0
  64. mcli/ml/api/routers/portfolio_router.py +12 -0
  65. mcli/ml/api/routers/prediction_router.py +267 -0
  66. mcli/ml/api/routers/trade_router.py +12 -0
  67. mcli/ml/api/routers/websocket_router.py +76 -0
  68. mcli/ml/api/schemas.py +64 -0
  69. mcli/ml/auth/auth_manager.py +425 -0
  70. mcli/ml/auth/models.py +154 -0
  71. mcli/ml/auth/permissions.py +302 -0
  72. mcli/ml/backtesting/backtest_engine.py +502 -0
  73. mcli/ml/backtesting/performance_metrics.py +393 -0
  74. mcli/ml/cache.py +400 -0
  75. mcli/ml/cli/main.py +398 -0
  76. mcli/ml/config/settings.py +394 -0
  77. mcli/ml/configs/dvc_config.py +230 -0
  78. mcli/ml/configs/mlflow_config.py +131 -0
  79. mcli/ml/configs/mlops_manager.py +293 -0
  80. mcli/ml/dashboard/app.py +532 -0
  81. mcli/ml/dashboard/app_integrated.py +738 -0
  82. mcli/ml/dashboard/app_supabase.py +560 -0
  83. mcli/ml/dashboard/app_training.py +615 -0
  84. mcli/ml/dashboard/cli.py +51 -0
  85. mcli/ml/data_ingestion/api_connectors.py +501 -0
  86. mcli/ml/data_ingestion/data_pipeline.py +567 -0
  87. mcli/ml/data_ingestion/stream_processor.py +512 -0
  88. mcli/ml/database/migrations/env.py +94 -0
  89. mcli/ml/database/models.py +667 -0
  90. mcli/ml/database/session.py +200 -0
  91. mcli/ml/experimentation/ab_testing.py +845 -0
  92. mcli/ml/features/ensemble_features.py +607 -0
  93. mcli/ml/features/political_features.py +676 -0
  94. mcli/ml/features/recommendation_engine.py +809 -0
  95. mcli/ml/features/stock_features.py +573 -0
  96. mcli/ml/features/test_feature_engineering.py +346 -0
  97. mcli/ml/logging.py +85 -0
  98. mcli/ml/mlops/data_versioning.py +518 -0
  99. mcli/ml/mlops/experiment_tracker.py +377 -0
  100. mcli/ml/mlops/model_serving.py +481 -0
  101. mcli/ml/mlops/pipeline_orchestrator.py +614 -0
  102. mcli/ml/models/base_models.py +324 -0
  103. mcli/ml/models/ensemble_models.py +675 -0
  104. mcli/ml/models/recommendation_models.py +474 -0
  105. mcli/ml/models/test_models.py +487 -0
  106. mcli/ml/monitoring/drift_detection.py +676 -0
  107. mcli/ml/monitoring/metrics.py +45 -0
  108. mcli/ml/optimization/portfolio_optimizer.py +834 -0
  109. mcli/ml/preprocessing/data_cleaners.py +451 -0
  110. mcli/ml/preprocessing/feature_extractors.py +491 -0
  111. mcli/ml/preprocessing/ml_pipeline.py +382 -0
  112. mcli/ml/preprocessing/politician_trading_preprocessor.py +569 -0
  113. mcli/ml/preprocessing/test_preprocessing.py +294 -0
  114. mcli/ml/scripts/populate_sample_data.py +200 -0
  115. mcli/ml/tasks.py +400 -0
  116. mcli/ml/tests/test_integration.py +429 -0
  117. mcli/ml/tests/test_training_dashboard.py +387 -0
  118. mcli/public/oi/oi.py +15 -0
  119. mcli/public/public.py +4 -0
  120. mcli/self/self_cmd.py +1246 -0
  121. mcli/workflow/daemon/api_daemon.py +800 -0
  122. mcli/workflow/daemon/async_command_database.py +681 -0
  123. mcli/workflow/daemon/async_process_manager.py +591 -0
  124. mcli/workflow/daemon/client.py +530 -0
  125. mcli/workflow/daemon/commands.py +1196 -0
  126. mcli/workflow/daemon/daemon.py +905 -0
  127. mcli/workflow/daemon/daemon_api.py +59 -0
  128. mcli/workflow/daemon/enhanced_daemon.py +571 -0
  129. mcli/workflow/daemon/process_cli.py +244 -0
  130. mcli/workflow/daemon/process_manager.py +439 -0
  131. mcli/workflow/daemon/test_daemon.py +275 -0
  132. mcli/workflow/dashboard/dashboard_cmd.py +113 -0
  133. mcli/workflow/docker/docker.py +0 -0
  134. mcli/workflow/file/file.py +100 -0
  135. mcli/workflow/gcloud/config.toml +21 -0
  136. mcli/workflow/gcloud/gcloud.py +58 -0
  137. mcli/workflow/git_commit/ai_service.py +328 -0
  138. mcli/workflow/git_commit/commands.py +430 -0
  139. mcli/workflow/lsh_integration.py +355 -0
  140. mcli/workflow/model_service/client.py +594 -0
  141. mcli/workflow/model_service/download_and_run_efficient_models.py +288 -0
  142. mcli/workflow/model_service/lightweight_embedder.py +397 -0
  143. mcli/workflow/model_service/lightweight_model_server.py +714 -0
  144. mcli/workflow/model_service/lightweight_test.py +241 -0
  145. mcli/workflow/model_service/model_service.py +1955 -0
  146. mcli/workflow/model_service/ollama_efficient_runner.py +425 -0
  147. mcli/workflow/model_service/pdf_processor.py +386 -0
  148. mcli/workflow/model_service/test_efficient_runner.py +234 -0
  149. mcli/workflow/model_service/test_example.py +315 -0
  150. mcli/workflow/model_service/test_integration.py +131 -0
  151. mcli/workflow/model_service/test_new_features.py +149 -0
  152. mcli/workflow/openai/openai.py +99 -0
  153. mcli/workflow/politician_trading/commands.py +1790 -0
  154. mcli/workflow/politician_trading/config.py +134 -0
  155. mcli/workflow/politician_trading/connectivity.py +490 -0
  156. mcli/workflow/politician_trading/data_sources.py +395 -0
  157. mcli/workflow/politician_trading/database.py +410 -0
  158. mcli/workflow/politician_trading/demo.py +248 -0
  159. mcli/workflow/politician_trading/models.py +165 -0
  160. mcli/workflow/politician_trading/monitoring.py +413 -0
  161. mcli/workflow/politician_trading/scrapers.py +966 -0
  162. mcli/workflow/politician_trading/scrapers_california.py +412 -0
  163. mcli/workflow/politician_trading/scrapers_eu.py +377 -0
  164. mcli/workflow/politician_trading/scrapers_uk.py +350 -0
  165. mcli/workflow/politician_trading/scrapers_us_states.py +438 -0
  166. mcli/workflow/politician_trading/supabase_functions.py +354 -0
  167. mcli/workflow/politician_trading/workflow.py +852 -0
  168. mcli/workflow/registry/registry.py +180 -0
  169. mcli/workflow/repo/repo.py +223 -0
  170. mcli/workflow/scheduler/commands.py +493 -0
  171. mcli/workflow/scheduler/cron_parser.py +238 -0
  172. mcli/workflow/scheduler/job.py +182 -0
  173. mcli/workflow/scheduler/monitor.py +139 -0
  174. mcli/workflow/scheduler/persistence.py +324 -0
  175. mcli/workflow/scheduler/scheduler.py +679 -0
  176. mcli/workflow/sync/sync_cmd.py +437 -0
  177. mcli/workflow/sync/test_cmd.py +314 -0
  178. mcli/workflow/videos/videos.py +242 -0
  179. mcli/workflow/wakatime/wakatime.py +11 -0
  180. mcli/workflow/workflow.py +37 -0
  181. mcli_framework-7.0.0.dist-info/METADATA +479 -0
  182. mcli_framework-7.0.0.dist-info/RECORD +186 -0
  183. mcli_framework-7.0.0.dist-info/WHEEL +5 -0
  184. mcli_framework-7.0.0.dist-info/entry_points.txt +7 -0
  185. mcli_framework-7.0.0.dist-info/licenses/LICENSE +21 -0
  186. mcli_framework-7.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,679 @@
1
+ """
2
+ Main scheduler engine for MCLI cron functionality
3
+
4
+ Coordinates job scheduling, execution, monitoring, and persistence.
5
+ Provides the primary interface for the cron scheduling system.
6
+ """
7
+
8
+ import asyncio
9
+ import json
10
+ import os
11
+ import signal
12
+ import subprocess
13
+ import threading
14
+ import time
15
+ from datetime import datetime, timedelta
16
+ from typing import Any, Callable, Dict, List, Optional
17
+
18
+ from mcli.lib.logger.logger import get_logger
19
+
20
+ from .cron_parser import CronExpression
21
+ from .job import JobStatus, JobType, ScheduledJob
22
+ from .monitor import JobMonitor
23
+ from .persistence import JobStorage
24
+
25
+ logger = get_logger(__name__)
26
+
27
+
28
class JobExecutor:
    """Handles job execution in separate processes/threads.

    Each job type (shell command, inline Python, cleanup spec, system task,
    API call, or custom) is dispatched to a dedicated handler. Live
    subprocesses are tracked in ``running_processes`` so they can be
    cancelled via :meth:`kill_job`.
    """

    def __init__(self):
        # job id -> live subprocess; all access guarded by self.lock.
        self.running_processes: Dict[str, subprocess.Popen] = {}
        self.lock = threading.Lock()

    def execute_job(self, job: ScheduledJob) -> Dict[str, Any]:
        """Execute a job and return execution results.

        Updates the job's status in place and returns a dict containing
        job_id, started_at/completed_at ISO timestamps, status, output,
        error, exit_code, and runtime_seconds.
        """
        start_time = datetime.now()
        result = {
            "job_id": job.id,
            "started_at": start_time.isoformat(),
            "status": JobStatus.RUNNING.value,
            "output": "",
            "error": "",
            "exit_code": None,
            "runtime_seconds": 0,
        }

        try:
            job.update_status(JobStatus.RUNNING)
            logger.info(f"Executing job {job.name} [{job.id}]")

            # Dispatch on job type; unknown types fall through to _execute_custom.
            if job.job_type == JobType.COMMAND:
                result.update(self._execute_command(job))
            elif job.job_type == JobType.PYTHON:
                result.update(self._execute_python(job))
            elif job.job_type == JobType.CLEANUP:
                result.update(self._execute_cleanup(job))
            elif job.job_type == JobType.SYSTEM:
                result.update(self._execute_system(job))
            elif job.job_type == JobType.API_CALL:
                result.update(self._execute_api_call(job))
            else:
                result.update(self._execute_custom(job))

        except Exception as e:
            logger.error(f"Job execution failed for {job.name}: {e}")
            result.update({"status": JobStatus.FAILED.value, "error": str(e), "exit_code": -1})

        # Calculate runtime
        end_time = datetime.now()
        runtime = (end_time - start_time).total_seconds()
        result["runtime_seconds"] = runtime
        result["completed_at"] = end_time.isoformat()

        # If no handler set a terminal status, derive one from the exit code.
        if result["status"] == JobStatus.RUNNING.value:
            if result.get("exit_code") == 0:
                job.update_status(JobStatus.COMPLETED, result["output"], result["error"])
                result["status"] = JobStatus.COMPLETED.value
            else:
                job.update_status(JobStatus.FAILED, result["output"], result["error"])
                result["status"] = JobStatus.FAILED.value

        job.runtime_seconds = runtime
        return result

    def _execute_command(self, job: ScheduledJob) -> Dict[str, Any]:
        """Execute a shell command, honoring job.environment and job.max_runtime."""
        env = os.environ.copy()
        env.update(job.environment)

        # NOTE: shell=True runs job.command through the shell. Job commands are
        # operator-authored schedule entries, not untrusted external input.
        process = subprocess.Popen(
            job.command,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            env=env,
            cwd=job.working_directory,
        )

        # Store process for potential cancellation
        with self.lock:
            self.running_processes[job.id] = process

        try:
            stdout, stderr = process.communicate(timeout=job.max_runtime)
            return {"output": stdout, "error": stderr, "exit_code": process.returncode}
        except subprocess.TimeoutExpired:
            process.kill()
            # Reap the killed process so it does not linger as a zombie
            # (recommended by the subprocess docs after kill-on-timeout).
            process.communicate()
            return {
                "output": "",
                "error": f"Job timed out after {job.max_runtime} seconds",
                "exit_code": -1,
                "status": JobStatus.FAILED.value,
            }
        finally:
            with self.lock:
                self.running_processes.pop(job.id, None)

    def _execute_python(self, job: ScheduledJob) -> Dict[str, Any]:
        """Execute inline Python code by writing it to a temp file and running it."""
        try:
            import sys
            import tempfile

            # Create temporary Python file holding the job's code.
            with tempfile.NamedTemporaryFile(mode="w", suffix=".py", delete=False) as f:
                f.write(job.command)
                temp_file = f.name

            try:
                env = os.environ.copy()
                env.update(job.environment)

                # Use sys.executable (the documented interpreter path) rather
                # than the undocumented os.sys alias.
                process = subprocess.Popen(
                    [sys.executable, temp_file],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    text=True,
                    env=env,
                    cwd=job.working_directory,
                )

                stdout, stderr = process.communicate(timeout=job.max_runtime)
                return {"output": stdout, "error": stderr, "exit_code": process.returncode}
            finally:
                os.unlink(temp_file)

        except Exception as e:
            return {
                "output": "",
                "error": f"Python execution failed: {e}",
                "exit_code": -1,
                "status": JobStatus.FAILED.value,
            }

    def _execute_cleanup(self, job: ScheduledJob) -> Dict[str, Any]:
        """Execute file system cleanup tasks described by a JSON task list."""
        try:
            # Parse cleanup command (JSON format expected):
            # {"tasks": [{"type": "...", ...}, ...]}
            cleanup_config = json.loads(job.command)

            results = []
            for task in cleanup_config.get("tasks", []):
                task_type = task.get("type")
                path = task.get("path")

                if task_type == "delete_old_files":
                    days = task.get("days", 30)
                    pattern = task.get("pattern", "*")
                    result = self._cleanup_old_files(path, days, pattern)
                    results.append(result)
                elif task_type == "empty_trash":
                    result = self._empty_trash()
                    results.append(result)
                elif task_type == "organize_desktop":
                    result = self._organize_desktop()
                    results.append(result)
                # Unknown task types are silently skipped.

            return {"output": json.dumps(results, indent=2), "error": "", "exit_code": 0}

        except Exception as e:
            return {
                "output": "",
                "error": f"Cleanup task failed: {e}",
                "exit_code": -1,
                "status": JobStatus.FAILED.value,
            }

    def _execute_system(self, job: ScheduledJob) -> Dict[str, Any]:
        """Execute system maintenance tasks (delegates to shell execution)."""
        # Similar to cleanup but for system-level tasks
        return self._execute_command(job)

    def _execute_api_call(self, job: ScheduledJob) -> Dict[str, Any]:
        """Execute an HTTP API call described by a JSON config in job.command."""
        try:
            import requests

            # Parse API call configuration:
            # {"method": ..., "url": ..., "headers": {...}, "data": ..., "timeout": ...}
            api_config = json.loads(job.command)

            method = api_config.get("method", "GET").upper()
            url = api_config["url"]
            headers = api_config.get("headers", {})
            data = api_config.get("data")
            # Never wait longer than the job's overall runtime budget.
            timeout = min(api_config.get("timeout", 30), job.max_runtime)

            response = requests.request(
                method=method,
                url=url,
                headers=headers,
                json=data if data else None,
                timeout=timeout,
            )

            return {
                "output": json.dumps(
                    {
                        "status_code": response.status_code,
                        "headers": dict(response.headers),
                        "body": response.text,
                    },
                    indent=2,
                ),
                "error": "",
                # Non-2xx/3xx responses are reported as exit code 1.
                "exit_code": 0 if response.ok else 1,
            }

        except Exception as e:
            return {
                "output": "",
                "error": f"API call failed: {e}",
                "exit_code": -1,
                "status": JobStatus.FAILED.value,
            }

    def _execute_custom(self, job: ScheduledJob) -> Dict[str, Any]:
        """Execute custom job types (defaults to shell command execution)."""
        return self._execute_command(job)

    def _cleanup_old_files(self, path: str, days: int, pattern: str) -> Dict[str, Any]:
        """Delete files under *path* matching *pattern* older than *days* days."""
        try:
            import glob
            from pathlib import Path

            cutoff_time = datetime.now() - timedelta(days=days)
            deleted_files = []

            for file_path in glob.glob(os.path.join(path, pattern)):
                file_obj = Path(file_path)
                if file_obj.is_file():
                    mod_time = datetime.fromtimestamp(file_obj.stat().st_mtime)
                    if mod_time < cutoff_time:
                        file_obj.unlink()
                        deleted_files.append(str(file_path))

            return {
                "task": "delete_old_files",
                "path": path,
                "deleted_count": len(deleted_files),
                "deleted_files": deleted_files[:10],  # Limit output
            }

        except Exception as e:
            return {"task": "delete_old_files", "error": str(e)}

    def _empty_trash(self) -> Dict[str, Any]:
        """Empty the system trash/recycle bin (macOS, Windows, or Linux)."""
        try:
            import platform

            system = platform.system()

            if system == "Darwin":  # macOS
                subprocess.run(
                    ["osascript", "-e", 'tell application "Finder" to empty trash'], check=True
                )
            elif system == "Windows":
                subprocess.run(["powershell", "-Command", "Clear-RecycleBin -Force"], check=True)
            else:  # Linux
                trash_dir = os.path.expanduser("~/.local/share/Trash/files")
                if os.path.exists(trash_dir):
                    import shutil

                    # Remove and recreate the trash directory rather than
                    # deleting entries one by one.
                    shutil.rmtree(trash_dir)
                    os.makedirs(trash_dir)

            return {"task": "empty_trash", "status": "completed"}

        except Exception as e:
            return {"task": "empty_trash", "error": str(e)}

    def _organize_desktop(self) -> Dict[str, Any]:
        """Organize desktop files into per-category folders by extension."""
        try:
            desktop_path = os.path.join(os.path.expanduser("~"), "Desktop")
            if not os.path.exists(desktop_path):
                return {"task": "organize_desktop", "error": "Desktop path not found"}

            organized_files = []
            file_types = {
                "Documents": [".pdf", ".doc", ".docx", ".txt", ".rtf"],
                "Images": [".jpg", ".jpeg", ".png", ".gif", ".bmp", ".svg"],
                "Archives": [".zip", ".rar", ".7z", ".tar", ".gz"],
                "Videos": [".mp4", ".avi", ".mov", ".mkv", ".wmv"],
                "Audio": [".mp3", ".wav", ".flac", ".aac", ".ogg"],
            }

            for filename in os.listdir(desktop_path):
                file_path = os.path.join(desktop_path, filename)
                if os.path.isfile(file_path):
                    file_ext = os.path.splitext(filename)[1].lower()

                    for folder, extensions in file_types.items():
                        if file_ext in extensions:
                            folder_path = os.path.join(desktop_path, folder)
                            os.makedirs(folder_path, exist_ok=True)

                            new_path = os.path.join(folder_path, filename)
                            os.rename(file_path, new_path)
                            # Record the actual moved filename (was a literal
                            # "(unknown)" placeholder, which made the report useless).
                            organized_files.append(f"{filename} -> {folder}/")
                            break

            return {
                "task": "organize_desktop",
                "organized_count": len(organized_files),
                "organized_files": organized_files[:10],  # Limit output
            }

        except Exception as e:
            return {"task": "organize_desktop", "error": str(e)}

    def kill_job(self, job_id: str) -> bool:
        """Kill a running job process.

        Tries a graceful terminate() first, then kill() if the process is
        still alive two seconds later. Returns True if a signal was sent.
        """
        with self.lock:
            process = self.running_processes.get(job_id)
            if process and process.poll() is None:
                try:
                    process.terminate()
                    time.sleep(2)  # Give it time to terminate gracefully
                    if process.poll() is None:
                        process.kill()
                    return True
                except Exception as e:
                    logger.error(f"Failed to kill job {job_id}: {e}")
                    return False
            return False
352
+
353
+
354
class JobScheduler:
    """Main scheduler that coordinates all cron functionality.

    Owns persistence (JobStorage), runtime tracking (JobMonitor), and process
    handling (JobExecutor). start() launches a daemon thread that wakes every
    30 seconds to run due jobs, handle retries, refresh next-run times, and
    persist state.
    """

    def __init__(self, storage_dir: Optional[str] = None):
        self.storage = JobStorage(storage_dir)
        self.monitor = JobMonitor()
        self.executor = JobExecutor()

        # All known jobs keyed by job id; mutations go through self.lock.
        self.jobs: Dict[str, ScheduledJob] = {}
        self.running = False
        self.scheduler_thread: Optional[threading.Thread] = None
        self.lock = threading.Lock()

        # Load existing jobs
        self._load_jobs()

        # Set up signal handlers
        # NOTE(review): signal.signal raises ValueError when called outside the
        # main thread -- confirm this constructor only runs on the main thread.
        signal.signal(signal.SIGINT, self._signal_handler)
        signal.signal(signal.SIGTERM, self._signal_handler)

    def _load_jobs(self):
        """Load jobs from persistent storage into the in-memory map."""
        jobs = self.storage.load_jobs()
        self.jobs = {job.id: job for job in jobs}
        logger.info(f"Loaded {len(self.jobs)} jobs from storage")

    def _save_jobs(self):
        """Save all jobs to persistent storage."""
        jobs_list = list(self.jobs.values())
        self.storage.save_jobs(jobs_list)

    def start(self):
        """Start the scheduler: run @reboot jobs, then begin the polling loop."""
        if self.running:
            logger.warning("Scheduler already running")
            return

        self.running = True
        self.monitor.start_monitoring()

        # Execute @reboot jobs
        self._execute_reboot_jobs()

        # Start main scheduler loop (daemon thread so it never blocks exit)
        self.scheduler_thread = threading.Thread(target=self._scheduler_loop, daemon=True)
        self.scheduler_thread.start()

        logger.info("Job scheduler started")

    def stop(self):
        """Stop the scheduler and persist current job state."""
        if not self.running:
            return

        # Clearing the flag makes _scheduler_loop exit on its next wake-up.
        self.running = False
        self.monitor.stop_monitoring()

        if self.scheduler_thread:
            self.scheduler_thread.join(timeout=10)

        # Save current state
        self._save_jobs()

        logger.info("Job scheduler stopped")

    def _signal_handler(self, signum, frame):
        """Handle SIGINT/SIGTERM by shutting down cleanly."""
        logger.info(f"Received signal {signum}, shutting down scheduler...")
        self.stop()

    def _scheduler_loop(self):
        """Main scheduling loop: poll every 30s for due jobs and retries."""
        while self.running:
            try:
                current_time = datetime.now()

                # Snapshot values() into a list so concurrent add/remove
                # doesn't invalidate the iteration.
                for job in list(self.jobs.values()):
                    if not job.enabled:
                        continue

                    # Check if job should run
                    if self._should_run_job(job, current_time):
                        self._queue_job_execution(job)

                    # Handle retries
                    if job.should_retry():
                        retry_time = job.get_next_retry_time()
                        if current_time >= retry_time:
                            job.current_retry += 1
                            self._queue_job_execution(job)

                # Update next run times
                self._update_next_run_times()

                # Save state periodically
                self._save_jobs()

                time.sleep(30)  # Check every 30 seconds

            except Exception as e:
                logger.error(f"Error in scheduler loop: {e}")
                time.sleep(60)  # Wait longer on error

    def _should_run_job(self, job: ScheduledJob, current_time: datetime) -> bool:
        """Return True if *job* is due to run at *current_time*."""
        # Never start a second instance of a job that is still running.
        if job.status == JobStatus.RUNNING:
            return False

        try:
            cron = CronExpression(job.cron_expression)

            # For @reboot jobs, only run at startup
            if cron.is_reboot:
                return False

            # Check if it's time to run
            if job.next_run and current_time >= job.next_run:
                return True

            # Fallback: check if cron expression matches current time
            return cron.matches_now()

        except Exception as e:
            logger.error(f"Error checking job schedule for {job.name}: {e}")
            return False

    def _queue_job_execution(self, job: ScheduledJob):
        """Run *job* on a fresh daemon thread and register it with the monitor."""

        def execute_job_thread():
            # Worker body: execute, record history, refresh schedule.
            try:
                # Execute the job
                result = self.executor.execute_job(job)

                # Record execution history
                self.storage.record_job_execution(job, result)

                # Update next run time
                self._update_job_next_run(job)

                logger.info(f"Job {job.name} completed with status: {result['status']}")

            except Exception as e:
                logger.error(f"Error executing job {job.name}: {e}")
                job.update_status(JobStatus.FAILED, "", str(e))

        # Execute in separate thread
        thread = threading.Thread(target=execute_job_thread, daemon=True)
        thread.start()

        # Add to monitor
        self.monitor.add_job(job, thread)

    def _update_job_next_run(self, job: ScheduledJob):
        """Recompute and store *job*'s next run time from its cron expression."""
        try:
            cron = CronExpression(job.cron_expression)
            if not cron.is_reboot:
                job.next_run = cron.get_next_run_time()
        except Exception as e:
            logger.error(f"Error updating next run time for {job.name}: {e}")

    def _update_next_run_times(self):
        """Fill in next_run for any enabled job that doesn't have one yet."""
        for job in self.jobs.values():
            if job.enabled and job.next_run is None:
                self._update_job_next_run(job)

    def _execute_reboot_jobs(self):
        """Execute jobs whose schedule is the special '@reboot' expression."""
        reboot_jobs = [
            job
            for job in self.jobs.values()
            if job.enabled and job.cron_expression.strip().lower() == "@reboot"
        ]

        for job in reboot_jobs:
            logger.info(f"Executing @reboot job: {job.name}")
            self._queue_job_execution(job)

    # Public API methods

    def add_job(self, job: ScheduledJob) -> bool:
        """Add a new job to the scheduler and persist it. Returns success."""
        try:
            with self.lock:
                self.jobs[job.id] = job
                self._update_job_next_run(job)

            self.storage.save_job(job)
            logger.info(f"Added job: {job.name}")
            return True

        except Exception as e:
            logger.error(f"Failed to add job {job.name}: {e}")
            return False

    def remove_job(self, job_id: str) -> bool:
        """Remove a job, delete it from storage, and kill it if running."""
        try:
            with self.lock:
                job = self.jobs.pop(job_id, None)

            if job:
                self.storage.delete_job(job_id)
                # Try to kill if running
                self.executor.kill_job(job_id)
                logger.info(f"Removed job: {job.name}")
                return True
            return False

        except Exception as e:
            logger.error(f"Failed to remove job {job_id}: {e}")
            return False

    def get_job(self, job_id: str) -> Optional[ScheduledJob]:
        """Get a job by ID, or None if unknown."""
        return self.jobs.get(job_id)

    def get_all_jobs(self) -> List[ScheduledJob]:
        """Get all jobs as a list snapshot."""
        return list(self.jobs.values())

    def get_job_status(self, job_id: str) -> Optional[Dict[str, Any]]:
        """Get detailed status of a job (dict, live state, recent history)."""
        job = self.jobs.get(job_id)
        if not job:
            return None

        return {
            "job": job.to_dict(),
            "is_running": self.monitor.is_job_running(job_id),
            "runtime": self.monitor.get_job_runtime(job_id),
            "history": self.storage.get_job_history(job_id, limit=5),
        }

    def get_scheduler_stats(self) -> Dict[str, Any]:
        """Get scheduler statistics (job counts, monitor and storage info)."""
        total_jobs = len(self.jobs)
        enabled_jobs = len([j for j in self.jobs.values() if j.enabled])
        running_jobs = len(self.monitor.get_running_jobs())

        return {
            "running": self.running,
            "total_jobs": total_jobs,
            "enabled_jobs": enabled_jobs,
            "running_jobs": running_jobs,
            "monitor_stats": self.monitor.get_monitor_stats(),
            "storage_info": self.storage.get_storage_info(),
        }

    def create_json_response(self) -> Dict[str, Any]:
        """Create a JSON-serializable snapshot for frontend integration."""
        jobs_data = []
        for job in self.jobs.values():
            job_data = job.to_dict()
            job_data["is_running"] = self.monitor.is_job_running(job.id)
            job_data["runtime"] = self.monitor.get_job_runtime(job.id)
            jobs_data.append(job_data)

        return {
            "timestamp": datetime.now().isoformat(),
            "scheduler": self.get_scheduler_stats(),
            "jobs": jobs_data,
        }
619
+
620
+
621
+ # Convenience functions for common job types
622
+
623
+
624
def create_desktop_cleanup_job(
    name: str = "Desktop Cleanup",
    cron_expression: str = "0 9 * * 1",  # Monday 9 AM
    enabled: bool = True,
) -> ScheduledJob:
    """Build a CLEANUP job that tidies desktop files into category folders."""
    payload = json.dumps({"tasks": [{"type": "organize_desktop"}]})
    return ScheduledJob(
        name=name,
        cron_expression=cron_expression,
        job_type=JobType.CLEANUP,
        command=payload,
        description="Automatically organize desktop files into folders",
        enabled=enabled,
    )
640
+
641
+
642
def create_temp_cleanup_job(
    name: str = "Temp File Cleanup",
    cron_expression: str = "0 2 * * *",  # Daily at 2 AM
    temp_path: str = "/tmp",
    days: int = 7,
    enabled: bool = True,
) -> ScheduledJob:
    """Build a CLEANUP job that deletes stale files from a temp directory."""
    task = {"type": "delete_old_files", "path": temp_path, "days": days, "pattern": "*"}
    return ScheduledJob(
        name=name,
        cron_expression=cron_expression,
        job_type=JobType.CLEANUP,
        command=json.dumps({"tasks": [task]}),
        description=f"Clean up files older than {days} days from {temp_path}",
        enabled=enabled,
    )
662
+
663
+
664
def create_system_backup_job(
    name: str = "System Backup",
    cron_expression: str = "0 1 * * 0",  # Sunday 1 AM
    backup_command: str = "rsync -av /home/user/ /backup/",
    enabled: bool = True,
) -> ScheduledJob:
    """Build a SYSTEM job that runs the given backup command weekly."""
    # Backups routinely outlast the default runtime cap; allow two hours.
    two_hours = 7200
    return ScheduledJob(
        name=name,
        cron_expression=cron_expression,
        job_type=JobType.SYSTEM,
        command=backup_command,
        description="Weekly system backup",
        enabled=enabled,
        max_runtime=two_hours,
    )