mcli_framework-7.0.0-py3-none-any.whl

This diff shows the contents of publicly released package versions as they appear in the supported public registries; it is provided for informational purposes only.

Potentially problematic release.


This version of mcli-framework might be problematic.

Files changed (186)
  1. mcli/app/chat_cmd.py +42 -0
  2. mcli/app/commands_cmd.py +226 -0
  3. mcli/app/completion_cmd.py +216 -0
  4. mcli/app/completion_helpers.py +288 -0
  5. mcli/app/cron_test_cmd.py +697 -0
  6. mcli/app/logs_cmd.py +419 -0
  7. mcli/app/main.py +492 -0
  8. mcli/app/model/model.py +1060 -0
  9. mcli/app/model_cmd.py +227 -0
  10. mcli/app/redis_cmd.py +269 -0
  11. mcli/app/video/video.py +1114 -0
  12. mcli/app/visual_cmd.py +303 -0
  13. mcli/chat/chat.py +2409 -0
  14. mcli/chat/command_rag.py +514 -0
  15. mcli/chat/enhanced_chat.py +652 -0
  16. mcli/chat/system_controller.py +1010 -0
  17. mcli/chat/system_integration.py +1016 -0
  18. mcli/cli.py +25 -0
  19. mcli/config.toml +20 -0
  20. mcli/lib/api/api.py +586 -0
  21. mcli/lib/api/daemon_client.py +203 -0
  22. mcli/lib/api/daemon_client_local.py +44 -0
  23. mcli/lib/api/daemon_decorator.py +217 -0
  24. mcli/lib/api/mcli_decorators.py +1032 -0
  25. mcli/lib/auth/auth.py +85 -0
  26. mcli/lib/auth/aws_manager.py +85 -0
  27. mcli/lib/auth/azure_manager.py +91 -0
  28. mcli/lib/auth/credential_manager.py +192 -0
  29. mcli/lib/auth/gcp_manager.py +93 -0
  30. mcli/lib/auth/key_manager.py +117 -0
  31. mcli/lib/auth/mcli_manager.py +93 -0
  32. mcli/lib/auth/token_manager.py +75 -0
  33. mcli/lib/auth/token_util.py +1011 -0
  34. mcli/lib/config/config.py +47 -0
  35. mcli/lib/discovery/__init__.py +1 -0
  36. mcli/lib/discovery/command_discovery.py +274 -0
  37. mcli/lib/erd/erd.py +1345 -0
  38. mcli/lib/erd/generate_graph.py +453 -0
  39. mcli/lib/files/files.py +76 -0
  40. mcli/lib/fs/fs.py +109 -0
  41. mcli/lib/lib.py +29 -0
  42. mcli/lib/logger/logger.py +611 -0
  43. mcli/lib/performance/optimizer.py +409 -0
  44. mcli/lib/performance/rust_bridge.py +502 -0
  45. mcli/lib/performance/uvloop_config.py +154 -0
  46. mcli/lib/pickles/pickles.py +50 -0
  47. mcli/lib/search/cached_vectorizer.py +479 -0
  48. mcli/lib/services/data_pipeline.py +460 -0
  49. mcli/lib/services/lsh_client.py +441 -0
  50. mcli/lib/services/redis_service.py +387 -0
  51. mcli/lib/shell/shell.py +137 -0
  52. mcli/lib/toml/toml.py +33 -0
  53. mcli/lib/ui/styling.py +47 -0
  54. mcli/lib/ui/visual_effects.py +634 -0
  55. mcli/lib/watcher/watcher.py +185 -0
  56. mcli/ml/api/app.py +215 -0
  57. mcli/ml/api/middleware.py +224 -0
  58. mcli/ml/api/routers/admin_router.py +12 -0
  59. mcli/ml/api/routers/auth_router.py +244 -0
  60. mcli/ml/api/routers/backtest_router.py +12 -0
  61. mcli/ml/api/routers/data_router.py +12 -0
  62. mcli/ml/api/routers/model_router.py +302 -0
  63. mcli/ml/api/routers/monitoring_router.py +12 -0
  64. mcli/ml/api/routers/portfolio_router.py +12 -0
  65. mcli/ml/api/routers/prediction_router.py +267 -0
  66. mcli/ml/api/routers/trade_router.py +12 -0
  67. mcli/ml/api/routers/websocket_router.py +76 -0
  68. mcli/ml/api/schemas.py +64 -0
  69. mcli/ml/auth/auth_manager.py +425 -0
  70. mcli/ml/auth/models.py +154 -0
  71. mcli/ml/auth/permissions.py +302 -0
  72. mcli/ml/backtesting/backtest_engine.py +502 -0
  73. mcli/ml/backtesting/performance_metrics.py +393 -0
  74. mcli/ml/cache.py +400 -0
  75. mcli/ml/cli/main.py +398 -0
  76. mcli/ml/config/settings.py +394 -0
  77. mcli/ml/configs/dvc_config.py +230 -0
  78. mcli/ml/configs/mlflow_config.py +131 -0
  79. mcli/ml/configs/mlops_manager.py +293 -0
  80. mcli/ml/dashboard/app.py +532 -0
  81. mcli/ml/dashboard/app_integrated.py +738 -0
  82. mcli/ml/dashboard/app_supabase.py +560 -0
  83. mcli/ml/dashboard/app_training.py +615 -0
  84. mcli/ml/dashboard/cli.py +51 -0
  85. mcli/ml/data_ingestion/api_connectors.py +501 -0
  86. mcli/ml/data_ingestion/data_pipeline.py +567 -0
  87. mcli/ml/data_ingestion/stream_processor.py +512 -0
  88. mcli/ml/database/migrations/env.py +94 -0
  89. mcli/ml/database/models.py +667 -0
  90. mcli/ml/database/session.py +200 -0
  91. mcli/ml/experimentation/ab_testing.py +845 -0
  92. mcli/ml/features/ensemble_features.py +607 -0
  93. mcli/ml/features/political_features.py +676 -0
  94. mcli/ml/features/recommendation_engine.py +809 -0
  95. mcli/ml/features/stock_features.py +573 -0
  96. mcli/ml/features/test_feature_engineering.py +346 -0
  97. mcli/ml/logging.py +85 -0
  98. mcli/ml/mlops/data_versioning.py +518 -0
  99. mcli/ml/mlops/experiment_tracker.py +377 -0
  100. mcli/ml/mlops/model_serving.py +481 -0
  101. mcli/ml/mlops/pipeline_orchestrator.py +614 -0
  102. mcli/ml/models/base_models.py +324 -0
  103. mcli/ml/models/ensemble_models.py +675 -0
  104. mcli/ml/models/recommendation_models.py +474 -0
  105. mcli/ml/models/test_models.py +487 -0
  106. mcli/ml/monitoring/drift_detection.py +676 -0
  107. mcli/ml/monitoring/metrics.py +45 -0
  108. mcli/ml/optimization/portfolio_optimizer.py +834 -0
  109. mcli/ml/preprocessing/data_cleaners.py +451 -0
  110. mcli/ml/preprocessing/feature_extractors.py +491 -0
  111. mcli/ml/preprocessing/ml_pipeline.py +382 -0
  112. mcli/ml/preprocessing/politician_trading_preprocessor.py +569 -0
  113. mcli/ml/preprocessing/test_preprocessing.py +294 -0
  114. mcli/ml/scripts/populate_sample_data.py +200 -0
  115. mcli/ml/tasks.py +400 -0
  116. mcli/ml/tests/test_integration.py +429 -0
  117. mcli/ml/tests/test_training_dashboard.py +387 -0
  118. mcli/public/oi/oi.py +15 -0
  119. mcli/public/public.py +4 -0
  120. mcli/self/self_cmd.py +1246 -0
  121. mcli/workflow/daemon/api_daemon.py +800 -0
  122. mcli/workflow/daemon/async_command_database.py +681 -0
  123. mcli/workflow/daemon/async_process_manager.py +591 -0
  124. mcli/workflow/daemon/client.py +530 -0
  125. mcli/workflow/daemon/commands.py +1196 -0
  126. mcli/workflow/daemon/daemon.py +905 -0
  127. mcli/workflow/daemon/daemon_api.py +59 -0
  128. mcli/workflow/daemon/enhanced_daemon.py +571 -0
  129. mcli/workflow/daemon/process_cli.py +244 -0
  130. mcli/workflow/daemon/process_manager.py +439 -0
  131. mcli/workflow/daemon/test_daemon.py +275 -0
  132. mcli/workflow/dashboard/dashboard_cmd.py +113 -0
  133. mcli/workflow/docker/docker.py +0 -0
  134. mcli/workflow/file/file.py +100 -0
  135. mcli/workflow/gcloud/config.toml +21 -0
  136. mcli/workflow/gcloud/gcloud.py +58 -0
  137. mcli/workflow/git_commit/ai_service.py +328 -0
  138. mcli/workflow/git_commit/commands.py +430 -0
  139. mcli/workflow/lsh_integration.py +355 -0
  140. mcli/workflow/model_service/client.py +594 -0
  141. mcli/workflow/model_service/download_and_run_efficient_models.py +288 -0
  142. mcli/workflow/model_service/lightweight_embedder.py +397 -0
  143. mcli/workflow/model_service/lightweight_model_server.py +714 -0
  144. mcli/workflow/model_service/lightweight_test.py +241 -0
  145. mcli/workflow/model_service/model_service.py +1955 -0
  146. mcli/workflow/model_service/ollama_efficient_runner.py +425 -0
  147. mcli/workflow/model_service/pdf_processor.py +386 -0
  148. mcli/workflow/model_service/test_efficient_runner.py +234 -0
  149. mcli/workflow/model_service/test_example.py +315 -0
  150. mcli/workflow/model_service/test_integration.py +131 -0
  151. mcli/workflow/model_service/test_new_features.py +149 -0
  152. mcli/workflow/openai/openai.py +99 -0
  153. mcli/workflow/politician_trading/commands.py +1790 -0
  154. mcli/workflow/politician_trading/config.py +134 -0
  155. mcli/workflow/politician_trading/connectivity.py +490 -0
  156. mcli/workflow/politician_trading/data_sources.py +395 -0
  157. mcli/workflow/politician_trading/database.py +410 -0
  158. mcli/workflow/politician_trading/demo.py +248 -0
  159. mcli/workflow/politician_trading/models.py +165 -0
  160. mcli/workflow/politician_trading/monitoring.py +413 -0
  161. mcli/workflow/politician_trading/scrapers.py +966 -0
  162. mcli/workflow/politician_trading/scrapers_california.py +412 -0
  163. mcli/workflow/politician_trading/scrapers_eu.py +377 -0
  164. mcli/workflow/politician_trading/scrapers_uk.py +350 -0
  165. mcli/workflow/politician_trading/scrapers_us_states.py +438 -0
  166. mcli/workflow/politician_trading/supabase_functions.py +354 -0
  167. mcli/workflow/politician_trading/workflow.py +852 -0
  168. mcli/workflow/registry/registry.py +180 -0
  169. mcli/workflow/repo/repo.py +223 -0
  170. mcli/workflow/scheduler/commands.py +493 -0
  171. mcli/workflow/scheduler/cron_parser.py +238 -0
  172. mcli/workflow/scheduler/job.py +182 -0
  173. mcli/workflow/scheduler/monitor.py +139 -0
  174. mcli/workflow/scheduler/persistence.py +324 -0
  175. mcli/workflow/scheduler/scheduler.py +679 -0
  176. mcli/workflow/sync/sync_cmd.py +437 -0
  177. mcli/workflow/sync/test_cmd.py +314 -0
  178. mcli/workflow/videos/videos.py +242 -0
  179. mcli/workflow/wakatime/wakatime.py +11 -0
  180. mcli/workflow/workflow.py +37 -0
  181. mcli_framework-7.0.0.dist-info/METADATA +479 -0
  182. mcli_framework-7.0.0.dist-info/RECORD +186 -0
  183. mcli_framework-7.0.0.dist-info/WHEEL +5 -0
  184. mcli_framework-7.0.0.dist-info/entry_points.txt +7 -0
  185. mcli_framework-7.0.0.dist-info/licenses/LICENSE +21 -0
  186. mcli_framework-7.0.0.dist-info/top_level.txt +1 -0
mcli/workflow/scheduler/persistence.py
@@ -0,0 +1,324 @@
+"""
+Job persistence and storage for the MCLI scheduler
+
+Handles saving/loading jobs to/from disk, ensuring persistence across power cycles
+"""
+
+import json
+import os
+import threading
+from datetime import datetime
+from pathlib import Path
+from typing import Dict, List, Optional
+
+from mcli.lib.logger.logger import get_logger
+
+from .job import ScheduledJob
+
+logger = get_logger(__name__)
+
+
+class JobStorage:
+    """Handles persistent storage of scheduled jobs"""
+
+    def __init__(self, storage_dir: Optional[str] = None):
+        self.storage_dir = Path(storage_dir) if storage_dir else self._get_default_storage_dir()
+        self.jobs_file = self.storage_dir / "jobs.json"
+        self.history_file = self.storage_dir / "job_history.json"
+        self.lock = threading.Lock()
+
+        # Ensure storage directory exists
+        self.storage_dir.mkdir(parents=True, exist_ok=True)
+
+        # Initialize files if they don't exist
+        self._initialize_storage()
+
+    def _get_default_storage_dir(self) -> Path:
+        """Get default storage directory"""
+        home = Path.home()
+        storage_dir = home / ".mcli" / "scheduler"
+        return storage_dir
+
+    def _initialize_storage(self):
+        """Initialize storage files if they don't exist"""
+        if not self.jobs_file.exists():
+            self._write_json_file(self.jobs_file, {"jobs": [], "version": "1.0"})
+
+        if not self.history_file.exists():
+            self._write_json_file(self.history_file, {"history": [], "version": "1.0"})
+
+    def _read_json_file(self, file_path: Path) -> dict:
+        """Safely read JSON file with error handling"""
+        try:
+            with open(file_path, "r", encoding="utf-8") as f:
+                return json.load(f)
+        except FileNotFoundError:
+            logger.warning(f"File not found: {file_path}")
+            return {}
+        except json.JSONDecodeError as e:
+            logger.error(f"Invalid JSON in {file_path}: {e}")
+            return {}
+        except Exception as e:
+            logger.error(f"Error reading {file_path}: {e}")
+            return {}
+
+    def _write_json_file(self, file_path: Path, data: dict):
+        """Safely write JSON file with atomic operation"""
+        temp_file = file_path.with_suffix(".tmp")
+        try:
+            with open(temp_file, "w", encoding="utf-8") as f:
+                json.dump(data, f, indent=2, ensure_ascii=False)
+
+            # Atomic move
+            temp_file.replace(file_path)
+
+        except Exception as e:
+            logger.error(f"Error writing {file_path}: {e}")
+            if temp_file.exists():
+                temp_file.unlink()
+
+    def save_jobs(self, jobs: List[ScheduledJob]) -> bool:
+        """Save list of jobs to persistent storage"""
+        with self.lock:
+            try:
+                jobs_data = {
+                    "jobs": [job.to_dict() for job in jobs],
+                    "version": "1.0",
+                    "saved_at": datetime.now().isoformat(),
+                    "count": len(jobs),
+                }
+
+                self._write_json_file(self.jobs_file, jobs_data)
+                logger.info(f"Saved {len(jobs)} jobs to {self.jobs_file}")
+                return True
+
+            except Exception as e:
+                logger.error(f"Failed to save jobs: {e}")
+                return False
+
+    def load_jobs(self) -> List[ScheduledJob]:
+        """Load jobs from persistent storage"""
+        with self.lock:
+            try:
+                data = self._read_json_file(self.jobs_file)
+                jobs_data = data.get("jobs", [])
+
+                jobs = []
+                for job_dict in jobs_data:
+                    try:
+                        job = ScheduledJob.from_dict(job_dict)
+                        jobs.append(job)
+                    except Exception as e:
+                        logger.error(f"Failed to load job {job_dict.get('id', 'unknown')}: {e}")
+
+                logger.info(f"Loaded {len(jobs)} jobs from {self.jobs_file}")
+                return jobs
+
+            except Exception as e:
+                logger.error(f"Failed to load jobs: {e}")
+                return []
+
+    def save_job(self, job: ScheduledJob) -> bool:
+        """Save a single job (update existing or add new)"""
+        jobs = self.load_jobs()
+
+        # Find existing job or add new one
+        updated = False
+        for i, existing_job in enumerate(jobs):
+            if existing_job.id == job.id:
+                jobs[i] = job
+                updated = True
+                break
+
+        if not updated:
+            jobs.append(job)
+
+        return self.save_jobs(jobs)
+
+    def delete_job(self, job_id: str) -> bool:
+        """Delete a job from storage"""
+        jobs = self.load_jobs()
+        original_count = len(jobs)
+
+        jobs = [job for job in jobs if job.id != job_id]
+
+        if len(jobs) < original_count:
+            return self.save_jobs(jobs)
+        return False
+
+    def get_job(self, job_id: str) -> Optional[ScheduledJob]:
+        """Get a specific job by ID"""
+        jobs = self.load_jobs()
+        for job in jobs:
+            if job.id == job_id:
+                return job
+        return None
+
+    def record_job_execution(self, job: ScheduledJob, execution_data: dict):
+        """Record job execution in history"""
+        with self.lock:
+            try:
+                history_data = self._read_json_file(self.history_file)
+                history = history_data.get("history", [])
+
+                # Create execution record
+                record = {
+                    "job_id": job.id,
+                    "job_name": job.name,
+                    "executed_at": datetime.now().isoformat(),
+                    "status": execution_data.get("status", "unknown"),
+                    "runtime_seconds": execution_data.get("runtime_seconds", 0),
+                    "output": execution_data.get("output", "")[:1000],  # Limit output size
+                    "error": execution_data.get("error", "")[:1000],  # Limit error size
+                    "exit_code": execution_data.get("exit_code"),
+                    "retries": execution_data.get("retries", 0),
+                }
+
+                history.append(record)
+
+                # Keep only last 1000 records
+                if len(history) > 1000:
+                    history = history[-1000:]
+
+                history_data = {
+                    "history": history,
+                    "version": "1.0",
+                    "updated_at": datetime.now().isoformat(),
+                }
+
+                self._write_json_file(self.history_file, history_data)
+
+            except Exception as e:
+                logger.error(f"Failed to record job execution: {e}")
+
+    def get_job_history(self, job_id: Optional[str] = None, limit: int = 100) -> List[dict]:
+        """Get job execution history"""
+        try:
+            history_data = self._read_json_file(self.history_file)
+            history = history_data.get("history", [])
+
+            if job_id:
+                history = [record for record in history if record.get("job_id") == job_id]
+
+            # Return most recent records first
+            history = sorted(history, key=lambda x: x.get("executed_at", ""), reverse=True)
+
+            return history[:limit]
+
+        except Exception as e:
+            logger.error(f"Failed to get job history: {e}")
+            return []
+
+    def cleanup_old_history(self, days: int = 30):
+        """Remove job history older than specified days"""
+        with self.lock:
+            try:
+                cutoff_date = datetime.now() - timedelta(days=days)
+                cutoff_str = cutoff_date.isoformat()
+
+                history_data = self._read_json_file(self.history_file)
+                history = history_data.get("history", [])
+
+                # Filter out old records
+                filtered_history = [
+                    record for record in history if record.get("executed_at", "") > cutoff_str
+                ]
+
+                removed_count = len(history) - len(filtered_history)
+
+                if removed_count > 0:
+                    history_data = {
+                        "history": filtered_history,
+                        "version": "1.0",
+                        "updated_at": datetime.now().isoformat(),
+                    }
+
+                    self._write_json_file(self.history_file, history_data)
+                    logger.info(f"Cleaned up {removed_count} old history records")
+
+            except Exception as e:
+                logger.error(f"Failed to cleanup old history: {e}")
+
+    def export_jobs(self, export_path: str) -> bool:
+        """Export all jobs to a file"""
+        try:
+            jobs = self.load_jobs()
+            export_data = {
+                "jobs": [job.to_dict() for job in jobs],
+                "exported_at": datetime.now().isoformat(),
+                "export_version": "1.0",
+                "source": "mcli-scheduler",
+            }
+
+            with open(export_path, "w", encoding="utf-8") as f:
+                json.dump(export_data, f, indent=2, ensure_ascii=False)
+
+            logger.info(f"Exported {len(jobs)} jobs to {export_path}")
+            return True
+
+        except Exception as e:
+            logger.error(f"Failed to export jobs: {e}")
+            return False
+
+    def import_jobs(self, import_path: str, replace: bool = False) -> int:
+        """Import jobs from a file"""
+        try:
+            with open(import_path, "r", encoding="utf-8") as f:
+                import_data = json.load(f)
+
+            imported_jobs_data = import_data.get("jobs", [])
+            existing_jobs = [] if replace else self.load_jobs()
+
+            imported_count = 0
+            for job_data in imported_jobs_data:
+                try:
+                    job = ScheduledJob.from_dict(job_data)
+
+                    # Check for duplicates by name
+                    if not replace and any(existing.name == job.name for existing in existing_jobs):
+                        logger.warning(f"Skipping duplicate job: {job.name}")
+                        continue
+
+                    existing_jobs.append(job)
+                    imported_count += 1
+
+                except Exception as e:
+                    logger.error(f"Failed to import job: {e}")
+
+            if imported_count > 0:
+                self.save_jobs(existing_jobs)
+                logger.info(f"Imported {imported_count} jobs from {import_path}")
+
+            return imported_count
+
+        except Exception as e:
+            logger.error(f"Failed to import jobs: {e}")
+            return 0
+
+    def get_storage_info(self) -> dict:
+        """Get information about storage usage"""
+        try:
+            jobs_size = self.jobs_file.stat().st_size if self.jobs_file.exists() else 0
+            history_size = self.history_file.stat().st_size if self.history_file.exists() else 0
+
+            jobs_count = len(self.load_jobs())
+            history_count = len(self.get_job_history())
+
+            return {
+                "storage_dir": str(self.storage_dir),
+                "jobs_file_size": jobs_size,
+                "history_file_size": history_size,
+                "total_size": jobs_size + history_size,
+                "jobs_count": jobs_count,
+                "history_count": history_count,
+                "jobs_file": str(self.jobs_file),
+                "history_file": str(self.history_file),
+            }
+
+        except Exception as e:
+            logger.error(f"Failed to get storage info: {e}")
+            return {}
+
+
+# Import required for datetime operations
+from datetime import timedelta
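
For context, the sketch below shows how the JobStorage class added in this file might be exercised once the wheel is installed. It is a minimal usage sketch, not part of the package: it assumes the import path mcli.workflow.scheduler.persistence (matching the file path above) and a writable ~/.mcli/scheduler directory, uses only the methods defined in this diff, and skips constructing ScheduledJob instances because that class's constructor lives in scheduler/job.py rather than here.

# Usage sketch (assumption: mcli-framework 7.0.0 is installed and ~/.mcli/scheduler is writable)
from mcli.workflow.scheduler.persistence import JobStorage

storage = JobStorage()                  # defaults to ~/.mcli/scheduler
jobs = storage.load_jobs()              # ScheduledJob objects restored from jobs.json
print(f"{len(jobs)} job(s) currently persisted")

# Most recent executions across all jobs; record keys come from record_job_execution above
for record in storage.get_job_history(limit=5):
    print(record["executed_at"], record["job_name"], record["status"])

storage.cleanup_old_history(days=30)    # drop history entries older than 30 days
print(storage.get_storage_info())       # file locations, sizes, and record counts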