mcli-framework 7.0.0 (mcli_framework-7.0.0-py3-none-any.whl)

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mcli-framework might be problematic.

Files changed (186)
  1. mcli/app/chat_cmd.py +42 -0
  2. mcli/app/commands_cmd.py +226 -0
  3. mcli/app/completion_cmd.py +216 -0
  4. mcli/app/completion_helpers.py +288 -0
  5. mcli/app/cron_test_cmd.py +697 -0
  6. mcli/app/logs_cmd.py +419 -0
  7. mcli/app/main.py +492 -0
  8. mcli/app/model/model.py +1060 -0
  9. mcli/app/model_cmd.py +227 -0
  10. mcli/app/redis_cmd.py +269 -0
  11. mcli/app/video/video.py +1114 -0
  12. mcli/app/visual_cmd.py +303 -0
  13. mcli/chat/chat.py +2409 -0
  14. mcli/chat/command_rag.py +514 -0
  15. mcli/chat/enhanced_chat.py +652 -0
  16. mcli/chat/system_controller.py +1010 -0
  17. mcli/chat/system_integration.py +1016 -0
  18. mcli/cli.py +25 -0
  19. mcli/config.toml +20 -0
  20. mcli/lib/api/api.py +586 -0
  21. mcli/lib/api/daemon_client.py +203 -0
  22. mcli/lib/api/daemon_client_local.py +44 -0
  23. mcli/lib/api/daemon_decorator.py +217 -0
  24. mcli/lib/api/mcli_decorators.py +1032 -0
  25. mcli/lib/auth/auth.py +85 -0
  26. mcli/lib/auth/aws_manager.py +85 -0
  27. mcli/lib/auth/azure_manager.py +91 -0
  28. mcli/lib/auth/credential_manager.py +192 -0
  29. mcli/lib/auth/gcp_manager.py +93 -0
  30. mcli/lib/auth/key_manager.py +117 -0
  31. mcli/lib/auth/mcli_manager.py +93 -0
  32. mcli/lib/auth/token_manager.py +75 -0
  33. mcli/lib/auth/token_util.py +1011 -0
  34. mcli/lib/config/config.py +47 -0
  35. mcli/lib/discovery/__init__.py +1 -0
  36. mcli/lib/discovery/command_discovery.py +274 -0
  37. mcli/lib/erd/erd.py +1345 -0
  38. mcli/lib/erd/generate_graph.py +453 -0
  39. mcli/lib/files/files.py +76 -0
  40. mcli/lib/fs/fs.py +109 -0
  41. mcli/lib/lib.py +29 -0
  42. mcli/lib/logger/logger.py +611 -0
  43. mcli/lib/performance/optimizer.py +409 -0
  44. mcli/lib/performance/rust_bridge.py +502 -0
  45. mcli/lib/performance/uvloop_config.py +154 -0
  46. mcli/lib/pickles/pickles.py +50 -0
  47. mcli/lib/search/cached_vectorizer.py +479 -0
  48. mcli/lib/services/data_pipeline.py +460 -0
  49. mcli/lib/services/lsh_client.py +441 -0
  50. mcli/lib/services/redis_service.py +387 -0
  51. mcli/lib/shell/shell.py +137 -0
  52. mcli/lib/toml/toml.py +33 -0
  53. mcli/lib/ui/styling.py +47 -0
  54. mcli/lib/ui/visual_effects.py +634 -0
  55. mcli/lib/watcher/watcher.py +185 -0
  56. mcli/ml/api/app.py +215 -0
  57. mcli/ml/api/middleware.py +224 -0
  58. mcli/ml/api/routers/admin_router.py +12 -0
  59. mcli/ml/api/routers/auth_router.py +244 -0
  60. mcli/ml/api/routers/backtest_router.py +12 -0
  61. mcli/ml/api/routers/data_router.py +12 -0
  62. mcli/ml/api/routers/model_router.py +302 -0
  63. mcli/ml/api/routers/monitoring_router.py +12 -0
  64. mcli/ml/api/routers/portfolio_router.py +12 -0
  65. mcli/ml/api/routers/prediction_router.py +267 -0
  66. mcli/ml/api/routers/trade_router.py +12 -0
  67. mcli/ml/api/routers/websocket_router.py +76 -0
  68. mcli/ml/api/schemas.py +64 -0
  69. mcli/ml/auth/auth_manager.py +425 -0
  70. mcli/ml/auth/models.py +154 -0
  71. mcli/ml/auth/permissions.py +302 -0
  72. mcli/ml/backtesting/backtest_engine.py +502 -0
  73. mcli/ml/backtesting/performance_metrics.py +393 -0
  74. mcli/ml/cache.py +400 -0
  75. mcli/ml/cli/main.py +398 -0
  76. mcli/ml/config/settings.py +394 -0
  77. mcli/ml/configs/dvc_config.py +230 -0
  78. mcli/ml/configs/mlflow_config.py +131 -0
  79. mcli/ml/configs/mlops_manager.py +293 -0
  80. mcli/ml/dashboard/app.py +532 -0
  81. mcli/ml/dashboard/app_integrated.py +738 -0
  82. mcli/ml/dashboard/app_supabase.py +560 -0
  83. mcli/ml/dashboard/app_training.py +615 -0
  84. mcli/ml/dashboard/cli.py +51 -0
  85. mcli/ml/data_ingestion/api_connectors.py +501 -0
  86. mcli/ml/data_ingestion/data_pipeline.py +567 -0
  87. mcli/ml/data_ingestion/stream_processor.py +512 -0
  88. mcli/ml/database/migrations/env.py +94 -0
  89. mcli/ml/database/models.py +667 -0
  90. mcli/ml/database/session.py +200 -0
  91. mcli/ml/experimentation/ab_testing.py +845 -0
  92. mcli/ml/features/ensemble_features.py +607 -0
  93. mcli/ml/features/political_features.py +676 -0
  94. mcli/ml/features/recommendation_engine.py +809 -0
  95. mcli/ml/features/stock_features.py +573 -0
  96. mcli/ml/features/test_feature_engineering.py +346 -0
  97. mcli/ml/logging.py +85 -0
  98. mcli/ml/mlops/data_versioning.py +518 -0
  99. mcli/ml/mlops/experiment_tracker.py +377 -0
  100. mcli/ml/mlops/model_serving.py +481 -0
  101. mcli/ml/mlops/pipeline_orchestrator.py +614 -0
  102. mcli/ml/models/base_models.py +324 -0
  103. mcli/ml/models/ensemble_models.py +675 -0
  104. mcli/ml/models/recommendation_models.py +474 -0
  105. mcli/ml/models/test_models.py +487 -0
  106. mcli/ml/monitoring/drift_detection.py +676 -0
  107. mcli/ml/monitoring/metrics.py +45 -0
  108. mcli/ml/optimization/portfolio_optimizer.py +834 -0
  109. mcli/ml/preprocessing/data_cleaners.py +451 -0
  110. mcli/ml/preprocessing/feature_extractors.py +491 -0
  111. mcli/ml/preprocessing/ml_pipeline.py +382 -0
  112. mcli/ml/preprocessing/politician_trading_preprocessor.py +569 -0
  113. mcli/ml/preprocessing/test_preprocessing.py +294 -0
  114. mcli/ml/scripts/populate_sample_data.py +200 -0
  115. mcli/ml/tasks.py +400 -0
  116. mcli/ml/tests/test_integration.py +429 -0
  117. mcli/ml/tests/test_training_dashboard.py +387 -0
  118. mcli/public/oi/oi.py +15 -0
  119. mcli/public/public.py +4 -0
  120. mcli/self/self_cmd.py +1246 -0
  121. mcli/workflow/daemon/api_daemon.py +800 -0
  122. mcli/workflow/daemon/async_command_database.py +681 -0
  123. mcli/workflow/daemon/async_process_manager.py +591 -0
  124. mcli/workflow/daemon/client.py +530 -0
  125. mcli/workflow/daemon/commands.py +1196 -0
  126. mcli/workflow/daemon/daemon.py +905 -0
  127. mcli/workflow/daemon/daemon_api.py +59 -0
  128. mcli/workflow/daemon/enhanced_daemon.py +571 -0
  129. mcli/workflow/daemon/process_cli.py +244 -0
  130. mcli/workflow/daemon/process_manager.py +439 -0
  131. mcli/workflow/daemon/test_daemon.py +275 -0
  132. mcli/workflow/dashboard/dashboard_cmd.py +113 -0
  133. mcli/workflow/docker/docker.py +0 -0
  134. mcli/workflow/file/file.py +100 -0
  135. mcli/workflow/gcloud/config.toml +21 -0
  136. mcli/workflow/gcloud/gcloud.py +58 -0
  137. mcli/workflow/git_commit/ai_service.py +328 -0
  138. mcli/workflow/git_commit/commands.py +430 -0
  139. mcli/workflow/lsh_integration.py +355 -0
  140. mcli/workflow/model_service/client.py +594 -0
  141. mcli/workflow/model_service/download_and_run_efficient_models.py +288 -0
  142. mcli/workflow/model_service/lightweight_embedder.py +397 -0
  143. mcli/workflow/model_service/lightweight_model_server.py +714 -0
  144. mcli/workflow/model_service/lightweight_test.py +241 -0
  145. mcli/workflow/model_service/model_service.py +1955 -0
  146. mcli/workflow/model_service/ollama_efficient_runner.py +425 -0
  147. mcli/workflow/model_service/pdf_processor.py +386 -0
  148. mcli/workflow/model_service/test_efficient_runner.py +234 -0
  149. mcli/workflow/model_service/test_example.py +315 -0
  150. mcli/workflow/model_service/test_integration.py +131 -0
  151. mcli/workflow/model_service/test_new_features.py +149 -0
  152. mcli/workflow/openai/openai.py +99 -0
  153. mcli/workflow/politician_trading/commands.py +1790 -0
  154. mcli/workflow/politician_trading/config.py +134 -0
  155. mcli/workflow/politician_trading/connectivity.py +490 -0
  156. mcli/workflow/politician_trading/data_sources.py +395 -0
  157. mcli/workflow/politician_trading/database.py +410 -0
  158. mcli/workflow/politician_trading/demo.py +248 -0
  159. mcli/workflow/politician_trading/models.py +165 -0
  160. mcli/workflow/politician_trading/monitoring.py +413 -0
  161. mcli/workflow/politician_trading/scrapers.py +966 -0
  162. mcli/workflow/politician_trading/scrapers_california.py +412 -0
  163. mcli/workflow/politician_trading/scrapers_eu.py +377 -0
  164. mcli/workflow/politician_trading/scrapers_uk.py +350 -0
  165. mcli/workflow/politician_trading/scrapers_us_states.py +438 -0
  166. mcli/workflow/politician_trading/supabase_functions.py +354 -0
  167. mcli/workflow/politician_trading/workflow.py +852 -0
  168. mcli/workflow/registry/registry.py +180 -0
  169. mcli/workflow/repo/repo.py +223 -0
  170. mcli/workflow/scheduler/commands.py +493 -0
  171. mcli/workflow/scheduler/cron_parser.py +238 -0
  172. mcli/workflow/scheduler/job.py +182 -0
  173. mcli/workflow/scheduler/monitor.py +139 -0
  174. mcli/workflow/scheduler/persistence.py +324 -0
  175. mcli/workflow/scheduler/scheduler.py +679 -0
  176. mcli/workflow/sync/sync_cmd.py +437 -0
  177. mcli/workflow/sync/test_cmd.py +314 -0
  178. mcli/workflow/videos/videos.py +242 -0
  179. mcli/workflow/wakatime/wakatime.py +11 -0
  180. mcli/workflow/workflow.py +37 -0
  181. mcli_framework-7.0.0.dist-info/METADATA +479 -0
  182. mcli_framework-7.0.0.dist-info/RECORD +186 -0
  183. mcli_framework-7.0.0.dist-info/WHEEL +5 -0
  184. mcli_framework-7.0.0.dist-info/entry_points.txt +7 -0
  185. mcli_framework-7.0.0.dist-info/licenses/LICENSE +21 -0
  186. mcli_framework-7.0.0.dist-info/top_level.txt +1 -0
mcli/workflow/daemon/async_process_manager.py
@@ -0,0 +1,591 @@
+ import asyncio
+ import json
+ import os
+ import signal
+ import uuid
+ from contextlib import asynccontextmanager
+ from dataclasses import asdict, dataclass
+ from datetime import datetime
+ from enum import Enum
+ from pathlib import Path
+ from typing import Any, Dict, List, Optional, Union
+
+ import aiosqlite
+ import psutil
+ import redis.asyncio as redis
+
+ from mcli.lib.logger.logger import get_logger
+
+ logger = get_logger(__name__)
+
+
+ class ProcessStatus(Enum):
+     CREATED = "created"
+     RUNNING = "running"
+     EXITED = "exited"
+     KILLED = "killed"
+     FAILED = "failed"
+     TIMEOUT = "timeout"
+
+
+ @dataclass
+ class ProcessInfo:
+     """Information about a managed process"""
+
+     id: str
+     name: str
+     command: str
+     args: List[str]
+     status: ProcessStatus
+     pid: Optional[int] = None
+     exit_code: Optional[int] = None
+     created_at: datetime = None
+     started_at: Optional[datetime] = None
+     finished_at: Optional[datetime] = None
+     working_dir: Optional[str] = None
+     environment: Optional[Dict[str, str]] = None
+     stdout_lines: List[str] = None
+     stderr_lines: List[str] = None
+
+     def __post_init__(self):
+         if self.created_at is None:
+             self.created_at = datetime.now()
+         if self.stdout_lines is None:
+             self.stdout_lines = []
+         if self.stderr_lines is None:
+             self.stderr_lines = []
+
+
+ class AsyncProcessContainer:
+     """Manages a single async process with enhanced monitoring"""
+
+     def __init__(self, process_info: ProcessInfo, redis_client: Optional[redis.Redis] = None):
+         self.info = process_info
+         self.process: Optional[asyncio.subprocess.Process] = None
+         self.container_dir: Optional[Path] = None
+         self.stdout_task: Optional[asyncio.Task] = None
+         self.stderr_task: Optional[asyncio.Task] = None
+         self.redis_client = redis_client
+         self._setup_container_environment()
+
+     def _setup_container_environment(self):
+         """Setup isolated environment for the process"""
+         base_dir = Path.home() / ".local" / "mcli" / "containers"
+         self.container_dir = base_dir / self.info.id
+         self.container_dir.mkdir(parents=True, exist_ok=True)
+
+         # Create metadata file
+         metadata_file = self.container_dir / "metadata.json"
+         with open(metadata_file, "w") as f:
+             json.dump(asdict(self.info), f, indent=2, default=str)
+
+     async def start(self, timeout: Optional[float] = None) -> bool:
+         """Start the async process with optional timeout"""
+         try:
+             if self.process and self.process.returncode is None:
+                 logger.warning(f"Process {self.info.id} is already running")
+                 return False
+
+             # Create the subprocess
+             self.process = await asyncio.create_subprocess_exec(
+                 self.info.command,
+                 *self.info.args,
+                 stdout=asyncio.subprocess.PIPE,
+                 stderr=asyncio.subprocess.PIPE,
+                 cwd=self.info.working_dir or str(self.container_dir),
+                 env=self.info.environment or os.environ.copy(),
+             )
+
+             self.info.pid = self.process.pid
+             self.info.status = ProcessStatus.RUNNING
+             self.info.started_at = datetime.now()
+
+             # Start output monitoring tasks
+             self.stdout_task = asyncio.create_task(self._monitor_stdout())
+             self.stderr_task = asyncio.create_task(self._monitor_stderr())
+
+             # Cache process info in Redis if available
+             if self.redis_client:
+                 await self._cache_process_info()
+
+             logger.info(f"Started async process {self.info.id} with PID {self.process.pid}")
+
+             # Handle timeout if specified
+             if timeout:
+                 asyncio.create_task(self._timeout_handler(timeout))
+
+             return True
+
+         except Exception as e:
+             logger.error(f"Failed to start process {self.info.id}: {e}")
+             self.info.status = ProcessStatus.FAILED
+             return False
+
+     async def stop(self, timeout: float = 10.0) -> bool:
+         """Stop the process gracefully with timeout"""
+         if not self.process or self.process.returncode is not None:
+             return True
+
+         try:
+             # Send SIGTERM
+             self.process.terminate()
+
+             # Wait for graceful shutdown with timeout
+             try:
+                 await asyncio.wait_for(self.process.wait(), timeout=timeout)
+                 self.info.status = ProcessStatus.EXITED
+             except asyncio.TimeoutError:
+                 # Force kill if timeout
+                 self.process.kill()
+                 await self.process.wait()
+                 self.info.status = ProcessStatus.KILLED
+
+             self.info.exit_code = self.process.returncode
+             self.info.finished_at = datetime.now()
+
+             # Cancel monitoring tasks
+             if self.stdout_task:
+                 self.stdout_task.cancel()
+             if self.stderr_task:
+                 self.stderr_task.cancel()
+
+             # Update cache
+             if self.redis_client:
+                 await self._cache_process_info()
+
+             logger.info(f"Stopped process {self.info.id}")
+             return True
+
+         except Exception as e:
+             logger.error(f"Failed to stop process {self.info.id}: {e}")
+             return False
+
+     async def kill(self) -> bool:
+         """Force kill the process"""
+         if not self.process or self.process.returncode is not None:
+             return True
+
+         try:
+             self.process.kill()
+             await self.process.wait()
+
+             self.info.status = ProcessStatus.KILLED
+             self.info.exit_code = self.process.returncode
+             self.info.finished_at = datetime.now()
+
+             # Cancel monitoring tasks
+             if self.stdout_task:
+                 self.stdout_task.cancel()
+             if self.stderr_task:
+                 self.stderr_task.cancel()
+
+             # Update cache
+             if self.redis_client:
+                 await self._cache_process_info()
+
+             logger.info(f"Killed process {self.info.id}")
+             return True
+
+         except Exception as e:
+             logger.error(f"Failed to kill process {self.info.id}: {e}")
+             return False
+
+     async def wait(self, timeout: Optional[float] = None) -> int:
+         """Wait for process to complete with optional timeout"""
+         if not self.process:
+             raise RuntimeError("Process not started")
+
+         if timeout:
+             try:
+                 await asyncio.wait_for(self.process.wait(), timeout=timeout)
+             except asyncio.TimeoutError:
+                 self.info.status = ProcessStatus.TIMEOUT
+                 self.info.finished_at = datetime.now()
+                 raise
+         else:
+             await self.process.wait()
+
+         self.info.exit_code = self.process.returncode
+         self.info.status = (
+             ProcessStatus.EXITED if self.process.returncode == 0 else ProcessStatus.FAILED
+         )
+         self.info.finished_at = datetime.now()
+
+         # Update cache
+         if self.redis_client:
+             await self._cache_process_info()
+
+         return self.process.returncode
+
+     async def _monitor_stdout(self):
+         """Monitor stdout and collect lines"""
+         if not self.process or not self.process.stdout:
+             return
+
+         try:
+             while True:
+                 line = await self.process.stdout.readline()
+                 if not line:
+                     break
+
+                 line_str = line.decode("utf-8", errors="replace").strip()
+                 self.info.stdout_lines.append(line_str)
+
+                 # Limit memory usage - keep only last 1000 lines
+                 if len(self.info.stdout_lines) > 1000:
+                     self.info.stdout_lines = self.info.stdout_lines[-1000:]
+
+                 # Stream to Redis if available
+                 if self.redis_client:
+                     await self.redis_client.lpush(f"process:{self.info.id}:stdout", line_str)
+                     await self.redis_client.ltrim(f"process:{self.info.id}:stdout", 0, 999)
+
+         except asyncio.CancelledError:
+             pass
+         except Exception as e:
+             logger.error(f"Error monitoring stdout for {self.info.id}: {e}")
+
+     async def _monitor_stderr(self):
+         """Monitor stderr and collect lines"""
+         if not self.process or not self.process.stderr:
+             return
+
+         try:
+             while True:
+                 line = await self.process.stderr.readline()
+                 if not line:
+                     break
+
+                 line_str = line.decode("utf-8", errors="replace").strip()
+                 self.info.stderr_lines.append(line_str)
+
+                 # Limit memory usage - keep only last 1000 lines
+                 if len(self.info.stderr_lines) > 1000:
+                     self.info.stderr_lines = self.info.stderr_lines[-1000:]
+
+                 # Stream to Redis if available
+                 if self.redis_client:
+                     await self.redis_client.lpush(f"process:{self.info.id}:stderr", line_str)
+                     await self.redis_client.ltrim(f"process:{self.info.id}:stderr", 0, 999)
+
+         except asyncio.CancelledError:
+             pass
+         except Exception as e:
+             logger.error(f"Error monitoring stderr for {self.info.id}: {e}")
+
+     async def _timeout_handler(self, timeout: float):
+         """Handle process timeout"""
+         await asyncio.sleep(timeout)
+
+         if self.process and self.process.returncode is None:
+             logger.warning(f"Process {self.info.id} timed out after {timeout}s")
+             await self.kill()
+
+     async def _cache_process_info(self):
+         """Cache process info in Redis"""
+         if not self.redis_client:
+             return
+
+         try:
+             process_data = {
+                 "id": self.info.id,
+                 "name": self.info.name,
+                 "status": self.info.status.value,
+                 "pid": self.info.pid,
+                 "exit_code": self.info.exit_code,
+                 "created_at": self.info.created_at.isoformat() if self.info.created_at else None,
+                 "started_at": self.info.started_at.isoformat() if self.info.started_at else None,
+                 "finished_at": self.info.finished_at.isoformat() if self.info.finished_at else None,
+             }
+
+             await self.redis_client.hset(f"process:{self.info.id}:info", mapping=process_data)
+             await self.redis_client.expire(f"process:{self.info.id}:info", 3600)  # 1 hour TTL
+
+         except Exception as e:
+             logger.error(f"Failed to cache process info for {self.info.id}: {e}")
+
+
+ class AsyncProcessManager:
+     """High-performance async process manager with SQLite and Redis"""
+
+     def __init__(self, db_path: Optional[str] = None, redis_url: Optional[str] = None):
+         if db_path is None:
+             db_path = Path.home() / ".local" / "mcli" / "daemon" / "processes.db"
+
+         self.db_path = Path(db_path)
+         self.db_path.parent.mkdir(parents=True, exist_ok=True)
+
+         self.processes: Dict[str, AsyncProcessContainer] = {}
+         self.redis_url = redis_url or "redis://localhost:6379"
+         self.redis_client: Optional[redis.Redis] = None
+
+         # Connection pool for SQLite
+         self._db_pool_size = 10
+         self._db_pool: List[aiosqlite.Connection] = []
+         self._db_pool_lock = asyncio.Lock()
+
+     async def initialize(self):
+         """Initialize the process manager"""
+         await self._init_database()
+         await self._init_redis()
+         await self._init_db_pool()
+
+     async def _init_database(self):
+         """Initialize SQLite database with optimizations"""
+         async with aiosqlite.connect(self.db_path) as db:
+             # Enable WAL mode for better concurrency
+             await db.execute("PRAGMA journal_mode=WAL")
+             await db.execute("PRAGMA synchronous=NORMAL")
+             await db.execute("PRAGMA cache_size=10000")
+             await db.execute("PRAGMA temp_store=memory")
+
+             # Create processes table
+             await db.execute(
+                 """
+                 CREATE TABLE IF NOT EXISTS processes (
+                     id TEXT PRIMARY KEY,
+                     name TEXT NOT NULL,
+                     command TEXT NOT NULL,
+                     args TEXT NOT NULL,
+                     status TEXT NOT NULL,
+                     pid INTEGER,
+                     exit_code INTEGER,
+                     created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                     started_at TIMESTAMP,
+                     finished_at TIMESTAMP,
+                     working_dir TEXT,
+                     environment TEXT,
+                     stdout_lines TEXT,
+                     stderr_lines TEXT
+                 )
+                 """
+             )
+
+             # Create indexes for better performance
+             await db.execute("CREATE INDEX IF NOT EXISTS idx_processes_status ON processes(status)")
+             await db.execute(
+                 "CREATE INDEX IF NOT EXISTS idx_processes_created_at ON processes(created_at)"
+             )
+
+             await db.commit()
+
+     async def _init_redis(self):
+         """Initialize Redis connection for caching"""
+         try:
+             self.redis_client = redis.from_url(self.redis_url, decode_responses=True)
+             await self.redis_client.ping()
+             logger.info("Connected to Redis for caching")
+         except Exception as e:
+             logger.warning(f"Failed to connect to Redis: {e}. Caching disabled.")
+             self.redis_client = None
+
+     async def _init_db_pool(self):
+         """Initialize connection pool for SQLite"""
+         async with self._db_pool_lock:
+             for _ in range(self._db_pool_size):
+                 conn = await aiosqlite.connect(self.db_path)
+                 await conn.execute("PRAGMA journal_mode=WAL")
+                 self._db_pool.append(conn)
+
+     @asynccontextmanager
+     async def _get_db_connection(self):
+         """Get a database connection from the pool"""
+         async with self._db_pool_lock:
+             if self._db_pool:
+                 conn = self._db_pool.pop()
+             else:
+                 conn = await aiosqlite.connect(self.db_path)
+                 await conn.execute("PRAGMA journal_mode=WAL")
+
+         try:
+             yield conn
+         finally:
+             async with self._db_pool_lock:
+                 if len(self._db_pool) < self._db_pool_size:
+                     self._db_pool.append(conn)
+                 else:
+                     await conn.close()
+
+     async def start_process(
+         self,
+         name: str,
+         command: str,
+         args: List[str],
+         working_dir: Optional[str] = None,
+         environment: Optional[Dict[str, str]] = None,
+         timeout: Optional[float] = None,
+     ) -> str:
+         """Start a new async process"""
+         process_info = ProcessInfo(
+             id=str(uuid.uuid4()),
+             name=name,
+             command=command,
+             args=args,
+             status=ProcessStatus.CREATED,
+             working_dir=working_dir,
+             environment=environment,
+         )
+
+         container = AsyncProcessContainer(process_info, self.redis_client)
+         self.processes[process_info.id] = container
+
+         # Save to database
+         await self._save_process_info(process_info)
+
+         # Start the process
+         success = await container.start(timeout)
+         if success:
+             await self._save_process_info(process_info)
+             return process_info.id
+         else:
+             del self.processes[process_info.id]
+             raise RuntimeError(f"Failed to start process: {name}")
+
+     async def stop_process(self, process_id: str, timeout: float = 10.0) -> bool:
+         """Stop a process gracefully"""
+         if process_id not in self.processes:
+             raise KeyError(f"Process not found: {process_id}")
+
+         container = self.processes[process_id]
+         success = await container.stop(timeout)
+
+         if success:
+             await self._save_process_info(container.info)
+
+         return success
+
+     async def kill_process(self, process_id: str) -> bool:
+         """Force kill a process"""
+         if process_id not in self.processes:
+             raise KeyError(f"Process not found: {process_id}")
+
+         container = self.processes[process_id]
+         success = await container.kill()
+
+         if success:
+             await self._save_process_info(container.info)
+
+         return success
+
+     async def get_process_info(self, process_id: str) -> ProcessInfo:
+         """Get process information"""
+         if process_id in self.processes:
+             return self.processes[process_id].info
+
+         # Try to load from database
+         async with self._get_db_connection() as db:
+             async with db.execute("SELECT * FROM processes WHERE id = ?", (process_id,)) as cursor:
+                 row = await cursor.fetchone()
+                 if row:
+                     return self._row_to_process_info(row)
+
+         raise KeyError(f"Process not found: {process_id}")
+
+     async def list_processes(self, status_filter: Optional[str] = None) -> List[ProcessInfo]:
+         """List all processes with optional status filter"""
+         processes = []
+
+         # Add active processes
+         for container in self.processes.values():
+             if not status_filter or container.info.status.value == status_filter:
+                 processes.append(container.info)
+
+         # Add historical processes from database
+         query = "SELECT * FROM processes"
+         params = []
+
+         if status_filter:
+             query += " WHERE status = ?"
+             params.append(status_filter)
+
+         query += " ORDER BY created_at DESC"
+
+         async with self._get_db_connection() as db:
+             async with db.execute(query, params) as cursor:
+                 async for row in cursor:
+                     process_info = self._row_to_process_info(row)
+                     # Avoid duplicates
+                     if process_info.id not in self.processes:
+                         processes.append(process_info)
+
+         return processes
+
+     async def cleanup_finished(self) -> List[str]:
+         """Remove finished processes from memory"""
+         finished_ids = []
+
+         for process_id, container in list(self.processes.items()):
+             if container.info.status in [
+                 ProcessStatus.EXITED,
+                 ProcessStatus.FAILED,
+                 ProcessStatus.KILLED,
+                 ProcessStatus.TIMEOUT,
+             ]:
+                 finished_ids.append(process_id)
+                 del self.processes[process_id]
+
+         return finished_ids
+
+     async def _save_process_info(self, process_info: ProcessInfo):
+         """Save process info to database"""
+         async with self._get_db_connection() as db:
+             await db.execute(
+                 """
+                 INSERT OR REPLACE INTO processes
+                 (id, name, command, args, status, pid, exit_code, created_at,
+                  started_at, finished_at, working_dir, environment, stdout_lines, stderr_lines)
+                 VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                 """,
+                 (
+                     process_info.id,
+                     process_info.name,
+                     process_info.command,
+                     json.dumps(process_info.args),
+                     process_info.status.value,
+                     process_info.pid,
+                     process_info.exit_code,
+                     process_info.created_at.isoformat() if process_info.created_at else None,
+                     process_info.started_at.isoformat() if process_info.started_at else None,
+                     process_info.finished_at.isoformat() if process_info.finished_at else None,
+                     process_info.working_dir,
+                     json.dumps(process_info.environment) if process_info.environment else None,
+                     json.dumps(process_info.stdout_lines),
+                     json.dumps(process_info.stderr_lines),
+                 ),
+             )
+             await db.commit()
+
+     def _row_to_process_info(self, row) -> ProcessInfo:
+         """Convert database row to ProcessInfo"""
+         return ProcessInfo(
+             id=row[0],
+             name=row[1],
+             command=row[2],
+             args=json.loads(row[3]),
+             status=ProcessStatus(row[4]),
+             pid=row[5],
+             exit_code=row[6],
+             created_at=datetime.fromisoformat(row[7]) if row[7] else None,
+             started_at=datetime.fromisoformat(row[8]) if row[8] else None,
+             finished_at=datetime.fromisoformat(row[9]) if row[9] else None,
+             working_dir=row[10],
+             environment=json.loads(row[11]) if row[11] else None,
+             stdout_lines=json.loads(row[12]) if row[12] else [],
+             stderr_lines=json.loads(row[13]) if row[13] else [],
+         )
+
+     async def close(self):
+         """Clean up resources"""
+         # Close all active processes
+         for container in self.processes.values():
+             await container.stop()
+
+         # Close database connections
+         async with self._db_pool_lock:
+             for conn in self._db_pool:
+                 await conn.close()
+             self._db_pool.clear()
+
+         # Close Redis connection
+         if self.redis_client:
+             await self.redis_client.close()
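To make the new daemon API concrete, here is a minimal, illustrative usage sketch driving AsyncProcessManager end to end. It is an editor's sketch, not code from the package: it assumes mcli-framework 7.0.0 is installed and relies only on the defaults visible above (the SQLite database under ~/.local/mcli/daemon/processes.db and Redis at redis://localhost:6379, with the manager degrading to SQLite-only operation and logging a warning when Redis is unreachable).

import asyncio

from mcli.workflow.daemon.async_process_manager import AsyncProcessManager


async def main() -> None:
    manager = AsyncProcessManager()  # default db_path and redis_url as above
    await manager.initialize()
    try:
        # start_process() generates a UUID id, persists the record, and
        # raises RuntimeError if the subprocess fails to launch.
        process_id = await manager.start_process(
            name="demo-echo", command="echo", args=["hello", "world"]
        )

        # Waiting on the live container records the exit code and final
        # status on its ProcessInfo record.
        container = manager.processes[process_id]
        exit_code = await container.wait(timeout=10.0)

        # Output is collected by background reader tasks, so yield briefly
        # before inspecting the captured lines.
        await asyncio.sleep(0.1)
        print(exit_code, container.info.stdout_lines)
    finally:
        await manager.close()


asyncio.run(main())

When Redis is reachable, the same run also leaves behind a process:{id}:info hash with a one-hour TTL plus process:{id}:stdout and process:{id}:stderr lists trimmed to 1,000 entries each, so recent output can be read straight from Redis as well as from the SQLite history.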