mcli-framework 7.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mcli-framework might be problematic. Click here for more details.
- mcli/app/chat_cmd.py +42 -0
- mcli/app/commands_cmd.py +226 -0
- mcli/app/completion_cmd.py +216 -0
- mcli/app/completion_helpers.py +288 -0
- mcli/app/cron_test_cmd.py +697 -0
- mcli/app/logs_cmd.py +419 -0
- mcli/app/main.py +492 -0
- mcli/app/model/model.py +1060 -0
- mcli/app/model_cmd.py +227 -0
- mcli/app/redis_cmd.py +269 -0
- mcli/app/video/video.py +1114 -0
- mcli/app/visual_cmd.py +303 -0
- mcli/chat/chat.py +2409 -0
- mcli/chat/command_rag.py +514 -0
- mcli/chat/enhanced_chat.py +652 -0
- mcli/chat/system_controller.py +1010 -0
- mcli/chat/system_integration.py +1016 -0
- mcli/cli.py +25 -0
- mcli/config.toml +20 -0
- mcli/lib/api/api.py +586 -0
- mcli/lib/api/daemon_client.py +203 -0
- mcli/lib/api/daemon_client_local.py +44 -0
- mcli/lib/api/daemon_decorator.py +217 -0
- mcli/lib/api/mcli_decorators.py +1032 -0
- mcli/lib/auth/auth.py +85 -0
- mcli/lib/auth/aws_manager.py +85 -0
- mcli/lib/auth/azure_manager.py +91 -0
- mcli/lib/auth/credential_manager.py +192 -0
- mcli/lib/auth/gcp_manager.py +93 -0
- mcli/lib/auth/key_manager.py +117 -0
- mcli/lib/auth/mcli_manager.py +93 -0
- mcli/lib/auth/token_manager.py +75 -0
- mcli/lib/auth/token_util.py +1011 -0
- mcli/lib/config/config.py +47 -0
- mcli/lib/discovery/__init__.py +1 -0
- mcli/lib/discovery/command_discovery.py +274 -0
- mcli/lib/erd/erd.py +1345 -0
- mcli/lib/erd/generate_graph.py +453 -0
- mcli/lib/files/files.py +76 -0
- mcli/lib/fs/fs.py +109 -0
- mcli/lib/lib.py +29 -0
- mcli/lib/logger/logger.py +611 -0
- mcli/lib/performance/optimizer.py +409 -0
- mcli/lib/performance/rust_bridge.py +502 -0
- mcli/lib/performance/uvloop_config.py +154 -0
- mcli/lib/pickles/pickles.py +50 -0
- mcli/lib/search/cached_vectorizer.py +479 -0
- mcli/lib/services/data_pipeline.py +460 -0
- mcli/lib/services/lsh_client.py +441 -0
- mcli/lib/services/redis_service.py +387 -0
- mcli/lib/shell/shell.py +137 -0
- mcli/lib/toml/toml.py +33 -0
- mcli/lib/ui/styling.py +47 -0
- mcli/lib/ui/visual_effects.py +634 -0
- mcli/lib/watcher/watcher.py +185 -0
- mcli/ml/api/app.py +215 -0
- mcli/ml/api/middleware.py +224 -0
- mcli/ml/api/routers/admin_router.py +12 -0
- mcli/ml/api/routers/auth_router.py +244 -0
- mcli/ml/api/routers/backtest_router.py +12 -0
- mcli/ml/api/routers/data_router.py +12 -0
- mcli/ml/api/routers/model_router.py +302 -0
- mcli/ml/api/routers/monitoring_router.py +12 -0
- mcli/ml/api/routers/portfolio_router.py +12 -0
- mcli/ml/api/routers/prediction_router.py +267 -0
- mcli/ml/api/routers/trade_router.py +12 -0
- mcli/ml/api/routers/websocket_router.py +76 -0
- mcli/ml/api/schemas.py +64 -0
- mcli/ml/auth/auth_manager.py +425 -0
- mcli/ml/auth/models.py +154 -0
- mcli/ml/auth/permissions.py +302 -0
- mcli/ml/backtesting/backtest_engine.py +502 -0
- mcli/ml/backtesting/performance_metrics.py +393 -0
- mcli/ml/cache.py +400 -0
- mcli/ml/cli/main.py +398 -0
- mcli/ml/config/settings.py +394 -0
- mcli/ml/configs/dvc_config.py +230 -0
- mcli/ml/configs/mlflow_config.py +131 -0
- mcli/ml/configs/mlops_manager.py +293 -0
- mcli/ml/dashboard/app.py +532 -0
- mcli/ml/dashboard/app_integrated.py +738 -0
- mcli/ml/dashboard/app_supabase.py +560 -0
- mcli/ml/dashboard/app_training.py +615 -0
- mcli/ml/dashboard/cli.py +51 -0
- mcli/ml/data_ingestion/api_connectors.py +501 -0
- mcli/ml/data_ingestion/data_pipeline.py +567 -0
- mcli/ml/data_ingestion/stream_processor.py +512 -0
- mcli/ml/database/migrations/env.py +94 -0
- mcli/ml/database/models.py +667 -0
- mcli/ml/database/session.py +200 -0
- mcli/ml/experimentation/ab_testing.py +845 -0
- mcli/ml/features/ensemble_features.py +607 -0
- mcli/ml/features/political_features.py +676 -0
- mcli/ml/features/recommendation_engine.py +809 -0
- mcli/ml/features/stock_features.py +573 -0
- mcli/ml/features/test_feature_engineering.py +346 -0
- mcli/ml/logging.py +85 -0
- mcli/ml/mlops/data_versioning.py +518 -0
- mcli/ml/mlops/experiment_tracker.py +377 -0
- mcli/ml/mlops/model_serving.py +481 -0
- mcli/ml/mlops/pipeline_orchestrator.py +614 -0
- mcli/ml/models/base_models.py +324 -0
- mcli/ml/models/ensemble_models.py +675 -0
- mcli/ml/models/recommendation_models.py +474 -0
- mcli/ml/models/test_models.py +487 -0
- mcli/ml/monitoring/drift_detection.py +676 -0
- mcli/ml/monitoring/metrics.py +45 -0
- mcli/ml/optimization/portfolio_optimizer.py +834 -0
- mcli/ml/preprocessing/data_cleaners.py +451 -0
- mcli/ml/preprocessing/feature_extractors.py +491 -0
- mcli/ml/preprocessing/ml_pipeline.py +382 -0
- mcli/ml/preprocessing/politician_trading_preprocessor.py +569 -0
- mcli/ml/preprocessing/test_preprocessing.py +294 -0
- mcli/ml/scripts/populate_sample_data.py +200 -0
- mcli/ml/tasks.py +400 -0
- mcli/ml/tests/test_integration.py +429 -0
- mcli/ml/tests/test_training_dashboard.py +387 -0
- mcli/public/oi/oi.py +15 -0
- mcli/public/public.py +4 -0
- mcli/self/self_cmd.py +1246 -0
- mcli/workflow/daemon/api_daemon.py +800 -0
- mcli/workflow/daemon/async_command_database.py +681 -0
- mcli/workflow/daemon/async_process_manager.py +591 -0
- mcli/workflow/daemon/client.py +530 -0
- mcli/workflow/daemon/commands.py +1196 -0
- mcli/workflow/daemon/daemon.py +905 -0
- mcli/workflow/daemon/daemon_api.py +59 -0
- mcli/workflow/daemon/enhanced_daemon.py +571 -0
- mcli/workflow/daemon/process_cli.py +244 -0
- mcli/workflow/daemon/process_manager.py +439 -0
- mcli/workflow/daemon/test_daemon.py +275 -0
- mcli/workflow/dashboard/dashboard_cmd.py +113 -0
- mcli/workflow/docker/docker.py +0 -0
- mcli/workflow/file/file.py +100 -0
- mcli/workflow/gcloud/config.toml +21 -0
- mcli/workflow/gcloud/gcloud.py +58 -0
- mcli/workflow/git_commit/ai_service.py +328 -0
- mcli/workflow/git_commit/commands.py +430 -0
- mcli/workflow/lsh_integration.py +355 -0
- mcli/workflow/model_service/client.py +594 -0
- mcli/workflow/model_service/download_and_run_efficient_models.py +288 -0
- mcli/workflow/model_service/lightweight_embedder.py +397 -0
- mcli/workflow/model_service/lightweight_model_server.py +714 -0
- mcli/workflow/model_service/lightweight_test.py +241 -0
- mcli/workflow/model_service/model_service.py +1955 -0
- mcli/workflow/model_service/ollama_efficient_runner.py +425 -0
- mcli/workflow/model_service/pdf_processor.py +386 -0
- mcli/workflow/model_service/test_efficient_runner.py +234 -0
- mcli/workflow/model_service/test_example.py +315 -0
- mcli/workflow/model_service/test_integration.py +131 -0
- mcli/workflow/model_service/test_new_features.py +149 -0
- mcli/workflow/openai/openai.py +99 -0
- mcli/workflow/politician_trading/commands.py +1790 -0
- mcli/workflow/politician_trading/config.py +134 -0
- mcli/workflow/politician_trading/connectivity.py +490 -0
- mcli/workflow/politician_trading/data_sources.py +395 -0
- mcli/workflow/politician_trading/database.py +410 -0
- mcli/workflow/politician_trading/demo.py +248 -0
- mcli/workflow/politician_trading/models.py +165 -0
- mcli/workflow/politician_trading/monitoring.py +413 -0
- mcli/workflow/politician_trading/scrapers.py +966 -0
- mcli/workflow/politician_trading/scrapers_california.py +412 -0
- mcli/workflow/politician_trading/scrapers_eu.py +377 -0
- mcli/workflow/politician_trading/scrapers_uk.py +350 -0
- mcli/workflow/politician_trading/scrapers_us_states.py +438 -0
- mcli/workflow/politician_trading/supabase_functions.py +354 -0
- mcli/workflow/politician_trading/workflow.py +852 -0
- mcli/workflow/registry/registry.py +180 -0
- mcli/workflow/repo/repo.py +223 -0
- mcli/workflow/scheduler/commands.py +493 -0
- mcli/workflow/scheduler/cron_parser.py +238 -0
- mcli/workflow/scheduler/job.py +182 -0
- mcli/workflow/scheduler/monitor.py +139 -0
- mcli/workflow/scheduler/persistence.py +324 -0
- mcli/workflow/scheduler/scheduler.py +679 -0
- mcli/workflow/sync/sync_cmd.py +437 -0
- mcli/workflow/sync/test_cmd.py +314 -0
- mcli/workflow/videos/videos.py +242 -0
- mcli/workflow/wakatime/wakatime.py +11 -0
- mcli/workflow/workflow.py +37 -0
- mcli_framework-7.0.0.dist-info/METADATA +479 -0
- mcli_framework-7.0.0.dist-info/RECORD +186 -0
- mcli_framework-7.0.0.dist-info/WHEEL +5 -0
- mcli_framework-7.0.0.dist-info/entry_points.txt +7 -0
- mcli_framework-7.0.0.dist-info/licenses/LICENSE +21 -0
- mcli_framework-7.0.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from typing import Optional
|
|
3
|
+
|
|
4
|
+
import click
|
|
5
|
+
import requests
|
|
6
|
+
|
|
7
|
+
from mcli.lib.logger.logger import get_logger
|
|
8
|
+
|
|
9
|
+
logger = get_logger(__name__)
|
|
10
|
+
|
|
11
|
+
# Default API URL - should match the daemon configuration
|
|
12
|
+
API_BASE_URL = "http://localhost:8000"
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
@click.group(name="process")
def process_cli():
    """Docker-like process management commands"""
    # Group entry point only; the actual subcommands (ps, run, logs, inspect,
    # stop, start, kill, rm) are registered on this group below.
    pass
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
@process_cli.command("ps")
@click.option(
    # Explicit target name "show_all" so the Python parameter no longer
    # shadows the builtin `all` (the CLI flag itself is unchanged).
    "--all", "-a", "show_all", is_flag=True, help="Show all processes including exited ones"
)
@click.option("--json", "as_json", is_flag=True, help="Output as JSON")
def list_processes(show_all: bool, as_json: bool):
    """List processes (like 'docker ps')"""
    try:
        # The daemon omits exited/killed processes unless all=true is sent.
        params = {"all": "true"} if show_all else {}
        response = requests.get(f"{API_BASE_URL}/processes", params=params)

        if response.status_code == 200:
            data = response.json()
            processes = data.get("processes", [])

            if as_json:
                click.echo(json.dumps(data, indent=2))
                return

            if not processes:
                click.echo("No processes found")
                return

            # Print header
            click.echo(
                f"{'CONTAINER ID':<13} {'NAME':<15} {'COMMAND':<25} {'STATUS':<10} {'UPTIME':<10} {'CPU':<8} {'MEMORY'}"
            )
            click.echo("-" * 90)

            # Print process rows; command is truncated to 24 chars to keep
            # the columns aligned with the header.
            for proc in processes:
                click.echo(
                    f"{proc['id']:<13} {proc['name']:<15} {proc['command'][:24]:<25} {proc['status']:<10} {proc['uptime']:<10} {proc['cpu']:<8} {proc['memory']}"
                )
        else:
            click.echo(f"Error: HTTP {response.status_code}")

    except requests.exceptions.RequestException as e:
        click.echo(f"Error connecting to daemon: {e}")
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
@process_cli.command("run")
@click.argument("command")
@click.argument("args", nargs=-1)
@click.option("--name", help="Name for the process container")
@click.option("--detach", "-d", is_flag=True, default=True, help="Run in detached mode")
@click.option("--working-dir", help="Working directory inside container")
def run_process(
    command: str, args: tuple, name: Optional[str], detach: bool, working_dir: Optional[str]
):
    """Create and start a process (like 'docker run')"""
    # Assemble the request payload up front; the working dir is optional and
    # only included when the caller supplied one.
    payload = {
        "name": name or f"proc-{command}",
        "command": command,
        "args": list(args),
        "detach": detach,
    }
    if working_dir:
        payload["working_dir"] = working_dir

    try:
        response = requests.post(f"{API_BASE_URL}/processes/run", json=payload)

        if response.status_code == 200:
            result = response.json()
            click.echo(f"Started process with ID: {result['id']}")
            if detach:
                click.echo("Use 'mcli workflow daemon process logs <id>' to view output")
        else:
            click.echo(f"Error: HTTP {response.status_code}")
            if response.text:
                click.echo(response.text)

    except requests.exceptions.RequestException as e:
        click.echo(f"Error connecting to daemon: {e}")
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
@process_cli.command("logs")
@click.argument("process_id")
@click.option("--lines", "-n", type=int, help="Number of lines to show from end of logs")
def show_logs(process_id: str, lines: Optional[int]):
    """Show logs for a process (like 'docker logs')"""
    try:
        params = {}
        # Fix: compare against None so an explicit `-n 0` is still forwarded
        # to the daemon (the old truthiness check silently dropped 0).
        if lines is not None:
            params["lines"] = lines

        response = requests.get(f"{API_BASE_URL}/processes/{process_id}/logs", params=params)

        if response.status_code == 200:
            logs = response.json()

            # Emit stdout first, then stderr, with nl=False so the output
            # matches the captured log files exactly.
            if logs.get("stdout"):
                click.echo(logs["stdout"], nl=False)
            if logs.get("stderr"):
                click.echo(logs["stderr"], nl=False)
            if not logs.get("stdout") and not logs.get("stderr"):
                click.echo("No logs available")
        elif response.status_code == 404:
            click.echo(f"Process {process_id} not found")
        else:
            click.echo(f"Error: HTTP {response.status_code}")

    except requests.exceptions.RequestException as e:
        click.echo(f"Error connecting to daemon: {e}")
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
@process_cli.command("inspect")
@click.argument("process_id")
@click.option("--json", "as_json", is_flag=True, help="Output as JSON")
def inspect_process(process_id: str, as_json: bool):
    """Show detailed information about a process (like 'docker inspect')"""
    try:
        response = requests.get(f"{API_BASE_URL}/processes/{process_id}")

        # Guard clauses for the error paths; the success path follows below.
        if response.status_code == 404:
            click.echo(f"Process {process_id} not found")
            return
        if response.status_code != 200:
            click.echo(f"Error: HTTP {response.status_code}")
            return

        info = response.json()

        if as_json:
            click.echo(json.dumps(info, indent=2))
            return

        # Human-readable summary of the container metadata.
        click.echo(f"Process ID: {info['id']}")
        click.echo(f"Name: {info['name']}")
        click.echo(f"Status: {info['status']}")
        click.echo(f"PID: {info.get('pid', 'N/A')}")
        click.echo(f"Command: {info['command']} {' '.join(info.get('args', []))}")
        click.echo(f"Working Dir: {info.get('working_dir', 'N/A')}")
        click.echo(f"Created: {info.get('created_at', 'N/A')}")
        click.echo(f"Started: {info.get('started_at', 'N/A')}")

        stats = info.get("stats")
        if stats:
            click.echo(f"\nResource Usage:")
            click.echo(f"  CPU: {stats.get('cpu_percent', 0):.1f}%")
            click.echo(f"  Memory: {stats.get('memory_mb', 0):.1f} MB")
            click.echo(f"  Uptime: {stats.get('uptime_seconds', 0)} seconds")

    except requests.exceptions.RequestException as e:
        click.echo(f"Error connecting to daemon: {e}")
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
@process_cli.command("stop")
@click.argument("process_id")
@click.option("--timeout", "-t", type=int, default=10, help="Timeout in seconds")
def stop_process(process_id: str, timeout: int):
    """Stop a process (like 'docker stop')"""
    try:
        payload = {"timeout": timeout}
        response = requests.post(f"{API_BASE_URL}/processes/{process_id}/stop", json=payload)

        status = response.status_code
        if status == 200:
            click.echo(f"Process {process_id} stopped")
        elif status == 404:
            click.echo(f"Process {process_id} not found")
        else:
            click.echo(f"Error: HTTP {status}")

    except requests.exceptions.RequestException as e:
        click.echo(f"Error connecting to daemon: {e}")
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
@process_cli.command("start")
@click.argument("process_id")
def start_process(process_id: str):
    """Start a stopped process (like 'docker start')"""
    try:
        response = requests.post(f"{API_BASE_URL}/processes/{process_id}/start")

        status = response.status_code
        if status == 200:
            click.echo(f"Process {process_id} started")
        elif status == 404:
            click.echo(f"Process {process_id} not found")
        else:
            click.echo(f"Error: HTTP {status}")

    except requests.exceptions.RequestException as e:
        click.echo(f"Error connecting to daemon: {e}")
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
@process_cli.command("kill")
@click.argument("process_id")
def kill_process(process_id: str):
    """Kill a process (like 'docker kill')"""
    try:
        response = requests.post(f"{API_BASE_URL}/processes/{process_id}/kill")

        status = response.status_code
        if status == 200:
            click.echo(f"Process {process_id} killed")
        elif status == 404:
            click.echo(f"Process {process_id} not found")
        else:
            click.echo(f"Error: HTTP {status}")

    except requests.exceptions.RequestException as e:
        click.echo(f"Error connecting to daemon: {e}")
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
@process_cli.command("rm")
@click.argument("process_id")
@click.option("--force", "-f", is_flag=True, help="Force remove running process")
def remove_process(process_id: str, force: bool):
    """Remove a process (like 'docker rm')"""
    try:
        query = {"force": "true"} if force else {}
        response = requests.delete(f"{API_BASE_URL}/processes/{process_id}", params=query)

        status = response.status_code
        if status == 200:
            click.echo(f"Process {process_id} removed")
        elif status == 404:
            click.echo(f"Process {process_id} not found")
        else:
            click.echo(f"Error: HTTP {status}")

    except requests.exceptions.RequestException as e:
        click.echo(f"Error connecting to daemon: {e}")
|
|
241
|
+
|
|
242
|
+
|
|
243
|
+
if __name__ == "__main__":
    # Allow this module to be executed directly as a standalone CLI.
    process_cli()
|
|
@@ -0,0 +1,439 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import os
|
|
3
|
+
import signal
|
|
4
|
+
import subprocess
|
|
5
|
+
import tempfile
|
|
6
|
+
import threading
|
|
7
|
+
import time
|
|
8
|
+
import uuid
|
|
9
|
+
from dataclasses import asdict, dataclass
|
|
10
|
+
from datetime import datetime
|
|
11
|
+
from enum import Enum
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Any, Dict, List, Optional
|
|
14
|
+
|
|
15
|
+
import psutil
|
|
16
|
+
|
|
17
|
+
from mcli.lib.logger.logger import get_logger
|
|
18
|
+
|
|
19
|
+
logger = get_logger(__name__)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class ProcessStatus(Enum):
    """Lifecycle states for a managed process (Docker-container style)."""

    CREATED = "created"  # container exists but has never been started
    RUNNING = "running"  # process is currently alive
    EXITED = "exited"    # process terminated (naturally or via graceful stop)
    KILLED = "killed"    # process was force-terminated with SIGKILL
    FAILED = "failed"    # process could not be started
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
@dataclass
class ProcessInfo:
    """Information about a managed process.

    Captures the identity, command line, lifecycle status, and timestamps of a
    single containerized process. Serialized to/from ``metadata.json`` by
    ``ProcessContainer`` / ``ProcessManager``.
    """

    id: str                      # unique container id (a UUID string)
    name: str                    # human-friendly container name
    command: str                 # executable to run
    args: List[str]              # argument vector passed after the command
    status: ProcessStatus        # current lifecycle state
    pid: Optional[int] = None    # OS pid once started
    exit_code: Optional[int] = None
    # Fix: was annotated as plain `datetime` with a None default, which
    # contradicted the annotation; it is genuinely optional and defaulted
    # to "now" in __post_init__.
    created_at: Optional[datetime] = None
    started_at: Optional[datetime] = None
    finished_at: Optional[datetime] = None
    working_dir: Optional[str] = None
    environment: Optional[Dict[str, str]] = None

    def __post_init__(self):
        # Default the creation timestamp when the caller did not supply one
        # (e.g. fresh containers, as opposed to ones reloaded from disk).
        if self.created_at is None:
            self.created_at = datetime.now()
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
class ProcessContainer:
    """Manages a single containerized process.

    Each container gets a directory under ``~/.local/mcli/containers/<id>``
    holding ``stdout.log``, ``stderr.log`` and a ``metadata.json`` snapshot of
    the associated :class:`ProcessInfo`. The child is started in its own
    process group so signals can be delivered to the whole group.
    """

    def __init__(self, process_info: ProcessInfo):
        self.info = process_info
        self.process: Optional[subprocess.Popen] = None
        self.stdout_file: Optional[Path] = None
        self.stderr_file: Optional[Path] = None
        self.container_dir: Optional[Path] = None
        self._setup_container_environment()

    def _setup_container_environment(self):
        """Setup isolated environment for the process"""
        # Create container directory
        base_dir = Path.home() / ".local" / "mcli" / "containers"
        self.container_dir = base_dir / self.info.id
        self.container_dir.mkdir(parents=True, exist_ok=True)

        # Setup log files
        self.stdout_file = self.container_dir / "stdout.log"
        self.stderr_file = self.container_dir / "stderr.log"

        # Create metadata file; default=str stringifies datetimes/enums so the
        # dataclass snapshot is JSON-serializable.
        metadata_file = self.container_dir / "metadata.json"
        with open(metadata_file, "w") as f:
            json.dump(asdict(self.info), f, indent=2, default=str)

    def start(self) -> bool:
        """Start the containerized process.

        Returns True on success; on failure marks the container FAILED and
        returns False. A second start while running is a no-op returning False.
        """
        try:
            if self.process and self.process.poll() is None:
                logger.warning(f"Process {self.info.id} is already running")
                return False

            # Open log files
            # NOTE(review): these handles are never closed in the parent after
            # Popen inherits them, so the parent leaks two fds per start —
            # consider closing them once the child is spawned.
            stdout_handle = open(self.stdout_file, "w")
            stderr_handle = open(self.stderr_file, "w")

            # Start process
            self.process = subprocess.Popen(
                [self.info.command] + self.info.args,
                stdout=stdout_handle,
                stderr=stderr_handle,
                cwd=self.info.working_dir or str(self.container_dir),
                env=self.info.environment or os.environ.copy(),
                preexec_fn=os.setsid,  # Create new process group for better control
            )

            self.info.pid = self.process.pid
            self.info.status = ProcessStatus.RUNNING
            self.info.started_at = datetime.now()

            logger.info(f"Started process {self.info.id} with PID {self.process.pid}")
            return True

        except Exception as e:
            logger.error(f"Failed to start process {self.info.id}: {e}")
            self.info.status = ProcessStatus.FAILED
            return False

    def stop(self, timeout: int = 10) -> bool:
        """Stop the process gracefully.

        Sends SIGTERM to the whole process group, waits up to *timeout*
        seconds, then escalates to SIGKILL. Status becomes EXITED on graceful
        shutdown or KILLED if the escalation fired. Returns True on success
        (including when the process was already dead).
        """
        if not self.process or self.process.poll() is not None:
            return True

        try:
            # Send SIGTERM
            os.killpg(os.getpgid(self.process.pid), signal.SIGTERM)

            # Wait for graceful shutdown
            try:
                self.process.wait(timeout=timeout)
            except subprocess.TimeoutExpired:
                # Force kill if timeout
                os.killpg(os.getpgid(self.process.pid), signal.SIGKILL)
                self.process.wait()
                self.info.status = ProcessStatus.KILLED
            else:
                self.info.status = ProcessStatus.EXITED

            self.info.exit_code = self.process.returncode
            self.info.finished_at = datetime.now()

            logger.info(f"Stopped process {self.info.id}")
            return True

        except Exception as e:
            logger.error(f"Failed to stop process {self.info.id}: {e}")
            return False

    def kill(self) -> bool:
        """Force kill the process (SIGKILL to the process group, no grace)."""
        if not self.process or self.process.poll() is not None:
            return True

        try:
            os.killpg(os.getpgid(self.process.pid), signal.SIGKILL)
            self.process.wait()

            self.info.status = ProcessStatus.KILLED
            self.info.exit_code = self.process.returncode
            self.info.finished_at = datetime.now()

            logger.info(f"Killed process {self.info.id}")
            return True

        except Exception as e:
            logger.error(f"Failed to kill process {self.info.id}: {e}")
            return False

    def is_running(self) -> bool:
        """Check if process is currently running"""
        if not self.process:
            return False
        return self.process.poll() is None

    def get_logs(self, lines: Optional[int] = None, follow: bool = False) -> Dict[str, str]:
        """Get process logs.

        Returns a dict with "stdout" and "stderr" keys; when *lines* is
        truthy only the last N lines of each stream are returned.
        NOTE(review): *follow* is accepted but never used here — confirm
        whether tail-follow was intended.
        """
        logs = {"stdout": "", "stderr": ""}

        try:
            if self.stdout_file and self.stdout_file.exists():
                with open(self.stdout_file, "r") as f:
                    content = f.read()
                    if lines:
                        content = "\n".join(content.split("\n")[-lines:])
                    logs["stdout"] = content

            if self.stderr_file and self.stderr_file.exists():
                with open(self.stderr_file, "r") as f:
                    content = f.read()
                    if lines:
                        content = "\n".join(content.split("\n")[-lines:])
                    logs["stderr"] = content

        except Exception as e:
            logger.error(f"Failed to read logs for process {self.info.id}: {e}")

        return logs

    def get_stats(self) -> Dict[str, Any]:
        """Get process statistics (CPU %, RSS in MB, thread count, uptime).

        All values are zero when the process is not running or psutil fails.
        """
        stats = {
            "cpu_percent": 0.0,
            "memory_mb": 0.0,
            "num_threads": 0,
            "uptime_seconds": 0,
        }

        try:
            if self.process and self.is_running():
                proc = psutil.Process(self.process.pid)
                stats["cpu_percent"] = proc.cpu_percent()
                stats["memory_mb"] = proc.memory_info().rss / (1024 * 1024)
                stats["num_threads"] = proc.num_threads()

                if self.info.started_at:
                    uptime = datetime.now() - self.info.started_at
                    stats["uptime_seconds"] = int(uptime.total_seconds())

        except Exception as e:
            logger.error(f"Failed to get stats for process {self.info.id}: {e}")

        return stats

    def cleanup(self):
        """Clean up container resources"""
        try:
            # Stop process if running
            if self.is_running():
                self.stop()

            # Optionally remove container directory
            # (keeping logs for now, but this could be configurable)

        except Exception as e:
            logger.error(f"Failed to cleanup process {self.info.id}: {e}")
|
|
229
|
+
|
|
230
|
+
|
|
231
|
+
class ProcessManager:
    """Docker-like process management system.

    Keeps an in-memory registry of :class:`ProcessContainer` objects keyed by
    their full UUID, rehydrated from ``~/.local/mcli/containers`` on startup.
    """

    def __init__(self):
        self.containers: Dict[str, ProcessContainer] = {}
        self.base_dir = Path.home() / ".local" / "mcli" / "containers"
        self.base_dir.mkdir(parents=True, exist_ok=True)

        # Load existing containers
        self._load_existing_containers()

    def _load_existing_containers(self):
        """Load existing containers from disk.

        Reads each container's metadata.json, rebuilds the ProcessInfo
        (datetimes and status enum were stringified on save), and marks
        containers whose pid no longer exists as EXITED. Per-container
        failures are logged and skipped so one bad directory cannot block
        startup.
        """
        try:
            for container_dir in self.base_dir.iterdir():
                if container_dir.is_dir():
                    metadata_file = container_dir / "metadata.json"
                    if metadata_file.exists():
                        try:
                            with open(metadata_file, "r") as f:
                                data = json.load(f)

                            # Convert string datetime back to datetime objects
                            for date_field in ["created_at", "started_at", "finished_at"]:
                                if data.get(date_field):
                                    data[date_field] = datetime.fromisoformat(data[date_field])

                            # Convert status back to enum
                            data["status"] = ProcessStatus(data["status"])

                            process_info = ProcessInfo(**data)
                            container = ProcessContainer(process_info)
                            self.containers[process_info.id] = container

                            # Check if process is still actually running
                            if process_info.pid and not psutil.pid_exists(process_info.pid):
                                container.info.status = ProcessStatus.EXITED
                                container.info.finished_at = datetime.now()

                        except Exception as e:
                            logger.error(f"Failed to load container {container_dir.name}: {e}")

        except Exception as e:
            logger.error(f"Failed to load existing containers: {e}")

    def create(
        self,
        name: str,
        command: str,
        args: List[str] = None,
        working_dir: str = None,
        environment: Dict[str, str] = None,
    ) -> str:
        """Create a new process container.

        Returns the new container's full UUID. The process is not started;
        call :meth:`start` (or use :meth:`run`).
        """
        process_id = str(uuid.uuid4())

        process_info = ProcessInfo(
            id=process_id,
            name=name,
            command=command,
            args=args or [],
            status=ProcessStatus.CREATED,
            working_dir=working_dir,
            environment=environment,
        )

        container = ProcessContainer(process_info)
        self.containers[process_id] = container

        logger.info(f"Created container {process_id} for command: {command}")
        return process_id

    def start(self, process_id: str) -> bool:
        """Start a process container"""
        if process_id not in self.containers:
            logger.error(f"Container {process_id} not found")
            return False

        return self.containers[process_id].start()

    def stop(self, process_id: str, timeout: int = 10) -> bool:
        """Stop a process container"""
        if process_id not in self.containers:
            logger.error(f"Container {process_id} not found")
            return False

        return self.containers[process_id].stop(timeout)

    def kill(self, process_id: str) -> bool:
        """Kill a process container"""
        if process_id not in self.containers:
            logger.error(f"Container {process_id} not found")
            return False

        return self.containers[process_id].kill()

    def remove(self, process_id: str, force: bool = False) -> bool:
        """Remove a process container.

        A running container is stopped first (or killed when *force* is set)
        before being dropped from the registry.
        """
        if process_id not in self.containers:
            logger.error(f"Container {process_id} not found")
            return False

        container = self.containers[process_id]

        # Stop if running (unless force kill)
        if container.is_running():
            if force:
                container.kill()
            else:
                container.stop()

        # Cleanup and remove
        container.cleanup()
        del self.containers[process_id]

        logger.info(f"Removed container {process_id}")
        return True

    def list_processes(self, all_processes: bool = False) -> List[Dict[str, Any]]:
        """List all process containers (Docker ps style).

        Exited/killed containers are hidden unless *all_processes* is True.
        NOTE(review): the returned "id" is truncated to 12 chars like Docker,
        but start/stop/kill/inspect/remove look containers up by FULL UUID —
        a short id taken from this listing will not resolve; confirm whether
        prefix lookup was intended.
        """
        result = []

        for container in self.containers.values():
            if not all_processes and container.info.status in [
                ProcessStatus.EXITED,
                ProcessStatus.KILLED,
            ]:
                continue

            stats = container.get_stats()

            result.append(
                {
                    "id": container.info.id[:12],  # Short ID like Docker
                    "name": container.info.name,
                    "command": f"{container.info.command} {' '.join(container.info.args)}",
                    "status": container.info.status.value,
                    "pid": container.info.pid,
                    "created": (
                        container.info.created_at.strftime("%Y-%m-%d %H:%M:%S")
                        if container.info.created_at
                        else ""
                    ),
                    "uptime": f"{stats['uptime_seconds']}s",
                    "cpu": f"{stats['cpu_percent']:.1f}%",
                    "memory": f"{stats['memory_mb']:.1f}MB",
                }
            )

        return result

    def inspect(self, process_id: str) -> Optional[Dict[str, Any]]:
        """Get detailed information about a process container.

        Returns None when the (full) id is unknown.
        """
        if process_id not in self.containers:
            return None

        container = self.containers[process_id]
        stats = container.get_stats()

        return {
            "id": container.info.id,
            "name": container.info.name,
            "command": container.info.command,
            "args": container.info.args,
            "status": container.info.status.value,
            "pid": container.info.pid,
            "exit_code": container.info.exit_code,
            "created_at": (
                container.info.created_at.isoformat() if container.info.created_at else None
            ),
            "started_at": (
                container.info.started_at.isoformat() if container.info.started_at else None
            ),
            "finished_at": (
                container.info.finished_at.isoformat() if container.info.finished_at else None
            ),
            "working_dir": container.info.working_dir,
            "environment": container.info.environment,
            "stats": stats,
            "container_dir": str(container.container_dir),
        }

    def logs(
        self, process_id: str, lines: Optional[int] = None, follow: bool = False
    ) -> Optional[Dict[str, str]]:
        """Get logs from a process container"""
        if process_id not in self.containers:
            return None

        return self.containers[process_id].get_logs(lines, follow)

    def run(
        self,
        name: str,
        command: str,
        args: List[str] = None,
        working_dir: str = None,
        environment: Dict[str, str] = None,
        detach: bool = True,
    ) -> str:
        """Create and start a process container in one step.

        Returns the new container id; on start failure the half-created
        container is force-removed and RuntimeError is raised.
        NOTE(review): *detach* is accepted but unused here — confirm intent.
        """
        process_id = self.create(name, command, args, working_dir, environment)

        if self.start(process_id):
            return process_id
        else:
            # Clean up failed container
            self.remove(process_id, force=True)
            raise RuntimeError(f"Failed to start container {process_id}")
|