mcli-framework 7.1.3__py3-none-any.whl → 7.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mcli-framework might be problematic. Click here for more details.
- mcli/app/main.py +10 -0
- mcli/lib/custom_commands.py +424 -0
- mcli/lib/paths.py +12 -0
- mcli/ml/dashboard/app.py +13 -13
- mcli/ml/dashboard/app_integrated.py +1292 -148
- mcli/ml/dashboard/app_supabase.py +46 -21
- mcli/ml/dashboard/app_training.py +14 -14
- mcli/ml/dashboard/components/charts.py +258 -0
- mcli/ml/dashboard/components/metrics.py +125 -0
- mcli/ml/dashboard/components/tables.py +228 -0
- mcli/ml/dashboard/pages/cicd.py +382 -0
- mcli/ml/dashboard/pages/predictions_enhanced.py +820 -0
- mcli/ml/dashboard/pages/scrapers_and_logs.py +1060 -0
- mcli/ml/dashboard/pages/workflows.py +533 -0
- mcli/ml/training/train_model.py +569 -0
- mcli/self/self_cmd.py +322 -94
- mcli/workflow/politician_trading/data_sources.py +259 -1
- mcli/workflow/politician_trading/models.py +159 -1
- mcli/workflow/politician_trading/scrapers_corporate_registry.py +846 -0
- mcli/workflow/politician_trading/scrapers_free_sources.py +516 -0
- mcli/workflow/politician_trading/scrapers_third_party.py +391 -0
- mcli/workflow/politician_trading/seed_database.py +539 -0
- mcli/workflow/workflow.py +8 -27
- {mcli_framework-7.1.3.dist-info → mcli_framework-7.2.0.dist-info}/METADATA +1 -1
- {mcli_framework-7.1.3.dist-info → mcli_framework-7.2.0.dist-info}/RECORD +29 -25
- mcli/workflow/daemon/api_daemon.py +0 -800
- mcli/workflow/daemon/commands.py +0 -1196
- mcli/workflow/dashboard/dashboard_cmd.py +0 -120
- mcli/workflow/file/file.py +0 -100
- mcli/workflow/git_commit/commands.py +0 -430
- mcli/workflow/politician_trading/commands.py +0 -1939
- mcli/workflow/scheduler/commands.py +0 -493
- mcli/workflow/sync/sync_cmd.py +0 -437
- mcli/workflow/videos/videos.py +0 -242
- {mcli_framework-7.1.3.dist-info → mcli_framework-7.2.0.dist-info}/WHEEL +0 -0
- {mcli_framework-7.1.3.dist-info → mcli_framework-7.2.0.dist-info}/entry_points.txt +0 -0
- {mcli_framework-7.1.3.dist-info → mcli_framework-7.2.0.dist-info}/licenses/LICENSE +0 -0
- {mcli_framework-7.1.3.dist-info → mcli_framework-7.2.0.dist-info}/top_level.txt +0 -0
mcli/workflow/sync/sync_cmd.py
DELETED
|
@@ -1,437 +0,0 @@
|
|
|
1
|
-
"""Multi-cloud synchronization commands for mcli."""
|
|
2
|
-
|
|
3
|
-
import asyncio
|
|
4
|
-
import json
|
|
5
|
-
import os
|
|
6
|
-
import shutil
|
|
7
|
-
import subprocess
|
|
8
|
-
from datetime import datetime
|
|
9
|
-
from pathlib import Path
|
|
10
|
-
from typing import Dict, List, Optional
|
|
11
|
-
|
|
12
|
-
import click
|
|
13
|
-
|
|
14
|
-
from mcli.lib.logger.logger import get_logger
|
|
15
|
-
|
|
16
|
-
logger = get_logger(__name__)
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
class MultiCloudSync:
    """Handles synchronization across GitHub, OneDrive, iCloud, and Google Drive.

    State lives in two JSON files inside the vault itself:

    * ``.mcli_sync_config.json`` — configured cloud folder paths.
    * ``.mcli_sync_log.json``   — rolling log of the last 100 sync operations.
    """

    # rsync exclusions shared by push AND pull so the two directions cannot
    # drift apart (previously the list was duplicated in both methods).
    _RSYNC_EXCLUDES = [
        "--exclude=.git",
        "--exclude=.obsidian/workspace*",
        "--exclude=.mcli_sync_*",
        "--exclude=__pycache__",
        "--exclude=*.pyc",
    ]

    def __init__(self, vault_path: str):
        """Resolve *vault_path* and load any previously saved configuration."""
        self.vault_path = Path(vault_path).resolve()
        self.sync_config_path = self.vault_path / ".mcli_sync_config.json"
        self.sync_log_path = self.vault_path / ".mcli_sync_log.json"

        # Cloud storage paths - these will need to be configured by user
        self.cloud_paths = {"onedrive": None, "icloud": None, "googledrive": None}

        self.load_config()

    def load_config(self) -> None:
        """Load sync configuration from file (best-effort; logs on failure)."""
        if not self.sync_config_path.exists():
            return
        try:
            with open(self.sync_config_path, "r") as f:
                config = json.load(f)
            self.cloud_paths.update(config.get("cloud_paths", {}))
            logger.info(f"Loaded sync config from {self.sync_config_path}")
        except Exception as e:
            # A corrupt/unreadable config is not fatal — fall back to defaults.
            logger.warning(f"Failed to load sync config: {e}")

    def save_config(self) -> None:
        """Persist cloud paths and vault location to the config file."""
        config = {
            "cloud_paths": self.cloud_paths,
            "vault_path": str(self.vault_path),
            "last_updated": datetime.now().isoformat(),
        }
        try:
            with open(self.sync_config_path, "w") as f:
                json.dump(config, f, indent=2)
            logger.info(f"Saved sync config to {self.sync_config_path}")
        except Exception as e:
            logger.error(f"Failed to save sync config: {e}")

    def log_sync_action(self, action: str, target: str, status: str, details: str = "") -> None:
        """Append an entry to the sync log, keeping only the last 100 entries.

        Args:
            action:  Operation name (e.g. ``git_sync``, ``cloud_sync``).
            target:  Sync target (``github``, ``onedrive``, ...).
            status:  ``success`` / ``error`` / ``info``.
            details: Optional free-form detail string.
        """
        log_entry = {
            "timestamp": datetime.now().isoformat(),
            "action": action,
            "target": target,
            "status": status,
            "details": details,
        }

        # Load existing log; a corrupt log is discarded rather than crashing.
        logs = []
        if self.sync_log_path.exists():
            try:
                with open(self.sync_log_path, "r") as f:
                    logs = json.load(f)
            except Exception as e:
                logger.warning(f"Failed to load sync log: {e}")

        # Append new entry and keep only last 100 entries
        logs.append(log_entry)
        logs = logs[-100:]

        try:
            with open(self.sync_log_path, "w") as f:
                json.dump(logs, f, indent=2)
        except Exception as e:
            logger.warning(f"Failed to save sync log: {e}")

    def _run_rsync(self, src: Path, dst: Path, *, delete: bool = False):
        """Run ``rsync -av src/ dst/`` with the shared exclude list.

        *delete* adds ``--delete`` so the destination mirrors the source
        exactly (used when pushing, never when pulling).
        Returns the completed :class:`subprocess.CompletedProcess`.
        """
        cmd = ["rsync", "-av"]
        if delete:
            cmd.append("--delete")
        cmd.extend(self._RSYNC_EXCLUDES)
        # Trailing slashes make rsync copy directory *contents*, not the dir.
        cmd.extend([f"{src}/", f"{dst}/"])
        return subprocess.run(cmd, capture_output=True, text=True)

    def sync_to_github(self) -> bool:
        """Commit all local changes and push the vault's git repo.

        Returns True on success (including "nothing to commit"), False otherwise.
        """
        try:
            # Check if we're in a git repository
            result = subprocess.run(
                ["git", "status"], cwd=self.vault_path, capture_output=True, text=True
            )
            if result.returncode != 0:
                self.log_sync_action("git_sync", "github", "error", "Not a git repository")
                return False

            # Add all changes
            subprocess.run(["git", "add", "."], cwd=self.vault_path, check=True)

            # Check if there are changes to commit (exit code 0 == no diff)
            result = subprocess.run(
                ["git", "diff", "--staged", "--quiet"], cwd=self.vault_path, capture_output=True
            )
            if result.returncode == 0:
                self.log_sync_action("git_sync", "github", "success", "No changes to commit")
                return True

            # Commit changes
            commit_msg = f"Auto-sync vault - {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
            subprocess.run(["git", "commit", "-m", commit_msg], cwd=self.vault_path, check=True)

            # Push to remote
            subprocess.run(["git", "push"], cwd=self.vault_path, check=True)

            self.log_sync_action(
                "git_sync", "github", "success", f"Committed and pushed: {commit_msg}"
            )
            return True

        except subprocess.CalledProcessError as e:
            self.log_sync_action("git_sync", "github", "error", str(e))
            logger.error(f"Git sync failed: {e}")
            return False

    def sync_to_cloud_storage(self, cloud_name: str) -> bool:
        """Mirror the vault into the configured folder for *cloud_name*.

        Creates the destination directory if missing. Returns True on success.
        """
        cloud_path = self.cloud_paths.get(cloud_name)
        if not cloud_path:
            self.log_sync_action("cloud_sync", cloud_name, "error", "Cloud path not configured")
            return False

        cloud_path = Path(cloud_path)
        if not cloud_path.exists():
            try:
                cloud_path.mkdir(parents=True, exist_ok=True)
                self.log_sync_action(
                    "cloud_sync", cloud_name, "info", f"Created directory: {cloud_path}"
                )
            except Exception as e:
                self.log_sync_action(
                    "cloud_sync", cloud_name, "error", f"Failed to create directory: {e}"
                )
                return False

        try:
            # --delete so stale files removed locally disappear from the cloud copy.
            result = self._run_rsync(self.vault_path, cloud_path, delete=True)

            if result.returncode == 0:
                self.log_sync_action("cloud_sync", cloud_name, "success", f"Synced to {cloud_path}")
                return True
            else:
                self.log_sync_action("cloud_sync", cloud_name, "error", result.stderr)
                return False

        except Exception as e:
            self.log_sync_action("cloud_sync", cloud_name, "error", str(e))
            logger.error(f"Cloud sync to {cloud_name} failed: {e}")
            return False

    def sync_from_cloud_storage(self, cloud_name: str) -> bool:
        """Copy changes from the cloud folder back into the vault.

        Note: pull deliberately does NOT use ``--delete`` so local-only files
        survive. Returns True on success.
        """
        cloud_path = self.cloud_paths.get(cloud_name)
        if not cloud_path or not Path(cloud_path).exists():
            self.log_sync_action("cloud_pull", cloud_name, "error", "Cloud path not found")
            return False

        try:
            result = self._run_rsync(Path(cloud_path), self.vault_path, delete=False)

            if result.returncode == 0:
                self.log_sync_action(
                    "cloud_pull", cloud_name, "success", f"Pulled from {cloud_path}"
                )
                return True
            else:
                self.log_sync_action("cloud_pull", cloud_name, "error", result.stderr)
                return False

        except Exception as e:
            self.log_sync_action("cloud_pull", cloud_name, "error", str(e))
            logger.error(f"Cloud pull from {cloud_name} failed: {e}")
            return False

    def get_sync_status(self) -> Dict:
        """Return a status dict for git plus every configured cloud target.

        Shape: ``{"vault_path": str, "last_check": iso-str, "targets": {...}}``.
        """
        status = {
            "vault_path": str(self.vault_path),
            "last_check": datetime.now().isoformat(),
            "targets": {},
        }

        # Check git status
        try:
            result = subprocess.run(
                ["git", "status", "--porcelain"],
                cwd=self.vault_path,
                capture_output=True,
                text=True,
            )
            if result.returncode == 0:
                has_changes = bool(result.stdout.strip())
                status["targets"]["github"] = {
                    "configured": True,
                    "has_uncommitted_changes": has_changes,
                    "status": "dirty" if has_changes else "clean",
                }
            else:
                status["targets"]["github"] = {"configured": False, "error": "Not a git repository"}
        except Exception as e:
            status["targets"]["github"] = {"configured": False, "error": str(e)}

        # Check cloud storage paths
        for cloud_name, cloud_path in self.cloud_paths.items():
            if cloud_path:
                path_obj = Path(cloud_path)
                status["targets"][cloud_name] = {
                    "configured": True,
                    "path": cloud_path,
                    "exists": path_obj.exists(),
                    "accessible": path_obj.exists() and os.access(path_obj, os.R_OK | os.W_OK),
                }
            else:
                status["targets"][cloud_name] = {"configured": False, "path": None}

        return status
-
|
|
259
|
-
|
|
260
|
-
@click.group(name="sync")
def sync():
    """Multi-cloud synchronization commands for vault management."""


# Attach the sync test sub-command when the optional module is present.
try:
    from .test_cmd import test as sync_test
except ImportError:
    pass
else:
    sync.add_command(sync_test)
|
|
274
|
-
|
|
275
|
-
@sync.command()
@click.option("--vault-path", default=".", help="Path to vault directory")
def status(vault_path):
    """Show sync status for all configured targets."""
    status_info = MultiCloudSync(vault_path).get_sync_status()

    click.echo(f"📁 Vault: {status_info['vault_path']}")
    click.echo(f"🕒 Last check: {status_info['last_check']}")
    click.echo()

    for target, info in status_info["targets"].items():
        # Unconfigured targets: short-circuit with an optional error detail.
        if not info["configured"]:
            click.echo(f"⚪ {target.title()}: not configured")
            if "error" in info:
                click.echo(f" └─ Error: {info['error']}")
            continue

        if target == "github":
            clean = info["status"] == "clean"
            click.echo(f"{'📚' if clean else '⚠️'} {target.title()}: {info['status']}")
            if info.get("has_uncommitted_changes"):
                click.echo(f" └─ Uncommitted changes present")
        else:
            reachable = info["accessible"]
            click.echo(
                f"{'☁️' if reachable else '❌'} {target.title()}: {'accessible' if reachable else 'not accessible'}"
            )
            click.echo(f" └─ Path: {info['path']}")
|
|
304
|
-
|
|
305
|
-
@sync.command()
@click.option("--vault-path", default=".", help="Path to vault directory")
@click.option("--onedrive", help="Path to OneDrive sync folder")
@click.option("--icloud", help="Path to iCloud Drive sync folder")
@click.option("--googledrive", help="Path to Google Drive sync folder")
def configure(vault_path, onedrive, icloud, googledrive):
    """Configure cloud storage paths for synchronization."""
    syncer = MultiCloudSync(vault_path)

    # (config key, display label, CLI value) — only provided options are applied.
    updates = (
        ("onedrive", "OneDrive", onedrive),
        ("icloud", "iCloud", icloud),
        ("googledrive", "Google Drive", googledrive),
    )
    for key, label, value in updates:
        if value:
            syncer.cloud_paths[key] = value
            click.echo(f"✅ {label} path set to: {value}")

    syncer.save_config()
    click.echo("🔧 Configuration saved!")
|
|
329
|
-
|
|
330
|
-
@sync.command()
@click.option("--vault-path", default=".", help="Path to vault directory")
@click.option(
    "--target",
    type=click.Choice(["all", "github", "onedrive", "icloud", "googledrive"]),
    default="all",
    help="Sync target",
)
def push(vault_path, target):
    """Push vault changes to specified target(s)."""
    syncer = MultiCloudSync(vault_path)

    # "all" fans out to every known target; otherwise just the requested one.
    targets = ["github", "onedrive", "icloud", "googledrive"] if target == "all" else [target]

    results = {}
    for name in targets:
        if name == "github":
            click.echo("🔄 Syncing to GitHub...")
            results[name] = syncer.sync_to_github()
        elif syncer.cloud_paths.get(name):
            click.echo(f"🔄 Syncing to {name.title()}...")
            results[name] = syncer.sync_to_cloud_storage(name)
        else:
            click.echo(f"⚠️ {name.title()} not configured, skipping...")
            results[name] = False

    # Summary table of per-target outcomes.
    click.echo("\n📊 Sync Results:")
    for target_name, success in results.items():
        click.echo(
            f"{'✅' if success else '❌'} {target_name.title()}: {'Success' if success else 'Failed'}"
        )
|
|
364
|
-
|
|
365
|
-
@sync.command()
@click.option("--vault-path", default=".", help="Path to vault directory")
@click.option(
    "--target",
    type=click.Choice(["onedrive", "icloud", "googledrive"]),
    required=True,
    help="Cloud storage to pull from",
)
def pull(vault_path, target):
    """Pull changes from cloud storage to vault."""
    syncer = MultiCloudSync(vault_path)

    # Guard: refuse to pull from a target that was never configured.
    if not syncer.cloud_paths.get(target):
        click.echo(f"❌ {target.title()} not configured!")
        return

    click.echo(f"🔄 Pulling from {target.title()}...")
    if syncer.sync_from_cloud_storage(target):
        click.echo(f"✅ Successfully pulled from {target.title()}")
    else:
        click.echo(f"❌ Failed to pull from {target.title()}")
|
|
389
|
-
|
|
390
|
-
@sync.command()
@click.option("--vault-path", default=".", help="Path to vault directory")
@click.option("--lines", default=20, help="Number of log lines to show")
def logs(vault_path, lines):
    """Show sync operation logs.

    Reads the vault's JSON sync log and prints the most recent *lines*
    entries, oldest first, each with a status icon and optional details.
    """
    syncer = MultiCloudSync(vault_path)

    if not syncer.sync_log_path.exists():
        click.echo("📝 No sync logs found.")
        return

    try:
        with open(syncer.sync_log_path, "r") as f:
            logs_data = json.load(f)

        # Show last N entries
        recent_logs = logs_data[-lines:]

        click.echo(f"📝 Last {len(recent_logs)} sync operations:")
        click.echo()

        for log_entry in recent_logs:
            timestamp = log_entry.get("timestamp", "Unknown")
            action = log_entry.get("action", "Unknown")
            target = log_entry.get("target", "Unknown")
            status = log_entry.get("status", "Unknown")
            details = log_entry.get("details", "")

            # Render ISO timestamps compactly; fall back to the raw value for
            # anything fromisoformat cannot parse. Narrowed from a bare
            # `except:` so KeyboardInterrupt/SystemExit and real bugs are not
            # silently swallowed (AttributeError covers non-str timestamps).
            try:
                dt = datetime.fromisoformat(timestamp.replace("Z", "+00:00"))
                time_str = dt.strftime("%Y-%m-%d %H:%M:%S")
            except (ValueError, AttributeError):
                time_str = timestamp

            # Status icon
            icon = {"success": "✅", "error": "❌", "info": "ℹ️"}.get(status, "📝")

            click.echo(f"{icon} {time_str} | {action} → {target} | {status}")
            if details:
                click.echo(f" └─ {details}")

    except Exception as e:
        # Boundary handler: a corrupt log file should not crash the CLI.
        click.echo(f"❌ Failed to read logs: {e}")
|
|
435
|
-
|
|
436
|
-
# Allow running this module directly as a standalone CLI entry point.
if __name__ == "__main__":
    sync()
mcli/workflow/videos/videos.py
DELETED
|
@@ -1,242 +0,0 @@
|
|
|
1
|
-
import os
|
|
2
|
-
import sys
|
|
3
|
-
from pathlib import Path
|
|
4
|
-
|
|
5
|
-
import click
|
|
6
|
-
|
|
7
|
-
# Add the app/video directory to the path so we can import the video processor
|
|
8
|
-
app_video_path = Path(__file__).parent.parent.parent / "app" / "video"
|
|
9
|
-
sys.path.insert(0, str(app_video_path))
|
|
10
|
-
|
|
11
|
-
# Lazy import variables
|
|
12
|
-
_video_module = None
|
|
13
|
-
_import_error = None
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
def _get_video_module():
    """Lazy import of video processing module.

    Resolves the processor classes on first call and caches them in the
    module-level ``_video_module`` dict; later calls return the cache.
    Resolution order: package import, then loading ``video.py`` directly
    from ``app_video_path``, then a stub dict of ``None`` processors.
    """
    global _video_module, _import_error

    if _video_module is not None:
        return _video_module

    # NOTE(review): because the stub dict below is also cached in
    # _video_module, this branch looks unreachable on subsequent calls —
    # confirm whether re-raising the stored error was ever intended.
    if _import_error is not None:
        raise _import_error

    try:
        from mcli.app.video.video import (
            CONFIG,
            EnhancedVideoProcessor,
            IntelligentVideoProcessor,
            VideoProcessor,
        )
    except ImportError:
        try:
            # Fallback: load video.py straight from the app/video directory.
            import importlib.util

            spec = importlib.util.spec_from_file_location("video", app_video_path / "video.py")
            loaded = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(loaded)

            _video_module = {
                "VideoProcessor": loaded.VideoProcessor,
                "EnhancedVideoProcessor": loaded.EnhancedVideoProcessor,
                "IntelligentVideoProcessor": loaded.IntelligentVideoProcessor,
                "CONFIG": loaded.CONFIG,
            }
        except Exception as e:
            _import_error = ImportError(f"Could not import video processing modules: {e}")
            # Degrade to a stub so callers can detect None and show a message.
            _video_module = {
                "VideoProcessor": None,
                "EnhancedVideoProcessor": None,
                "IntelligentVideoProcessor": None,
                "CONFIG": {"temp_dir": "./temp", "output_dir": "./output"},
            }
    else:
        _video_module = {
            "VideoProcessor": VideoProcessor,
            "EnhancedVideoProcessor": EnhancedVideoProcessor,
            "IntelligentVideoProcessor": IntelligentVideoProcessor,
            "CONFIG": CONFIG,
        }

    return _video_module
|
-
|
|
67
|
-
|
|
68
|
-
@click.group()
def videos():
    """Video processing and overlay removal tools."""
|
-
|
|
73
|
-
|
|
74
|
-
@videos.command()
@click.argument("input_video", type=click.Path(exists=True))
@click.option("--output", "-o", type=click.Path(), help="Output video path")
@click.option("--fps", "-f", default=30, help="Frame extraction rate (default: 30)")
@click.option("--context", "-c", default=3, help="Temporal context window size (default: 3)")
@click.option(
    "--method",
    type=click.Choice(["intelligent", "basic"]),
    default="intelligent",
    help="Processing method (default: intelligent)",
)
@click.option("--dry-run", is_flag=True, help="Only extract frames and analyze video")
def remove_overlay(input_video, output, fps, context, method, dry_run):
    """Remove overlays from videos with intelligent content reconstruction."""
    try:
        video_module = _get_video_module()
    except ImportError as e:
        click.echo(click.style(f"❌ Video processing modules not available: {e}", fg="red"))
        return

    basic_cls = video_module["VideoProcessor"]
    enhanced_cls = video_module["EnhancedVideoProcessor"]
    intelligent_cls = video_module["IntelligentVideoProcessor"]

    if basic_cls is None:
        click.echo(
            click.style(
                "❌ Video processing modules not available. Please install required dependencies.",
                fg="red",
            )
        )
        return

    if method == "intelligent":
        # Prefer the intelligent processor, degrading gracefully when missing.
        if intelligent_cls is None:
            click.echo(
                click.style(
                    "❌ Intelligent video processor not available. Using basic processor.",
                    fg="yellow",
                )
            )
            processor = enhanced_cls() if enhanced_cls else basic_cls()
        else:
            processor = intelligent_cls()

        # NOTE(review): --dry-run is honored only on this (intelligent) path;
        # confirm whether it was meant to work with --method basic as well.
        if dry_run:
            click.echo(
                click.style(
                    "🔍 Dry run mode - extracting frames and analyzing video only", fg="cyan"
                )
            )
            frame_paths = processor.extract_frames(input_video, fps)
            click.echo(
                click.style(
                    f"✅ Dry run complete. Extracted {len(frame_paths)} frames to {processor.temp_dir}",
                    fg="green",
                )
            )
            click.echo(click.style(f"📁 Temp directory: {processor.temp_dir}", fg="blue"))
            if hasattr(processor, "video_info"):
                click.echo(click.style(f"🎬 Video info: {processor.video_info}", fg="blue"))
            return

        if hasattr(processor, "remove_overlay_from_video_intelligent"):
            result = processor.remove_overlay_from_video_intelligent(
                video_path=input_video, output_path=output, fps=fps, context_window=context
            )
        else:
            result = processor.remove_overlay_from_video(
                video_path=input_video, output_path=output, fps=fps
            )
    else:
        processor = enhanced_cls() if enhanced_cls else basic_cls()
        if hasattr(processor, "remove_overlay_from_video"):
            result = processor.remove_overlay_from_video(
                video_path=input_video, output_path=output, fps=fps
            )
        else:
            click.echo(
                click.style("❌ Overlay removal not available with current processor.", fg="red")
            )
            return

    click.echo(f"Video processed successfully: {result}")
-
|
|
160
|
-
|
|
161
|
-
@videos.command()
@click.argument("input_video", type=click.Path(exists=True))
@click.option("--output", "-o", type=click.Path(), help="Output directory path")
@click.option("--fps", "-f", default=8, help="Frame extraction rate (default: 8)")
def extract_frames(input_video, output, fps):
    """Extract frames from video to timestamped directory."""
    try:
        video_module = _get_video_module()
    except ImportError as e:
        click.echo(click.style(f"❌ Video processing modules not available: {e}", fg="red"))
        return

    processor_cls = video_module["VideoProcessor"]
    if processor_cls is None:
        click.echo(
            click.style(
                "❌ Video processing modules not available. Please install required dependencies.",
                fg="red",
            )
        )
        return

    result_dir = processor_cls().extract_frames_to_directory(
        video_path=input_video, output_dir=output, fps=fps
    )

    click.echo(click.style(f"✅ Frame extraction complete!", fg="bright_green"))
    click.echo(click.style(f"📁 Output directory: {result_dir}", fg="green"))
-
|
|
194
|
-
|
|
195
|
-
@videos.command()
@click.argument("frame_directory", type=click.Path(exists=True))
@click.option("--output", "-o", type=click.Path(), help="Output video path")
@click.option("--fps", "-f", default=30.0, help="Output video FPS (default: 30)")
def frames_to_video(frame_directory, output, fps):
    """Convert frames back to video."""
    try:
        video_module = _get_video_module()
    except ImportError as e:
        click.echo(click.style(f"❌ Video processing modules not available: {e}", fg="red"))
        return

    processor_cls = video_module["VideoProcessor"]
    if processor_cls is None:
        click.echo(
            click.style(
                "❌ Video processing modules not available. Please install required dependencies.",
                fg="red",
            )
        )
        return

    processor = processor_cls()

    # Collect PNG frames in name order from the given directory.
    frame_dir = Path(frame_directory)
    frame_files = sorted(frame_dir.glob("*.png"))

    if not frame_files:
        click.echo(click.style("❌ No PNG frames found in directory", fg="red"))
        return

    # Default output path sits next to the frame directory.
    if output is None:
        output = str(frame_dir.parent / f"{frame_dir.name}_reconstructed.mp4")

    frame_paths = [str(f) for f in frame_files]

    # Set video info manually for frames_to_video
    processor.video_info = {"original_fps": fps}

    result = processor.frames_to_video(frame_paths, output, fps)
    click.echo(f"Video created successfully: {result}")
-
|
|
240
|
-
|
|
241
|
-
# Allow running this module directly as a standalone CLI entry point.
if __name__ == "__main__":
    videos()
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|