pyvegh 0.7.0__cp310-abi3-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyvegh-0.7.0.dist-info/METADATA +241 -0
- pyvegh-0.7.0.dist-info/RECORD +9 -0
- pyvegh-0.7.0.dist-info/WHEEL +4 -0
- pyvegh-0.7.0.dist-info/entry_points.txt +3 -0
- pyvegh-0.7.0.dist-info/licenses/LICENSE +21 -0
- vegh/__init__.py +24 -0
- vegh/_core.pyd +0 -0
- vegh/analytics.py +621 -0
- vegh/cli.py +1588 -0
vegh/cli.py
ADDED
|
@@ -0,0 +1,1588 @@
|
|
|
1
|
+
import typer
|
|
2
|
+
import time
|
|
3
|
+
import json
|
|
4
|
+
import requests
|
|
5
|
+
import math
|
|
6
|
+
import re
|
|
7
|
+
import os
|
|
8
|
+
import sys
|
|
9
|
+
import subprocess
|
|
10
|
+
import shutil
|
|
11
|
+
import hashlib
|
|
12
|
+
from datetime import datetime
|
|
13
|
+
from concurrent.futures import ThreadPoolExecutor, as_completed
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
from typing import Optional, List, Tuple, Dict
|
|
16
|
+
from rich.console import Console
|
|
17
|
+
from rich.table import Table
|
|
18
|
+
from rich.panel import Panel
|
|
19
|
+
from rich.tree import Tree
|
|
20
|
+
from rich.prompt import Prompt, Confirm
|
|
21
|
+
|
|
22
|
+
# Try to import package version metadata (Modern Pythonic way)
|
|
23
|
+
try:
|
|
24
|
+
from importlib.metadata import version as get_package_version, PackageNotFoundError
|
|
25
|
+
except ImportError:
|
|
26
|
+
# Fallback for older environments or odd setups
|
|
27
|
+
get_package_version = None
|
|
28
|
+
PackageNotFoundError = Exception
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
# Import core functionality
|
|
32
|
+
try:
|
|
33
|
+
from ._core import (
|
|
34
|
+
create_snap,
|
|
35
|
+
dry_run_snap,
|
|
36
|
+
restore_snap,
|
|
37
|
+
check_integrity,
|
|
38
|
+
list_files,
|
|
39
|
+
get_metadata,
|
|
40
|
+
count_locs,
|
|
41
|
+
scan_locs_dir,
|
|
42
|
+
cat_file,
|
|
43
|
+
list_files_details,
|
|
44
|
+
get_context_xml,
|
|
45
|
+
search_snap,
|
|
46
|
+
)
|
|
47
|
+
except ImportError:
|
|
48
|
+
print("Error: Rust core missing. Run 'maturin develop'!")
|
|
49
|
+
exit(1)
|
|
50
|
+
|
|
51
|
+
# Import Analytics module
|
|
52
|
+
try:
|
|
53
|
+
from .analytics import render_dashboard, scan_sloc, calculate_sloc
|
|
54
|
+
except ImportError:
|
|
55
|
+
render_dashboard = None
|
|
56
|
+
scan_sloc = None
|
|
57
|
+
calculate_sloc = None
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
# --- APP INITIALIZATION ---

# Define context settings to enable '-h' alongside '--help'
CONTEXT_SETTINGS = {"help_option_names": ["-h", "--help"]}

# Root Typer application. no_args_is_help makes `vegh` with no arguments
# print usage instead of silently doing nothing.
app = typer.Typer(
    name="vegh",
    help="Vegh (Python Edition) - The Snapshot Tool",
    add_completion=False,
    no_args_is_help=True,
    rich_markup_mode="rich",
    context_settings=CONTEXT_SETTINGS,  # Enable -h flag
)

# Sub-app for configuration commands
config_app = typer.Typer(
    help="Manage configuration settings (Server, Repo behavior, etc.)",
    context_settings=CONTEXT_SETTINGS,  # Enable -h flag for sub-commands too
)
app.add_typer(config_app, name="config")

# Shared rich console used by every command for styled output.
console = Console()

# --- PATH CONSTANTS ---
VEGH_ROOT = Path.home() / ".vegh"  # Per-user root for all vegh state
CONFIG_FILE = VEGH_ROOT / "config.json"  # Persistent CLI configuration
CACHE_ROOT = VEGH_ROOT / "cache"
REPO_CACHE_DIR = CACHE_ROOT / "repos"  # Shallow git clones, keyed by URL hash
HOOKS_FILE = ".veghhooks.json"  # Per-project pre/post hook definitions

# Constants
CHUNK_THRESHOLD = 100 * 1024 * 1024  # 100MB
CHUNK_SIZE = 10 * 1024 * 1024  # 10MB
CONCURRENT_WORKERS = 4
# Regexes matched against file names to flag likely secrets.
SENSITIVE_PATTERNS = [
    r"\.env(\..+)?$",
    r".*id_rsa.*",
    r".*\.pem$",
    r".*\.key$",
    r"credentials\.json",
    r"secrets\..*",
]

# Noise Patterns for 'vegh prompt --clean'
# These are files that are technically part of the project but add noise/token cost for LLMs
NOISE_PATTERNS = [
    # Lock files
    "package-lock.json",
    "yarn.lock",
    "pnpm-lock.yaml",
    "bun.lockb",
    "Cargo.lock",
    "uv.lock",
    "poetry.lock",
    "Gemfile.lock",
    "composer.lock",
    "mix.lock",
    "go.sum",

    # Build artifacts / Dist
    "*.min.js",
    "*.min.css",
    "*.map",
    "dist/",
    "build/",
    "target/",
    "out/",

    # Logs & Temp
    "*.log",
    "*.tmp",
    ".DS_Store",

    # Sensitive (Double check against regex later, but filter file names here)
    ".env",
    ".env.*",
    "*.pem",
    "*.key",
    "id_rsa",
    "*.p12",

    # Common Binary Assets (If not ignored by git)
    "*.png", "*.jpg", "*.jpeg", "*.gif", "*.ico", "*.svg",
    "*.pdf", "*.zip", "*.tar.gz", "*.rar", "*.7z",
    "*.exe", "*.dll", "*.so", "*.dylib", "*.bin",
    "*.sqlite", "*.db", "*.sqlite3",
    "*.mp4", "*.mp3", "*.mov", "*.avi", "*.wmv",
    "*.woff", "*.woff2", "*.ttf", "*.eot",
    "*.flac", "*.aac", "*.ogg", "*.opus",
    "*.m4a", "*.webm", "*.vegh",

    # Other (Unnecessary for code understanding)
    "LICENSE", "LICENSE.txt", "README.md", "README", "CHANGELOG", "CHANGELOG.md",
    "CONTRIBUTING.md", "CODE_OF_CONDUCT.md", "SECURITY.md",
    ".vscode/", ".idea/"
]
|
|
156
|
+
|
|
157
|
+
# --- VERSION CALLBACK ---
|
|
158
|
+
|
|
159
|
+
def version_callback(value: bool):
    """Handle the -v/--version eager flag.

    Resolves the installed 'pyvegh' distribution version (falling back to
    'dev' when importlib.metadata is unavailable, or 'dev-build' when the
    package is not installed), prints it, and exits the CLI.
    """
    if not value:
        return

    if get_package_version is None:
        # importlib.metadata could not be imported at module load time.
        resolved = "dev"
    else:
        try:
            resolved = get_package_version("pyvegh")
        except PackageNotFoundError:
            resolved = "dev-build"

    console.print(f"PyVegh CLI Version: [bold green]{resolved}[/bold green]")
    raise typer.Exit()
|
173
|
+
|
|
174
|
+
|
|
175
|
+
# Root callback: runs before any sub-command. Its only job is to host the
# eager --version/-v option; the docstring below is what Typer shows as the
# top-level help text.
@app.callback()
def main(
    ctx: typer.Context,
    version: Optional[bool] = typer.Option(
        None,
        "--version",
        "-v",
        callback=version_callback,  # prints version and raises typer.Exit
        is_eager=True,  # Process this before other commands
        help="Show the application version and exit."
    ),
):
    """
    Vegh: The lightning-fast snapshot and analytics tool.
    """
    # No body needed: version handling happens in the eager callback.
    pass
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
# --- HELPER FUNCTIONS ---
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
def load_config() -> Dict:
    """Load configuration from ~/.vegh/config.json.

    Returns:
        The parsed configuration dict, or an empty dict when the file is
        missing, unreadable, or contains invalid JSON.

    Fix: the original used a bare ``except:`` which also swallowed
    KeyboardInterrupt/SystemExit; this narrows the handler and uses EAFP
    instead of an exists()/read race.
    """
    try:
        return json.loads(CONFIG_FILE.read_text())
    except (OSError, ValueError):
        # OSError covers a missing/unreadable file; ValueError covers
        # json.JSONDecodeError and bad encodings.
        return {}
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
def save_config(config: Dict):
    """Persist configuration to ~/.vegh/config.json as pretty-printed JSON.

    Fix: ``exist_ok=True`` replaces the original exists()/mkdir() pair,
    which could raise FileExistsError if the directory appeared between
    the check and the create.
    """
    VEGH_ROOT.mkdir(parents=True, exist_ok=True)
    CONFIG_FILE.write_text(json.dumps(config, indent=2))
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
def format_bytes(size):
    """Render a byte count as a human-readable string, e.g. '1.50 KB'.

    Fix: the original raised KeyError for sizes above the 'T' unit
    (power_labels had no entry for n >= 5). The table now includes 'P'
    and the loop clamps at the largest known unit instead of indexing
    past the table.
    """
    power = 2**10
    n = 0
    power_labels = {0: "", 1: "K", 2: "M", 3: "G", 4: "T", 5: "P"}
    top = max(power_labels)
    while size > power and n < top:
        size /= power
        n += 1
    return f"{size:.2f} {power_labels[n]}B"
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
def get_dir_size(path: Path) -> int:
    """Return the total size in bytes of all regular files under *path*.

    Fix: the original wrapped the whole walk in a bare ``except: pass``,
    which both swallowed KeyboardInterrupt and aborted the entire sum on
    the first bad entry. Stat errors are now handled per entry (broken
    symlink, permission error, file removed mid-scan) and narrowed to
    OSError, so the remaining files are still counted.
    """
    total = 0
    try:
        for entry in path.rglob("*"):
            try:
                if entry.is_file():
                    total += entry.stat().st_size
            except OSError:
                # Skip entries that vanish or cannot be stat'ed; keep going.
                continue
    except OSError:
        # Directory itself unreadable: report what was accumulated so far.
        pass
    return total
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
def build_tree(path_list: List[str], root_name: str) -> Tree:
    """Render a list of relative paths as a rich Tree rooted at *root_name*.

    Intermediate directories become bold-blue branches, files green leaves,
    and the snapshot metadata file '.vegh.json' a dimmed leaf.
    """
    root = Tree(f"[bold cyan][ROOT] {root_name}[/bold cyan]")
    # Maps a joined directory path to its Tree node; "" is the root itself.
    branches = {"": root}

    for entry in sorted(path_list):
        segments = Path(entry).parts
        last_idx = len(segments) - 1
        prefix = ""
        for idx, segment in enumerate(segments):
            joined = os.path.join(prefix, segment)
            # Unknown parents fall back to the root node.
            attach_to = branches.get(prefix, root)
            if joined not in branches:
                if idx == last_idx:
                    # Leaf (file) entry: rendered but never stored as a branch.
                    if segment == ".vegh.json":
                        attach_to.add(f"[dim]{segment} (Meta)[/dim]")
                    else:
                        attach_to.add(f"[green]{segment}[/green]")
                else:
                    branches[joined] = attach_to.add(
                        f"[bold blue]+ {segment}[/bold blue]"
                    )
            prefix = joined
    return root
|
|
262
|
+
|
|
263
|
+
# --- NATIVE CLIPBOARD HELPER ---
|
|
264
|
+
def _copy_to_clipboard_native(text: str) -> bool:
    """Copy *text* to the system clipboard using OS tools only.

    Dispatches on sys.platform: pbcopy (macOS), clip (Windows), and
    wl-copy / xclip / xsel on Linux (Wayland first, then X11). Returns
    True on success, False when no tool exists or the invocation fails.
    """
    current = sys.platform
    utf8_payload = text.encode("utf-8")
    try:
        if current == "darwin":  # macOS
            subprocess.run("pbcopy", input=utf8_payload, check=True)
            return True
        if current == "win32":  # Windows
            # clip reads UTF-16 from the pipe; see the original note about
            # CRLF / trailing-newline quirks of the clip utility.
            subprocess.run("clip", input=text.encode("utf-16"), check=True)
            return True
        if current.startswith("linux"):
            # Preference order: Wayland compositor tool, then X11 helpers.
            if shutil.which("wl-copy"):
                subprocess.run("wl-copy", input=utf8_payload, check=True)
                return True
            if shutil.which("xclip"):
                subprocess.run(
                    ["xclip", "-selection", "clipboard"],
                    input=utf8_payload,
                    check=True,
                )
                return True
            if shutil.which("xsel"):
                subprocess.run(
                    ["xsel", "--clipboard", "--input"],
                    input=utf8_payload,
                    check=True,
                )
                return True
            return False
    except Exception:
        # Any tool failure (non-zero exit, missing binary race) -> False.
        return False
    # Unrecognized platform.
    return False
|
|
296
|
+
|
|
297
|
+
# --- REPO MANAGEMENT ---
|
|
298
|
+
|
|
299
|
+
|
|
300
|
+
def ensure_repo(
    url: str, branch: Optional[str] = None, offline_flag: bool = False
) -> Tuple[Path, str]:
    """
    Ensures a git repo is cached and up-to-date.
    Returns (Path to cached repo, Friendly Name).

    The cache lives under ~/.vegh/cache/repos/<md5(url)>. Offline mode
    (from the CLI flag or the 'repo_offline' config key) short-circuits to
    the cached copy when one exists; otherwise the repo is shallow-cloned
    or fetch-and-reset to the remote. Exits the CLI (typer.Exit) on any
    git failure or timeout.
    """
    if not shutil.which("git"):
        console.print("[bold red]Error:[/bold red] Git is not installed.")
        raise typer.Exit(1)

    # 1. Prepare Cache Directory
    if not REPO_CACHE_DIR.exists():
        REPO_CACHE_DIR.mkdir(parents=True)

    # 2. Check Global Config for "Always Offline" preference
    cfg = load_config()
    always_offline = cfg.get("repo_offline", False)
    is_offline = offline_flag or always_offline

    # 3. Identify Repo (Hash URL to avoid filesystem issues)
    # NOTE: md5 is used purely as a cache key, not for security.
    repo_hash = hashlib.md5(url.encode("utf-8")).hexdigest()
    repo_path = REPO_CACHE_DIR / repo_hash
    friendly_name = url.split("/")[-1].replace(".git", "")

    # 4. Smart Sync
    if is_offline and repo_path.exists():
        reason = "CLI Flag" if offline_flag else "Global Config"
        console.print(
            f"[bold yellow]⚡ Using cached {friendly_name} (Offline Mode: {reason})[/bold yellow]"
        )
        return repo_path, friendly_name

    action = "Cloning" if not repo_path.exists() else "Updating"

    try:
        if not repo_path.exists():
            # A. First Clone (Shallow)
            if is_offline:
                # Offline requested but nothing cached: must hit the network.
                console.print(
                    "[dim]Cache miss. Connecting to network to clone...[/dim]"
                )

            console.print(
                f"[bold cyan]🚀 {action} {friendly_name} (fresh cache)...[/bold cyan]"
            )
            cmd = ["git", "clone", "--depth", "1", "--single-branch"]
            if branch:
                cmd.extend(["--branch", branch])
            cmd.extend([url, str(repo_path)])

            subprocess.run(
                cmd,
                check=True,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.PIPE,
                timeout=300,
            )

        else:
            # B. Update Existing (Fetch + Reset)
            console.print(
                f"[bold cyan]🔄 {action} {friendly_name} (checking remote)...[/bold cyan]"
            )
            # Safety: Ensure remote URL matches
            subprocess.run(
                ["git", "remote", "set-url", "origin", url],
                cwd=repo_path,
                check=True,
                stderr=subprocess.PIPE,
            )
            # Fetch latest delta
            fetch_cmd = ["git", "fetch", "--depth", "1", "origin"]
            if branch:
                fetch_cmd.append(branch)
            subprocess.run(
                fetch_cmd,
                cwd=repo_path,
                check=True,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.PIPE,
                timeout=120,
            )
            # Reset to match remote
            target_ref = f"origin/{branch}" if branch else "origin/HEAD"
            subprocess.run(
                ["git", "reset", "--hard", target_ref],
                cwd=repo_path,
                check=True,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.PIPE,
            )
            # Cleanup artifacts
            # NOTE: failures here are deliberately ignored (no check=True) —
            # a dirty cache is preferable to aborting the sync.
            subprocess.run(
                ["git", "clean", "-fdx"],
                cwd=repo_path,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
            )

    except subprocess.TimeoutExpired:
        console.print(
            "[bold red]⏳ Timeout![/bold red] Repository operation took too long."
        )
        raise typer.Exit(1)
    except subprocess.CalledProcessError as e:
        # Surface git's own stderr when available.
        err = e.stderr.decode().strip() if e.stderr else str(e)
        console.print(f"[bold red]✘ Git Error:[/bold red] {err}")
        if repo_path.exists():
            console.print(
                "[yellow]Tip: Run 'vegh clean' if the cache is corrupted.[/yellow]"
            )
        raise typer.Exit(1)

    return repo_path, friendly_name
|
|
415
|
+
|
|
416
|
+
|
|
417
|
+
# --- HOOKS SYSTEM ---
|
|
418
|
+
|
|
419
|
+
|
|
420
|
+
def load_hooks(project_path: Path) -> Dict[str, List[str]]:
    """Read the 'hooks' mapping from the project's .veghhooks.json.

    Returns an empty dict when the file is absent; prints a warning and
    returns an empty dict when it exists but cannot be parsed.
    """
    hooks_path = project_path / HOOKS_FILE
    if not hooks_path.exists():
        return {}
    try:
        # .get stays inside the try so a non-object JSON document (e.g. a
        # list) is reported as a parse failure, matching prior behavior.
        parsed = json.loads(hooks_path.read_text(encoding="utf-8"))
        return parsed.get("hooks", {})
    except Exception as e:
        console.print(f"[yellow][WARN] Failed to parse {HOOKS_FILE}: {e}[/yellow]")
        return {}
|
|
429
|
+
|
|
430
|
+
|
|
431
|
+
def execute_hooks(commands: List[str], hook_name: str) -> bool:
    """Run each shell command of a hook stage, stopping at the first failure.

    Returns True when *commands* is empty/None or every command exits 0;
    False (after printing the error) otherwise. Output streams directly to
    the terminal.
    """
    if not commands:
        return True

    console.print(f"[bold magenta]>>> HOOK: {hook_name}[/bold magenta]")
    env = os.environ.copy()
    env["PYTHONIOENCODING"] = "utf-8"

    for command in commands:
        console.print(f" [dim]$ {command}[/dim]")
        # Windows encoding fix
        shell_line = f"chcp 65001 >NUL && {command}" if os.name == "nt" else command
        try:
            sys.stdout.flush()
            result = subprocess.run(
                shell_line, shell=True, capture_output=False, env=env
            )
        except Exception as e:
            console.print(f"\n[bold red][ERR] Error:[/bold red] {e}")
            return False
        # Checked outside the try: reading returncode cannot raise.
        if result.returncode != 0:
            console.print(
                f"\n[bold red][ERR] Failed code {result.returncode}[/bold red]"
            )
            return False

    console.print(f"[green][OK] {hook_name} passed.[/green]")
    return True
|
|
456
|
+
|
|
457
|
+
|
|
458
|
+
# --- CONFIG COMMANDS ---
|
|
459
|
+
|
|
460
|
+
|
|
461
|
+
@config_app.command("send")
def config_send(
    url: Optional[str] = typer.Option(None, help="Set default upload URL."),
    auth: Optional[str] = typer.Option(None, help="Set default auth token."),
):
    """Configure Server/Upload settings."""
    settings = load_config()

    console.print("[bold cyan]📡 Server Configuration[/bold cyan]")
    # With no flags at all, fall back to an interactive prompt session.
    interactive = not url and not auth
    if interactive:
        settings["url"] = Prompt.ask("Default Server URL", default=settings.get("url", ""))
        settings["auth"] = Prompt.ask(
            "Default Auth Token", default=settings.get("auth", ""), password=True
        )
    else:
        # Only overwrite the keys the user actually supplied.
        if url:
            settings["url"] = url
        if auth:
            settings["auth"] = auth

    save_config(settings)
    console.print(f"[green][OK] Settings saved to {CONFIG_FILE}[/green]")
|
|
483
|
+
|
|
484
|
+
|
|
485
|
+
@config_app.command("repo")
def config_repo(
    offline: Optional[bool] = typer.Option(
        None, "--offline/--online", help="Set default offline mode."
    ),
):
    """Configure Git Repository behavior."""
    settings = load_config()
    console.print("[bold cyan]📦 Repository Cache Configuration[/bold cyan]")

    # No flag given: ask interactively, defaulting to the current value.
    if offline is None:
        prior = settings.get("repo_offline", False)
        offline = Confirm.ask(
            "Always run in Offline Mode if cache exists? (Saves bandwidth)",
            default=prior,
        )

    settings["repo_offline"] = offline
    save_config(settings)

    status = "OFFLINE (Fast)" if offline else "ONLINE (Fresh)"
    console.print(
        f"[green][OK] Repo default mode set to: [bold]{status}[/bold][/green]"
    )
|
|
509
|
+
|
|
510
|
+
|
|
511
|
+
@config_app.command("list")
def config_list():
    """List current configuration."""
    # Pretty-print the stored config as JSON via rich.
    console.print_json(data=load_config())
|
|
516
|
+
|
|
517
|
+
|
|
518
|
+
@config_app.command("reset")
def config_reset(
    force: bool = typer.Option(False, "--force", "-f", help="Skip confirmation"),
):
    """Reset configuration to defaults."""
    # --force short-circuits the prompt; otherwise the user must confirm.
    confirmed = force or Confirm.ask(
        "Are you sure you want to reset all configuration?"
    )
    if not confirmed:
        raise typer.Abort()

    save_config({})
    console.print("[green]Configuration reset.[/green]")
|
|
529
|
+
|
|
530
|
+
|
|
531
|
+
# --- MAIN COMMANDS ---
|
|
532
|
+
|
|
533
|
+
|
|
534
|
+
@app.command()
def prune(
    target_dir: Path = typer.Argument(
        Path("."), help="Directory to scan for snapshots"
    ),
    keep: int = typer.Option(
        5, "--keep", "-k", help="Number of recent snapshots to keep"
    ),
    force: bool = typer.Option(False, "--force", "-f", help="Skip confirmation"),
):
    """Clean up old snapshots, keeping only the most recent ones."""
    if not target_dir.exists():
        console.print(f"[red]Directory '{target_dir}' not found.[/red]")
        raise typer.Exit(1)

    # Newest first, by filesystem mtime (not by snapshot metadata).
    snapshots = sorted(
        target_dir.glob("*.vegh"), key=lambda f: f.stat().st_mtime, reverse=True
    )

    if len(snapshots) <= keep:
        console.print(
            f"[green]No cleanup needed. Found {len(snapshots)} snapshots (Keep: {keep}).[/green]"
        )
        return

    keep_list = snapshots[:keep]
    delete_list = snapshots[keep:]

    console.print(
        f"[bold cyan]Found {len(snapshots)} snapshots. Keeping {len(keep_list)} most recent.[/bold cyan]"
    )

    # Preview table of everything that will be removed.
    table = Table(title="Snapshots to Delete")
    table.add_column("File", style="red")
    table.add_column("Size", style="yellow")
    table.add_column("Modified", style="dim")

    total_size = 0
    for s in delete_list:
        size = s.stat().st_size
        total_size += size
        mtime = datetime.fromtimestamp(s.stat().st_mtime).strftime("%Y-%m-%d %H:%M:%S")
        table.add_row(s.name, format_bytes(size), mtime)

    console.print(table)
    console.print(
        f"[bold]Total space to free:[/bold] [green]{format_bytes(total_size)}[/green]"
    )

    if not force:
        if not Confirm.ask(
            f"Are you sure you want to delete {len(delete_list)} snapshots?"
        ):
            console.print("[yellow]Aborted.[/yellow]")
            raise typer.Abort()

    with console.status("[red]Pruning...[/red]", spinner="bouncingBall"):
        deleted_count = 0
        for s in delete_list:
            try:
                s.unlink()
                console.print(f"[dim]Deleted: {s.name}[/dim]")
                deleted_count += 1
            except Exception as e:
                # Best-effort: report and keep deleting the rest.
                console.print(f"[red]Failed to delete {s.name}: {e}[/red]")

    console.print(
        f"[bold green]Prune complete. Deleted {deleted_count} files.[/bold green]"
    )
|
|
603
|
+
|
|
604
|
+
|
|
605
|
+
@app.command()
def snap(
    path: Optional[Path] = typer.Argument(
        None, help="Source directory (Required unless --repo used)"
    ),
    repo: Optional[str] = typer.Option(
        None, "--repo", help="Snapshot a remote Git repo"
    ),
    branch: Optional[str] = typer.Option(
        None, "--branch", "-b", help="Branch for remote repo"
    ),
    offline: bool = typer.Option(
        False, "--offline", help="Force offline mode (overrides config)"
    ),
    output: Optional[Path] = typer.Option(None, "--output", "-o"),
    level: int = typer.Option(3, "--level", "-l", help="Compression level (1-21)"),
    comment: Optional[str] = typer.Option(
        None, "--comment", "-c", help="Metadata comment"
    ),
    include: Optional[List[str]] = typer.Option(
        None, "--include", "-i", help="Include patterns"
    ),
    exclude: Optional[List[str]] = typer.Option(
        None, "--exclude", "-e", help="Exclude patterns"
    ),
    dry_run: bool = typer.Option(False, "--dry-run", help="Simulate only"),
    skip_hooks: bool = typer.Option(False, "--skip-hooks", help="Bypass hooks"),
    quiet: bool = typer.Option(False, "--quiet", "-q", help="Use minimal output (hides progress bar)"),
):
    """Create a snapshot (.vegh) from local folder OR remote repo."""
    from contextlib import nullcontext

    # 1. Resolve Source: remote repo (cached clone) or local directory.
    if repo:
        source_path, friendly_name = ensure_repo(repo, branch, offline)
    else:
        if not path:
            console.print("[red]Missing argument 'PATH'. Or use --repo <url>.[/red]")
            raise typer.Exit(1)
        if not path.exists():
            console.print(f"[red]Path '{path}' not found.[/red]")
            raise typer.Exit(1)
        source_path = path
        friendly_name = path.name

    hooks = load_hooks(source_path)

    # --- DRY RUN ---
    # Enumerate what would be packed, report counts/sizes, and stop.
    if dry_run:
        console.print(
            f"[yellow][DRY-RUN] Simulating snapshot for [b]{friendly_name}[/b]...[/yellow]"
        )
        try:
            results: List[Tuple[str, int]] = dry_run_snap(
                str(source_path), include, exclude
            )
        except Exception as e:
            console.print(f"[red]Simulation failed:[/red] {e}")
            raise typer.Exit(1)

        total_files = len(results)
        total_size = sum(size for _, size in results)

        console.print(f"Files: [bold]{total_files:,}[/bold]")
        console.print(f"Size: [bold]{format_bytes(total_size)}[/bold] (uncompressed)")
        console.print("[bold green][OK] Simulation complete.[/bold green]")
        return

    # --- REAL SNAP ---
    if not skip_hooks:
        if not execute_hooks(hooks.get("pre"), "pre"):
            console.print("[bold red][ABORT] Pre-snap hooks failed.[/bold red]")
            raise typer.Exit(1)

    folder_name = friendly_name or "backup"
    output_path = output or Path(f"{folder_name}.vegh")

    console.print(
        f"[cyan]Packing[/cyan] [b]{friendly_name}[/b] -> [b]{output_path}[/b]"
    )
    start = time.time()

    # Single create_snap call site (the original duplicated the whole call
    # for quiet vs non-quiet). In quiet mode a Python spinner gives minimal
    # feedback and the Rust progress UI is silenced (verbose=False); in the
    # default mode the Rust core renders its own progress bar and no Python
    # spinner is started (nullcontext).
    ui_ctx = (
        console.status("[bold cyan]Compressing (Quiet)...[/bold cyan]", spinner="dots")
        if quiet
        else nullcontext()
    )
    try:
        with ui_ctx:
            count = create_snap(
                str(source_path),
                str(output_path),
                level,
                comment,
                include,
                exclude,
                no_cache=offline,
                verbose=not quiet,
            )
    except Exception as e:
        console.print(f"[red]Error:[/red] {e}")
        raise typer.Exit(1)

    # Summary panel: file count, compressed size, elapsed time.
    dur = time.time() - start
    size = output_path.stat().st_size
    grid = Table.grid(padding=1)
    grid.add_column(justify="right", style="cyan")
    grid.add_column(style="white")
    grid.add_row("Files:", f"[bold]{count:,}[/bold]")
    grid.add_row("Size:", format_bytes(size))
    grid.add_row("Time:", f"{dur:.2f}s")
    console.print(
        Panel(
            grid,
            title="[bold green]Snapshot Created[/bold green]",
            border_style="green",
            expand=False,
        )
    )

    # Post hooks are advisory: a failure warns but does not fail the snap.
    if not skip_hooks:
        if not execute_hooks(hooks.get("post"), "post"):
            console.print("[yellow][WARN] Post-snap hooks error.[/yellow]")
|
|
737
|
+
|
|
738
|
+
@app.command()
def restore(
    file: Path = typer.Argument(..., help=".vegh file"),
    out_dir: Path = typer.Argument(Path("."), help="Dest dir"),
    path: Optional[List[str]] = typer.Option(
        None, "--path", "-p", help="Partial restore"
    ),
    flatten: bool = typer.Option(
        False, "--flatten", help="Flatten directory structure"
    ),
):
    """Restore a snapshot."""
    # Guard: nothing to do without the archive file.
    if not file.exists():
        console.print("[red]File not found.[/red]")
        raise typer.Exit(1)

    spinner = console.status("[bold cyan]Restoring...[/bold cyan]", spinner="dots")
    with spinner:
        try:
            # Extraction (full or partial via --path) happens in the Rust core.
            restore_snap(str(file), str(out_dir), path, flatten)
        except Exception as e:
            console.print(f"[red]Restore failed:[/red] {e}")
            raise typer.Exit(1)

    console.print(f"[green][OK] Restored to[/green] [bold]{out_dir}[/bold]")
|
|
760
|
+
|
|
761
|
+
|
|
762
|
+
@app.command()
def cat(
    file: Path = typer.Argument(..., help=".vegh file"),
    target: str = typer.Argument(..., help="Path inside snapshot"),
    raw: bool = typer.Option(False, "--raw", help="Print raw content to stdout"),
):
    """View content of a file in the snapshot."""
    if not file.exists():
        console.print(f"[red]File '{file}' not found.[/red]")
        raise typer.Exit(1)

    try:
        payload = cat_file(str(file), target)

        if raw:
            # Binary-safe passthrough, suitable for shell piping.
            stream = sys.stdout.buffer
            stream.write(bytes(payload))
            stream.flush()
            return

        try:
            text = bytes(payload).decode("utf-8")
        except UnicodeDecodeError:
            console.print(
                f"[yellow]Binary content detected ({len(payload)} bytes).[/yellow]"
            )
        else:
            from rich.syntax import Syntax

            # Lexer is guessed from the extension inside the archive.
            lexer = Path(target).suffix.lstrip(".") or "txt"
            console.print(Syntax(text, lexer, theme="monokai", line_numbers=True))
    except Exception as e:
        console.print(f"[red]Error:[/red] {e}")
        raise typer.Exit(1)
|
|
792
|
+
|
|
793
|
+
|
|
794
|
+
@app.command()
def diff(
    file: Optional[Path] = typer.Argument(
        None, help=".vegh file (Optional if using --repo)"
    ),
    target: Path = typer.Argument(
        Path("."), help="Local directory OR .vegh file to compare against"
    ),
    repo: Optional[str] = typer.Option(
        None, "--repo", help="Use remote repo as Source instead of .vegh file"
    ),
    branch: Optional[str] = typer.Option(
        None, "--branch", "-b", help="Branch for remote repo"
    ),
    offline: bool = typer.Option(
        False, "--offline", help="Force offline mode (overrides config)"
    ),
):
    """Compare snapshot OR remote repo with a local directory OR another snapshot.

    The "source" side is either a remote repo (--repo) or a .vegh file;
    the "target" side is a local directory or another .vegh file.
    Comparison is by file path and size only -- content hashes are not compared.
    """
    if not target.exists():
        console.print(f"[red]Target '{target}' not found.[/red]")
        raise typer.Exit(1)

    # path -> size mapping for the source side; keys are POSIX-style paths
    # so Windows backslash paths compare equal to archived paths.
    snap_map = {}
    source_name = "Unknown"
    target_is_snap = target.suffix == ".vegh"

    with console.status(
        "[bold cyan]Preparing Comparison...[/bold cyan]", spinner="dots"
    ):
        try:
            if repo:
                # Remote repo wins over a positional .vegh file.
                repo_path, source_name = ensure_repo(repo, branch, offline)
                source_name = f"Repo: {source_name}"
                snap_list = dry_run_snap(str(repo_path))
                snap_map = {Path(p).as_posix(): s for p, s in snap_list}
            elif file:
                if not file.exists():
                    console.print(f"[red]File '{file}' not found.[/red]")
                    raise typer.Exit(1)
                source_name = f"Snap: {file.name}"
                snap_files = list_files_details(str(file))
                # .vegh.json is internal metadata, not user content.
                snap_map = {
                    Path(p).as_posix(): s for p, s in snap_files if p != ".vegh.json"
                }
            else:
                console.print(
                    "[red]Must specify either a .vegh file OR --repo <url>.[/red]"
                )
                raise typer.Exit(1)

            # Build the target-side map the same way (snapshot vs directory).
            if target_is_snap:
                target_files = list_files_details(str(target))
                local_files = {
                    Path(p).as_posix(): s for p, s in target_files if p != ".vegh.json"
                }
            else:
                local_list = dry_run_snap(str(target))
                local_files = {Path(p).as_posix(): s for p, s in local_list}
        except Exception as e:
            # NOTE(review): typer.Exit raised above is also an Exception, so
            # the early exits are re-reported here before exiting -- confirm intended.
            console.print(f"[red]Error:[/red] {e}")
            raise typer.Exit(1)

    # Union of both sides, walked in sorted order for stable output.
    all_paths = set(snap_map.keys()) | set(local_files.keys())
    table = Table(title=f"Diff: {source_name} vs {target}")
    table.add_column("File Path", style="cyan")
    table.add_column("Status", style="bold")
    table.add_column("Details", style="dim")

    changes = False
    for path in sorted(all_paths):
        in_src = path in snap_map
        in_loc = path in local_files

        if in_src and in_loc:
            # Present on both sides: flag only when the sizes differ.
            if snap_map[path] != local_files[path]:
                table.add_row(
                    path,
                    "[yellow]MODIFIED[/yellow]",
                    f"Size: {format_bytes(snap_map[path])} -> {format_bytes(local_files[path])}",
                )
                changes = True
        elif in_src and not in_loc:
            # Only in the source side.
            msg = (
                "In Source, missing in Target"
                if target_is_snap
                else "In Source, missing locally"
            )
            table.add_row(path, "[red]DELETED[/red]", msg)
            changes = True
        elif not in_src and in_loc:
            # Only in the target side.
            msg = (
                "In Target, missing in Source"
                if target_is_snap
                else "On Disk, missing in source"
            )
            table.add_row(path, "[green]NEW[/green]", msg)
            changes = True

    if changes:
        console.print(table)
    else:
        console.print("[bold green]No changes detected (Sync).[/bold green]")
|
|
897
|
+
|
|
898
|
+
|
|
899
|
+
@app.command()
def audit(
    file: Path = typer.Argument(..., help=".vegh file to audit"),
):
    """Scan snapshot for sensitive data and security risks.

    Two passes: (1) match every archived path against SENSITIVE_PATTERNS,
    and (2) scan the content of textual config files for secret keywords.
    Findings are reported; nothing is modified.
    """
    if not file.exists():
        console.print(f"[red]File '{file}' not found.[/red]")
        raise typer.Exit(1)

    console.print(f"[bold cyan]Auditing {file.name}...[/bold cyan]")

    # Collected findings: (path, risk type, detail) tuples.
    risks = []

    try:
        files = list_files(str(file))

        # 1. Filename Scan
        # Case-insensitive regex match of each archived path.
        for path in files:
            for pattern in SENSITIVE_PATTERNS:
                if re.search(pattern, path, re.IGNORECASE):
                    risks.append((path, "Filename Match", f"Pattern: {pattern}"))

        # 2. Content Scan (Config files only)
        # Scan for common secrets inside textual config files
        config_exts = {
            ".env",
            ".json",
            ".yaml",
            ".yml",
            ".toml",
            ".conf",
            ".ini",
            ".xml",
        }
        # Keyword match is case-sensitive: targets conventional UPPER_CASE env names.
        secret_keywords = [
            "PASSWORD",
            "SECRET_KEY",
            "TOKEN",
            "API_KEY",
            "ACCESS_KEY",
            "PRIVATE_KEY",
        ]

        for path in files:
            p = Path(path)
            if p.suffix in config_exts:
                try:
                    # Limit content read size if needed.
                    content_bytes = cat_file(str(file), path)
                    try:
                        content = content_bytes.decode("utf-8")
                        for keyword in secret_keywords:
                            if keyword in content:
                                risks.append(
                                    (path, "Content Match", f"Found keyword: {keyword}")
                                )
                                break  # Report once per file
                    except UnicodeDecodeError:
                        pass  # Skip binary files
                except Exception:
                    # Best effort: an unreadable entry must not abort the audit.
                    pass

        if not risks:
            console.print("[bold green]No security risks found.[/bold green]")
        else:
            table = Table(title=f"Security Audit: {file.name}")
            table.add_column("File Path", style="red")
            table.add_column("Type", style="yellow")
            table.add_column("Detail", style="dim")

            for path, type_, detail in risks:
                table.add_row(path, type_, detail)

            console.print(table)
            console.print(f"\n[bold red]Found {len(risks)} potential risks.[/bold red]")

    except Exception as e:
        console.print(f"[red]Audit failed:[/red] {e}")
        raise typer.Exit(1)
|
|
978
|
+
|
|
979
|
+
|
|
980
|
+
@app.command()
def doctor(
    file: Optional[Path] = typer.Argument(None, help="Optional: .vegh file to check"),
):
    """Check environment health and cache status.

    Reports the Python version, config-file presence, availability of the
    compiled Rust core, and repo-cache size. If *file* is given, its
    snapshot integrity is verified as well.
    """
    # Single source of truth for the "cache is getting big" warning (5 GiB).
    # This value was previously duplicated in two comparisons, one using `<`
    # and one using `>`, so at exactly 5 GiB the size rendered yellow but no
    # warning printed; both checks now agree.
    cache_warn_threshold = 5 * 1024 * 1024 * 1024

    console.print("[bold cyan]Vegh Doctor[/bold cyan]")

    py_ver = sys.version.split()[0]
    console.print(f"Python Version: [green]{py_ver}[/green]")

    # Config file presence only; contents are not validated here.
    if CONFIG_FILE.exists():
        console.print(f"Config: [green]Found[/green] ({CONFIG_FILE})")
    else:
        console.print("Config: [dim]Not configured[/dim]")

    # Probe the compiled extension -- imported solely to test availability.
    try:
        from . import _core  # noqa: F401

        console.print("Rust Core: [green]Loaded[/green]")
    except ImportError:
        console.print("Rust Core: [red]MISSING[/red]")

    # Repo cache inspection.
    if REPO_CACHE_DIR.exists():
        repo_count = sum(1 for x in REPO_CACHE_DIR.iterdir() if x.is_dir())
        total_size = get_dir_size(REPO_CACHE_DIR)
        size_str = format_bytes(total_size)
        over_limit = total_size >= cache_warn_threshold
        color = "yellow" if over_limit else "green"

        console.print(
            f"Repo Cache: [bold]{repo_count}[/bold] repos ([{color}]{size_str}[/{color}])"
        )
        console.print(f"Cache Location: [dim]{REPO_CACHE_DIR}[/dim]")
        if over_limit:
            console.print(
                "[yellow]WARN: Cache is large. Run 'vegh clean' to free space.[/yellow]"
            )
    else:
        console.print("Repo Cache: [dim]Empty[/dim]")

    # Optional single-snapshot integrity check.
    if file:
        console.print(f"\n[bold cyan]Checking Snapshot: {file.name}[/bold cyan]")
        if file.exists():
            try:
                check_integrity(str(file))
                console.print("Integrity: [green]OK[/green]")
            except Exception as e:
                console.print(f"Integrity: [bold red]CORRUPT ({e})[/bold red]")
        else:
            console.print("[red]File not found![/red]")

    console.print("\n[bold green]System seems healthy![/bold green]")
|
|
1033
|
+
|
|
1034
|
+
|
|
1035
|
+
@app.command()
def explore(file: Path = typer.Argument(..., help=".vegh file to explore")):
    """Interactive Explorer for .vegh files.

    Opens a tiny shell (ls/cd/cat/pwd/grep) over the archived file list.
    The snapshot is read once up front; only `cat` and `grep` touch the
    archive again. Loop ends on `exit`/`quit` or Ctrl-C.
    """
    if not file.exists():
        console.print(f"[red]File '{file}' not found.[/red]")
        raise typer.Exit(1)

    console.print(
        f"[bold cyan]Exploring {file.name}. Type 'help' for commands.[/bold cyan]"
    )

    try:
        # Load file structure once
        raw_files = list_files(str(file))
        # Ensure paths are consistently posix
        all_files = sorted([Path(p).as_posix() for p in raw_files])
    except Exception as e:
        console.print(f"[red]Failed to load snapshot:[/red] {e}")
        raise typer.Exit(1)

    # Virtual cwd inside the snapshot; "/" is the archive root.
    current_path = "/"

    while True:
        try:
            cmd_input = Prompt.ask(f"[bold green]vegh:{current_path}>[/bold green]")
            parts = cmd_input.split()
            if not parts:
                continue

            cmd = parts[0]
            args = parts[1:]

            if cmd in ("exit", "quit"):
                break
            elif cmd == "clear":
                console.clear()
            elif cmd == "help":
                console.print("""
[bold]Available Commands:[/bold]
  ls [dir]          List files
  cd <dir>          Change directory
  cat <file>        View file content
  pwd               Show current path
  clear             Clear screen
  grep <text> [-i]  Search text in files (-i for case-insensitive)
  exit              Exit explorer
""")
            elif cmd == "pwd":
                console.print(current_path)

            elif cmd == "grep":
                if not args:
                    console.print("[red]Usage: grep <text> [-i][/red]")
                    continue

                # Simple argument parsing for -i flag
                case_sensitive = True
                search_text = args[0]

                # Handle cases like: grep -i "foo" OR grep "foo" -i
                if len(args) > 1:
                    if args[0] == "-i":
                        case_sensitive = False
                        search_text = args[1]
                    elif args[1] == "-i":
                        case_sensitive = False
                        search_text = args[0]

                # Determine search scope based on current directory in explore
                # If root, search everything. If subdir, filter by prefix.
                search_prefix = current_path if current_path != "/" else ""

                # Remove leading slash to match tar paths (e.g., "src/main.rs" not "/src/main.rs")
                if search_prefix.startswith("/"):
                    search_prefix = search_prefix[1:]

                with console.status(f"[cyan]Searching '{search_text}'...[/cyan]"):
                    try:
                        # Call Rust Core (Zero extra dep!)
                        matches = search_snap(str(file), search_text, search_prefix, case_sensitive)

                        if not matches:
                            console.print("[yellow]No matches found.[/yellow]")
                        else:
                            # Render results nicely
                            table = Table(box=None, show_header=False)
                            table.add_column("Location", style="cyan")
                            table.add_column("Content", style="white")

                            current_file_group = ""
                            # matches are (path, line number, line text) tuples.
                            for fpath, line_num, content in matches:
                                # Group output by file for cleaner look
                                if fpath != current_file_group:
                                    table.add_row(f"\n[bold green]{fpath}[/bold green]", "")
                                    current_file_group = fpath

                                # Strip whitespace for cleaner display
                                table.add_row(f"  :{line_num}", f"[dim]{content.strip()}[/dim]")

                            console.print(table)
                            console.print(f"\n[bold]Found {len(matches)} matches.[/bold]")

                    except Exception as e:
                        console.print(f"[red]Grep error:[/red] {e}")

            elif cmd == "ls":
                target_path = current_path
                if args:
                    # simplistic path resolution
                    arg_path = args[0]
                    if arg_path.startswith("/"):
                        target_path = arg_path
                    else:
                        target_path = (Path(current_path) / arg_path).as_posix()

                # Normalize: /src/ -> /src
                if target_path != "/" and target_path.endswith("/"):
                    target_path = target_path.rstrip("/")

                # Filter items in this directory
                items = set()
                prefix = target_path if target_path == "/" else target_path + "/"

                found_any = False
                for p in all_files:
                    # p is like "src/main.rs"
                    # if target is "/", we want "src"
                    # if target is "/src", we want "main.rs"

                    p_abs = (
                        "/" + p
                    )  # Treat stored paths as relative to root, map to absolute

                    if p_abs.startswith(prefix):
                        found_any = True
                        rel = p_abs[len(prefix) :]
                        if "/" in rel:
                            items.add(rel.split("/")[0] + "/")  # Directory
                        else:
                            items.add(rel)  # File

                # NOTE(review): empty listing for a nonexistent dir is shown
                # silently rather than as an error -- confirm intended.
                if not found_any and target_path != "/":
                    pass

                # Sort: Dirs first
                sorted_items = sorted(
                    list(items), key=lambda x: (not x.endswith("/"), x)
                )

                grid = Table.grid(padding=1)
                for item in sorted_items:
                    if item.endswith("/"):
                        grid.add_row(f"[bold blue]{item}[/bold blue]")
                    else:
                        grid.add_row(f"[green]{item}[/green]")
                console.print(grid)

            elif cmd == "cd":
                if not args:
                    continue
                new_dir = args[0]

                if new_dir == "..":
                    # Path("/").parent is "/", so ".." at root is a no-op.
                    current_path = str(Path(current_path).parent.as_posix())
                    if current_path == ".":
                        current_path = "/"
                elif new_dir == "/":
                    current_path = "/"
                else:
                    # Construct target
                    if new_dir.startswith("/"):
                        target = new_dir
                    else:
                        target = (Path(current_path) / new_dir).as_posix()

                    if target != "/" and target.endswith("/"):
                        target = target.rstrip("/")

                    # Validate existence (is it a directory prefix?)
                    prefix = target + "/"
                    is_valid = any(("/" + f).startswith(prefix) for f in all_files)

                    if is_valid or target == "/":
                        current_path = target
                    else:
                        console.print(f"[red]Directory not found: {new_dir}[/red]")

            elif cmd == "cat":
                if not args:
                    console.print("[red]Usage: cat <file>[/red]")
                    continue

                fname = args[0]
                # Resolve path
                if fname.startswith("/"):
                    full_path = fname.lstrip("/")
                else:
                    if current_path == "/":
                        full_path = fname
                    else:
                        full_path = (Path(current_path) / fname).as_posix().lstrip("/")

                if full_path in all_files:
                    # Call existing cat logic
                    try:
                        content_bytes = cat_file(str(file), full_path)
                        try:
                            content_str = bytes(content_bytes).decode("utf-8")
                            console.print(content_str)
                        except UnicodeDecodeError:
                            console.print(
                                f"[yellow]Binary content ({len(content_bytes)} bytes)[/yellow]"
                            )
                    except Exception as e:
                        console.print(f"[red]Error:[/red] {e}")
                else:
                    console.print(f"[red]File not found: {fname}[/red]")

            else:
                console.print(f"[red]Unknown command: {cmd}[/red]")

        except KeyboardInterrupt:
            # Ctrl-C leaves the explorer, it does not kill the CLI process.
            break
        except Exception as e:
            # Keep the shell alive on any per-command failure.
            console.print(f"[red]Error:[/red] {e}")
|
|
1260
|
+
|
|
1261
|
+
|
|
1262
|
+
@app.command()
def clean():
    """Clean up the repository cache."""
    # Nothing cached -> nothing to do.
    if not REPO_CACHE_DIR.exists():
        console.print("[yellow]Cache is already empty.[/yellow]")
        return

    # Destructive operation: require an explicit confirmation first.
    if not typer.confirm(f"Delete all cached repos in {REPO_CACHE_DIR}?"):
        raise typer.Abort()

    with console.status("[red]Cleaning cache...[/red]", spinner="bouncingBall"):
        try:
            shutil.rmtree(REPO_CACHE_DIR)
        except Exception as e:
            console.print(f"[red]Failed to clean cache:[/red] {e}")
        else:
            console.print(
                f"[green]Successfully cleared cache at {REPO_CACHE_DIR}[/green]"
            )
|
|
1281
|
+
|
|
1282
|
+
|
|
1283
|
+
@app.command("list")
def list_cmd(
    file: Path = typer.Argument(..., help=".vegh file"),
    tree_view: bool = typer.Option(True, "--tree/--flat", help="View format"),
):
    """List snapshot contents.

    Renders the archived paths either as a tree (default) or as a flat,
    sorted table. Exits with status 1 when the file is missing or the
    snapshot cannot be read.
    """
    # Consistency fix: every sibling command validates the input path up
    # front; previously a missing file surfaced only as a core error.
    if not file.exists():
        console.print(f"[red]File '{file}' not found.[/red]")
        raise typer.Exit(1)
    try:
        files = list_files(str(file))
        if not files:
            console.print("[yellow]Empty snapshot.[/yellow]")
            return
        if tree_view:
            console.print(build_tree(files, file.name))
        else:
            table = Table(title=f"Contents of {file.name}")
            table.add_column("File Path", style="cyan")
            for f in sorted(files):
                table.add_row(f)
            console.print(table)
    except Exception as e:
        console.print(f"[red]List failed:[/red] {e}")
        # Consistency fix: previously a read failure still exited with status 0.
        raise typer.Exit(1)
|
|
1304
|
+
|
|
1305
|
+
|
|
1306
|
+
@app.command()
def check(file: Path = typer.Argument(..., help=".vegh file")):
    """Verify integrity & metadata."""
    if not file.exists():
        console.print("[red]File not found.[/red]")
        raise typer.Exit(1)
    with console.status("[bold cyan]Verifying...[/bold cyan]", spinner="dots"):
        try:
            digest = check_integrity(str(file))
            meta = json.loads(get_metadata(str(file)))

            # Two-column key/value layout for the summary.
            summary = Table.grid(padding=1)
            summary.add_column(style="bold cyan", justify="right")
            summary.add_column(style="white")
            summary.add_row("Blake3:", f"[dim]{digest}[/dim]")
            summary.add_row("Author:", meta.get("author", "Unknown"))
            summary.add_row("Ver:", meta.get("tool_version", "Unknown"))

            panel = Panel(
                summary,
                title=f"[bold green][OK] Valid ({file.name})[/bold green]",
                border_style="green",
            )
            console.print(panel)
        except Exception as e:
            console.print(f"[bold red]Verification Failed:[/bold red] {e}")
            raise typer.Exit(1)
|
|
1334
|
+
|
|
1335
|
+
|
|
1336
|
+
@app.command()
def loc(
    target: Optional[str] = typer.Argument(None, help="File, Dir, or Git URL"),
    repo: Optional[str] = typer.Option(None, "--repo", help="Git Repo URL"),
    branch: Optional[str] = typer.Option(None, "--branch", "-b", help="Branch/Tag"),
    offline: bool = typer.Option(
        False, "--offline", help="Force offline mode (overrides config)"
    ),
    raw: bool = typer.Option(False, "--raw", help="Raw list view"),
    sloc: bool = typer.Option(
        False, "--sloc", help="Count SLOC (Source Lines of Code) instead of LOC"
    ),
):
    """Visualize Lines of Code (Analytics).

    Accepts a local file/directory, a .vegh snapshot, or a Git URL
    (positional or via --repo). Results render as a dashboard when
    available, otherwise as a sorted table.
    """
    # --repo takes precedence over the positional target.
    input_target = repo or target
    if not input_target:
        console.print("[red]Provide file/dir or use --repo.[/red]")
        raise typer.Exit(1)

    # A Git URL scheme, or any explicit --repo, means "fetch it first".
    is_remote = (
        input_target.startswith(("http://", "https://", "git@")) or repo is not None
    )
    scan_path: Optional[Path] = None
    display_name: str = "Unknown"

    try:
        if is_remote:
            scan_path, display_name = ensure_repo(input_target, branch, offline)
        else:
            scan_path = Path(input_target)
            display_name = scan_path.name
            if not scan_path.exists():
                console.print(f"[red]Path '{scan_path}' not found.[/red]")
                raise typer.Exit(1)

        metric_name = "SLOC" if sloc else "LOC"

        with console.status(
            f"[cyan]Analyzing {display_name} ({metric_name})...[/cyan]", spinner="dots"
        ):
            if sloc:
                # SLOC helpers are optional imports; None means they failed to load.
                if scan_path.is_dir():
                    if scan_sloc is None:
                        console.print(
                            "[red]SLOC analysis not available (Import Error).[/red]"
                        )
                        raise typer.Exit(1)
                    results = scan_sloc(str(scan_path))
                else:
                    if calculate_sloc is None:
                        console.print(
                            "[red]SLOC analysis not available (Import Error).[/red]"
                        )
                        raise typer.Exit(1)
                    # For single file, results should be list of (path, count)
                    cnt = calculate_sloc(str(scan_path))
                    results = [(scan_path.name, cnt)]
            else:
                if scan_path.is_file():
                    # FIX: Handle single file logic separately
                    # Rust's count_locs expects a .vegh snapshot
                    if scan_path.suffix == ".vegh":
                        results = count_locs(str(scan_path))
                    else:
                        # Plain text file counting in Python
                        try:
                            with open(scan_path, "r", encoding="utf-8", errors="ignore") as f:
                                cnt = sum(1 for _ in f)
                            results = [(scan_path.name, cnt)]
                        except Exception:
                            # Unreadable file counts as zero rather than failing.
                            results = [(scan_path.name, 0)]
                elif scan_path.is_dir():
                    results = scan_locs_dir(str(scan_path))
                else:
                    results = []  # Should not happen

        # Dashboard renderer is optional; --raw forces the plain table.
        if render_dashboard and not raw:
            render_dashboard(console, display_name, results, metric_name=metric_name)
        else:
            total = sum(c for _, c in results)
            table = Table(title=f"{metric_name}: {display_name}")
            table.add_column(metric_name, style="green", footer=f"{total:,}")
            table.add_column("Path", style="cyan")
            # Largest files first; zero-count entries are omitted.
            for p, c in sorted(results, key=lambda x: x[1], reverse=True):
                if c > 0:
                    table.add_row(f"{c:,}", p)
            console.print(table)

    except Exception as e:
        # NOTE(review): typer.Exit raised inside the try is also caught here
        # and re-reported as an error before exiting -- confirm intended.
        console.print(f"[red]Error:[/red] {e}")
        raise typer.Exit(1)
|
|
1427
|
+
|
|
1428
|
+
|
|
1429
|
+
def _upload_chunk(url, file_path, start, chunk_size, index, total, filename, headers):
    """Upload one slice of *file_path* to *url*; raise on any failure.

    The chunk position is conveyed via X-File-Name / X-Chunk-Index /
    X-Total-Chunks headers so the server can reassemble the file.
    """
    try:
        # Read just this chunk's byte range; the handle is closed before upload.
        with open(file_path, "rb") as fh:
            fh.seek(start)
            payload = fh.read(chunk_size)

        # Per-chunk header set: never mutate the caller's shared dict.
        chunk_headers = dict(headers)
        chunk_headers["X-File-Name"] = filename
        chunk_headers["X-Chunk-Index"] = str(index)
        chunk_headers["X-Total-Chunks"] = str(total)

        resp = requests.post(url, data=payload, headers=chunk_headers)
        if resp.status_code < 200 or resp.status_code >= 300:
            raise Exception(f"Status {resp.status_code}")
        return True
    except Exception as e:
        # Tag the failure with the chunk index so the caller can report it.
        raise Exception(f"Chunk {index}: {e}")
|
|
1448
|
+
|
|
1449
|
+
|
|
1450
|
+
@app.command()
def send(
    file: Path = typer.Argument(..., help="File to send"),
    url: Optional[str] = typer.Option(None, help="Target URL"),
    force_chunk: bool = typer.Option(False, "--force-chunk"),
    auth: Optional[str] = typer.Option(None, "--auth"),
):
    """Send snapshot to server.

    Small files go up in a single POST; files at or above CHUNK_THRESHOLD
    (or with --force-chunk) are split into CHUNK_SIZE chunks and uploaded
    concurrently via _upload_chunk. CLI options override config values.
    """
    if not file.exists():
        console.print("[red]File not found.[/red]")
        raise typer.Exit(1)
    cfg = load_config()
    # Explicit CLI values win over the stored configuration.
    target = url or cfg.get("url")
    token = auth or cfg.get("auth")
    if not target:
        console.print("[red]No URL configured.[/red]")
        raise typer.Exit(1)

    size = file.stat().st_size
    headers = {"Authorization": f"Bearer {token}"} if token else {}

    console.print(f"Target: {target} | Size: {format_bytes(size)}")

    if size < CHUNK_THRESHOLD and not force_chunk:
        # Single-shot upload: stream the open file handle directly.
        try:
            with open(file, "rb") as f:
                with console.status("Uploading...", spinner="dots"):
                    r = requests.post(target, data=f, headers=headers)
                if 200 <= r.status_code < 300:
                    console.print("[green]Success![/green]")
                else:
                    console.print(f"[red]Failed: {r.status_code}[/red]")
        except Exception as e:
            console.print(f"[red]Error: {e}[/red]")
    else:
        # Chunked upload: fan out over a thread pool, abort on first failure.
        chunks = math.ceil(size / CHUNK_SIZE)
        with console.status(f"Sending {chunks} chunks...", spinner="dots") as s:
            done = 0
            with ThreadPoolExecutor(max_workers=CONCURRENT_WORKERS) as ex:
                fs = []
                for i in range(chunks):
                    start = i * CHUNK_SIZE
                    # Last chunk may be shorter than CHUNK_SIZE.
                    curr = min(CHUNK_SIZE, size - start)
                    fs.append(
                        ex.submit(
                            _upload_chunk,
                            target,
                            file,
                            start,
                            curr,
                            i,
                            chunks,
                            file.name,
                            headers,
                        )
                    )
                for f in as_completed(fs):
                    try:
                        f.result()
                        done += 1
                        s.update(f"Sending... ({done}/{chunks})")
                    except Exception as e:
                        # One failed chunk aborts the whole transfer.
                        console.print(f"[red]Aborted: {e}[/red]")
                        raise typer.Exit(1)
        console.print("[green]Success![/green]")
|
|
1515
|
+
|
|
1516
|
+
# --- VEGH PROMPT COMMAND ---
|
|
1517
|
+
|
|
1518
|
+
@app.command()
def prompt(
    target: Path = typer.Argument(Path("."), help="Target codebase"),
    clean: bool = typer.Option(
        False,
        "--clean",
        help="Remove lock files, binaries, and secrets to save tokens."
    ),
    exclude: Optional[List[str]] = typer.Option(
        None,
        "--exclude",
        "-e",
        help="Custom patterns to exclude"
    ),
    copy: bool = typer.Option(
        False,
        "--copy",
        "-c",
        help="Copy output to clipboard (No extra deps required)"
    ),
    output: Optional[Path] = typer.Option(
        None,
        "--output",
        "-o",
        help="Save XML to file"
    ),
):
    """
    Generate XML context for LLM.

    Gathers the codebase under *target* into a single XML document via the
    Rust core, then routes it to a file (--output), the clipboard (--copy,
    with stdout fallback), or stdout (default).
    """
    if not target.exists():
        console.print(f"[red]Path '{target}' not found.[/red]")
        raise typer.Exit(1)

    # 1. Prepare Exclude Patterns
    # User-supplied patterns first, then (optionally) the built-in noise list.
    final_exclude = []
    if exclude:
        final_exclude.extend(exclude)

    if clean:
        final_exclude.extend(NOISE_PATTERNS)
        console.print("[dim]Clean mode enabled: Ignoring lock files, binaries & secrets.[/dim]")

    # 2. Call Rust Core
    with console.status("[bold cyan]Gathering context...[/bold cyan]"):
        try:
            # Calls the new Rust function
            xml_content = get_context_xml(str(target), exclude=final_exclude)
        except Exception as e:
            console.print(f"[red]Error:[/red] {e}")
            raise typer.Exit(1)

    # 3. Handle Output
    # --output wins over --copy; the default sink is stdout.
    if output:
        output.write_text(xml_content, encoding="utf-8")
        console.print(f"[green]Saved prompt to {output}[/green]")
    elif copy:
        # Use our Native helper instead of Pyperclip
        success = _copy_to_clipboard_native(xml_content)
        if success:
            console.print(f"[green]Copied {len(xml_content)} chars to clipboard![/green]")
        else:
            # No native clipboard tool available: degrade to stdout.
            console.print("[yellow]Clipboard tool not found (pbcopy, clip, xclip/wl-copy missing). Printing to stdout:[/yellow]")
            print(xml_content)
    else:
        # Default: Print to stdout
        print(xml_content)
|
|
1585
|
+
|
|
1586
|
+
|
|
1587
|
+
if __name__ == "__main__":
    # Allow running this module directly (e.g. `python cli.py`); normal
    # installs invoke `app` through the package entry point instead.
    app()
|