peridot-cli 0.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
peridot.py
ADDED
|
@@ -0,0 +1,3362 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import argparse
|
|
6
|
+
import base64
|
|
7
|
+
from concurrent.futures import FIRST_COMPLETED, ProcessPoolExecutor, ThreadPoolExecutor, wait
|
|
8
|
+
import fnmatch
|
|
9
|
+
import gzip
|
|
10
|
+
import hashlib
|
|
11
|
+
import json
|
|
12
|
+
import os
|
|
13
|
+
import platform
|
|
14
|
+
import shutil
|
|
15
|
+
import socket
|
|
16
|
+
import stat
|
|
17
|
+
import subprocess
|
|
18
|
+
import sys
|
|
19
|
+
from dataclasses import dataclass
|
|
20
|
+
from datetime import datetime, timezone
|
|
21
|
+
from pathlib import Path
|
|
22
|
+
from tempfile import TemporaryDirectory
|
|
23
|
+
from types import SimpleNamespace
|
|
24
|
+
from typing import Callable, Iterable
|
|
25
|
+
from zipfile import ZIP_DEFLATED, ZIP_STORED, ZipFile
|
|
26
|
+
|
|
27
|
+
try:
|
|
28
|
+
from cryptography.exceptions import InvalidTag
|
|
29
|
+
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
|
30
|
+
except ModuleNotFoundError:
|
|
31
|
+
print(
|
|
32
|
+
"Error: falta la dependencia 'cryptography'. "
|
|
33
|
+
"Instalala con 'python3 -m pip install .'.",
|
|
34
|
+
file=sys.stderr,
|
|
35
|
+
)
|
|
36
|
+
raise SystemExit(1)
|
|
37
|
+
|
|
38
|
+
try:
|
|
39
|
+
import zstandard as zstd
|
|
40
|
+
except ModuleNotFoundError:
|
|
41
|
+
zstd = None
|
|
42
|
+
|
|
43
|
+
try:
|
|
44
|
+
from rich.align import Align
|
|
45
|
+
from rich.console import Console
|
|
46
|
+
from rich.panel import Panel
|
|
47
|
+
from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeElapsedColumn, TimeRemainingColumn
|
|
48
|
+
from rich.prompt import Confirm
|
|
49
|
+
from rich.prompt import Prompt
|
|
50
|
+
from rich.table import Table
|
|
51
|
+
from rich.text import Text
|
|
52
|
+
except ModuleNotFoundError:
|
|
53
|
+
print(
|
|
54
|
+
"Error: falta la dependencia 'rich'. "
|
|
55
|
+
"Instalala con 'python3 -m pip install .'.",
|
|
56
|
+
file=sys.stderr,
|
|
57
|
+
)
|
|
58
|
+
raise SystemExit(1)
|
|
59
|
+
|
|
60
|
+
try:
|
|
61
|
+
import questionary
|
|
62
|
+
from questionary import Choice
|
|
63
|
+
except ModuleNotFoundError:
|
|
64
|
+
questionary = None
|
|
65
|
+
Choice = None
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
QUESTIONARY_AVAILABLE = questionary is not None and Choice is not None
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
APP_VERSION = "0.4.1"
|
|
72
|
+
PACKAGE_VERSION = 1
|
|
73
|
+
DEFAULT_COMPRESSION_LEVEL = 1
|
|
74
|
+
DEFAULT_JOBS = max(2, min(8, os.cpu_count() or 2))
|
|
75
|
+
DEFAULT_KEY = Path.home() / ".config" / "peridot" / "peridot.key"
|
|
76
|
+
DEFAULT_PROFILE_STORE = Path.home() / ".config" / "peridot" / "profiles.json"
|
|
77
|
+
DEFAULT_HISTORY_DIR = Path.home() / ".config" / "peridot" / "history"
|
|
78
|
+
DEFAULT_SETTINGS_STORE = Path.home() / ".config" / "peridot" / "settings.json"
|
|
79
|
+
DEFAULT_EXCLUDES = {
|
|
80
|
+
".DS_Store",
|
|
81
|
+
".Trash",
|
|
82
|
+
".cache",
|
|
83
|
+
".npm",
|
|
84
|
+
".pnpm-store",
|
|
85
|
+
".yarn",
|
|
86
|
+
".local/share/Trash",
|
|
87
|
+
".config/peridot",
|
|
88
|
+
}
|
|
89
|
+
SENSITIVE_PATTERNS = (
|
|
90
|
+
"id_rsa",
|
|
91
|
+
"id_ed25519",
|
|
92
|
+
"id_ecdsa",
|
|
93
|
+
"known_hosts",
|
|
94
|
+
"credentials",
|
|
95
|
+
"token",
|
|
96
|
+
".env",
|
|
97
|
+
".npmrc",
|
|
98
|
+
)
|
|
99
|
+
|
|
100
|
+
DEFAULT_SETTINGS = {
|
|
101
|
+
"compression_level": DEFAULT_COMPRESSION_LEVEL,
|
|
102
|
+
"jobs": DEFAULT_JOBS,
|
|
103
|
+
"language": "es",
|
|
104
|
+
}
|
|
105
|
+
ENCRYPTION_ALGORITHM = "aes-gcm"
|
|
106
|
+
INCOMPRESSIBLE_SUFFIXES = {
|
|
107
|
+
".7z",
|
|
108
|
+
".avi",
|
|
109
|
+
".bz2",
|
|
110
|
+
".class",
|
|
111
|
+
".dmg",
|
|
112
|
+
".gif",
|
|
113
|
+
".gz",
|
|
114
|
+
".ico",
|
|
115
|
+
".jpeg",
|
|
116
|
+
".jpg",
|
|
117
|
+
".mkv",
|
|
118
|
+
".mov",
|
|
119
|
+
".mp3",
|
|
120
|
+
".mp4",
|
|
121
|
+
".otf",
|
|
122
|
+
".pdf",
|
|
123
|
+
".png",
|
|
124
|
+
".rar",
|
|
125
|
+
".tar",
|
|
126
|
+
".tgz",
|
|
127
|
+
".ttf",
|
|
128
|
+
".wav",
|
|
129
|
+
".webm",
|
|
130
|
+
".webp",
|
|
131
|
+
".woff",
|
|
132
|
+
".woff2",
|
|
133
|
+
".xz",
|
|
134
|
+
".zip",
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
PRIMARY_COMPRESSION = "zstd"
|
|
138
|
+
FALLBACK_COMPRESSION = "gzip"
|
|
139
|
+
CURRENT_LANGUAGE = "es"
|
|
140
|
+
|
|
141
|
+
TRANSLATIONS = {
|
|
142
|
+
"en": {
|
|
143
|
+
"Peridot initialized": "Peridot initialized",
|
|
144
|
+
"Next steps": "Next steps",
|
|
145
|
+
"Bench results": "Bench results",
|
|
146
|
+
"Verificacion fallida": "Verification failed",
|
|
147
|
+
"Verificacion OK": "Verification OK",
|
|
148
|
+
"Inspeccionar": "Inspect",
|
|
149
|
+
"Mostrar lista de ficheros?": "Show file list?",
|
|
150
|
+
"Mostrar manifest JSON?": "Show manifest JSON?",
|
|
151
|
+
"Aplicar": "Apply",
|
|
152
|
+
"Hacer dry-run primero?": "Dry run first?",
|
|
153
|
+
"Directorio destino": "Target directory",
|
|
154
|
+
"Guardar backups antes de sobrescribir?": "Save backups before overwrite?",
|
|
155
|
+
"Directorio de backups": "Backup directory",
|
|
156
|
+
"Ignorar mismatch de plataforma?": "Ignore platform mismatch?",
|
|
157
|
+
"Verificar": "Verify",
|
|
158
|
+
"Verificacion profunda (descifrar)?": "Deep verify with decryption?",
|
|
159
|
+
"Compartir": "Share",
|
|
160
|
+
"Formato": "Format",
|
|
161
|
+
"Fichero de salida (vacio = imprimir)": "Output file (leave empty to print)",
|
|
162
|
+
"Nombre del bundle": "Bundle name",
|
|
163
|
+
"Accion de perfil": "Profile action",
|
|
164
|
+
"Nombre del perfil": "Profile name",
|
|
165
|
+
"Rekey todos los bundles locales?": "Rekey all local bundles?",
|
|
166
|
+
"Borrar todos los bundles locales?": "Delete all local bundles?",
|
|
167
|
+
"Borrar": "Delete",
|
|
168
|
+
"Portable config bundles for humans": "Portable config bundles for humans",
|
|
169
|
+
"Bundles portables de configuracion para humanos": "Portable config bundles for humans",
|
|
170
|
+
"Error": "Error",
|
|
171
|
+
"Compression": "Compression",
|
|
172
|
+
"Level": "Level",
|
|
173
|
+
"Codec": "Codec",
|
|
174
|
+
"Mode": "Mode",
|
|
175
|
+
"Tradeoff": "Tradeoff",
|
|
176
|
+
"Rule": "Rule",
|
|
177
|
+
"menos compresion, mas velocidad": "less compression, more speed",
|
|
178
|
+
"equilibrado entre tamano y velocidad": "balanced between size and speed",
|
|
179
|
+
"mas compresion, mas lentitud": "more compression, slower speed",
|
|
180
|
+
"0 = mas rapido y mas grande | 9 = mas lento y mas pequeno": "0 = faster and bigger | 9 = slower and smaller",
|
|
181
|
+
"Dot Presets": "Dot Presets",
|
|
182
|
+
"Config Catalog": "Config Catalog",
|
|
183
|
+
"Checkbox UI no disponible:": "Checkbox UI unavailable:",
|
|
184
|
+
"esta sesion no tiene un TTY interactivo real.": "this session does not have a real interactive TTY.",
|
|
185
|
+
"Ejecuta Peridot directamente en una terminal interactiva.": "Run Peridot directly in an interactive terminal.",
|
|
186
|
+
"falta la dependencia 'questionary' en este Python.": "the 'questionary' dependency is missing in this Python.",
|
|
187
|
+
"Usa el binario instalado con './install.sh' o ejecuta 'python3 -m pip install -r requirements.txt'.": "Use the binary installed with './install.sh' or run 'python3 -m pip install -r requirements.txt'.",
|
|
188
|
+
"Select groups": "Select groups",
|
|
189
|
+
"Selecciona rutas": "Select paths",
|
|
190
|
+
"Selecciona rutas para este bundle": "Select paths for this bundle",
|
|
191
|
+
"Selecciona grupos de configuracion": "Select config groups",
|
|
192
|
+
"Espacio para marcar, Enter para confirmar": "Space to toggle, Enter to confirm",
|
|
193
|
+
"Como quieres construir este bundle?": "How do you want to build this bundle?",
|
|
194
|
+
"Bundle source: preset, catalog or empty": "Bundle source: preset, catalog or empty",
|
|
195
|
+
"Base selection": "Base selection",
|
|
196
|
+
"Preset": "Preset",
|
|
197
|
+
"Bundle name": "Bundle name",
|
|
198
|
+
"Description": "Description",
|
|
199
|
+
"Target OS": "Target OS",
|
|
200
|
+
"Primary shell/runtime": "Primary shell/runtime",
|
|
201
|
+
"Target architecture": "Target architecture",
|
|
202
|
+
"Tags (comma separated)": "Tags (comma separated)",
|
|
203
|
+
"Selected paths": "Selected paths",
|
|
204
|
+
"Edit this selection?": "Edit this selection?",
|
|
205
|
+
"Add extra paths manually?": "Add extra paths manually?",
|
|
206
|
+
"Extra paths (comma separated)": "Extra paths (comma separated)",
|
|
207
|
+
"Usando entrada manual.": "Falling back to manual input.",
|
|
208
|
+
"Paths to include (comma separated)": "Paths to include (comma separated)",
|
|
209
|
+
"No has seleccionado ninguna ruta, se usara la seleccion sugerida.": "No path was selected; the suggested selection will be used.",
|
|
210
|
+
"Output package": "Output package",
|
|
211
|
+
"Pack Preview": "Pack Preview",
|
|
212
|
+
"Files": "Files",
|
|
213
|
+
"Bundle": "Bundle",
|
|
214
|
+
"Package": "Package",
|
|
215
|
+
"Target": "Target",
|
|
216
|
+
"Payload": "Payload",
|
|
217
|
+
"Tags": "Tags",
|
|
218
|
+
"Created": "Created",
|
|
219
|
+
"From": "From",
|
|
220
|
+
"Notes": "Notes",
|
|
221
|
+
"Sensitive": "Sensitive",
|
|
222
|
+
"Bundle Card": "Bundle Card",
|
|
223
|
+
"Bundles locales": "Local Bundles",
|
|
224
|
+
"No hay bundles .peridot en este directorio": "No .peridot bundles in this directory",
|
|
225
|
+
"Elegir accion": "Choose action",
|
|
226
|
+
"Centro de acciones": "Action Hub",
|
|
227
|
+
"Atajos: 1=pack 2=catalog 3=presets 4=inspect 5=apply 6=diff 7=verify 8=doctor 9=share 10=manifest 11=history 12=profile 13=settings 14=keygen 15=rekey 16=delete 17=quit": "Quick keys: 1=pack 2=catalog 3=presets 4=inspect 5=apply 6=diff 7=verify 8=doctor 9=share 10=manifest 11=history 12=profile 13=settings 14=keygen 15=rekey 16=delete 17=quit",
|
|
228
|
+
"Compatible con esta maquina": "Compatible with this machine",
|
|
229
|
+
"Compatibility": "Compatibility",
|
|
230
|
+
"Scanning files": "Scanning files",
|
|
231
|
+
"Scanning files done": "Scanning files done",
|
|
232
|
+
"Sensitive paths detected": "Sensitive paths detected",
|
|
233
|
+
"Include these sensitive paths?": "Include these sensitive paths?",
|
|
234
|
+
"Adaptive pack:": "Adaptive pack:",
|
|
235
|
+
"reduciendo ventana inicial de {requested} a {initial} ({reason}). Puede volver a subir si la memoria acompana.": "reducing initial active window from {requested} to {initial} ({reason}). It can grow again if memory allows it.",
|
|
236
|
+
"Process pool no disponible en este sistema; usando threads.": "Process pool is not available on this system; using threads.",
|
|
237
|
+
"subiendo ventana activa {previous} -> {current} ({label}).": "raising active window {previous} -> {current} ({label}).",
|
|
238
|
+
"bajando ventana activa {previous} -> {current} ({label}).": "lowering active window {previous} -> {current} ({label}).",
|
|
239
|
+
"Created {output}": "Created {output}",
|
|
240
|
+
"Previous snapshot saved to {path}": "Previous snapshot saved to {path}",
|
|
241
|
+
"Show file list?": "Show file list?",
|
|
242
|
+
"Show manifest JSON?": "Show manifest JSON?",
|
|
243
|
+
"Dry run first?": "Dry run first?",
|
|
244
|
+
"Target directory": "Target directory",
|
|
245
|
+
"Save backups before overwrite?": "Save backups before overwrite?",
|
|
246
|
+
"Backup directory": "Backup directory",
|
|
247
|
+
"Ignore platform mismatch?": "Ignore platform mismatch?",
|
|
248
|
+
"Dry run: no se ha escrito nada.": "Dry run: nothing was written.",
|
|
249
|
+
"Apply this bundle?": "Apply this bundle?",
|
|
250
|
+
"Operacion cancelada.": "Operation cancelled.",
|
|
251
|
+
"La clave no coincide con el paquete.": "The key does not match the package.",
|
|
252
|
+
"Apply Summary": "Apply Summary",
|
|
253
|
+
"Restored": "Restored",
|
|
254
|
+
"Backups": "Backups",
|
|
255
|
+
"Backup dir": "Backup dir",
|
|
256
|
+
"Post apply": "Post apply",
|
|
257
|
+
"Post-apply checklist": "Post-apply checklist",
|
|
258
|
+
"Verify failed": "Verify failed",
|
|
259
|
+
"Verify ok": "Verify ok",
|
|
260
|
+
"Settings": "Settings",
|
|
261
|
+
"workers para pack; mas puede ir mas rapido si hay CPU libre": "workers for pack; more can go faster if CPU is available",
|
|
262
|
+
"cifrado fijo: rapido, moderno y estandar": "fixed encryption: fast, modern and standard",
|
|
263
|
+
"preparado para internacionalizacion CLI": "ready for CLI internationalization",
|
|
264
|
+
"Compression: 0 = mas rapido y mas grande, 9 = mas lento y mas pequeno.": "Compression: 0 = faster and bigger, 9 = slower and smaller.",
|
|
265
|
+
"Compression level": "Compression level",
|
|
266
|
+
"Pack workers": "Pack workers",
|
|
267
|
+
"CPU detectada: {cpu} | workers activos: {jobs}": "Detected CPU: {cpu} | active workers: {jobs}",
|
|
268
|
+
"Language": "Language",
|
|
269
|
+
"Save settings?": "Save settings?",
|
|
270
|
+
"Settings saved {path}": "Settings saved {path}",
|
|
271
|
+
"Settings updated {path}": "Settings updated {path}",
|
|
272
|
+
"Doctor": "Doctor",
|
|
273
|
+
"Check": "Check",
|
|
274
|
+
"Status": "Status",
|
|
275
|
+
"Detail": "Detail",
|
|
276
|
+
"History": "History",
|
|
277
|
+
"Profiles": "Profiles",
|
|
278
|
+
"Profile saved {name}": "Profile saved {name}",
|
|
279
|
+
"Profile deleted {name}": "Profile deleted {name}",
|
|
280
|
+
"Deleted {count} package(s)": "Deleted {count} package(s)",
|
|
281
|
+
"Rekey Summary": "Rekey Summary",
|
|
282
|
+
"Packages": "Packages",
|
|
283
|
+
"New key": "New key",
|
|
284
|
+
"Backup key": "Backup key",
|
|
285
|
+
"'x' marca los grupos recomendados por defecto para esta plataforma/shell. No significa que ya vayan dentro de ningun bundle.": "'x' marks the default recommended groups for this platform/shell. It does not mean they are already in any bundle.",
|
|
286
|
+
"Leaving Peridot UI.": "Leaving Peridot UI.",
|
|
287
|
+
"Press enter to return to the command center": "Press enter to return to the command center",
|
|
288
|
+
"Empaqueta, inspecciona y aplica bundles de configuracion .peridot": "Pack, inspect and apply .peridot configuration bundles",
|
|
289
|
+
"Crea un paquete .peridot": "Create a .peridot package",
|
|
290
|
+
"Muestra la ficha de un paquete": "Show a package card",
|
|
291
|
+
"Aplica un paquete .peridot": "Apply a .peridot package",
|
|
292
|
+
"Compara un bundle con un directorio destino": "Compare a bundle with a target directory",
|
|
293
|
+
"Verifica integridad del bundle": "Verify bundle integrity",
|
|
294
|
+
"Diagnostico del entorno local": "Run local environment diagnostics",
|
|
295
|
+
"Exporta una ficha CLI-friendly del bundle": "Export a CLI-friendly bundle card",
|
|
296
|
+
"Fusiona varios bundles en uno": "Merge several bundles into one",
|
|
297
|
+
"Extrae un subset de un bundle en otro bundle": "Extract a subset of a bundle into another bundle",
|
|
298
|
+
"Lista snapshots historicos de un bundle": "List historical snapshots for a bundle",
|
|
299
|
+
"Imprime el manifest de un paquete": "Print a package manifest",
|
|
300
|
+
"Elimina paquetes .peridot": "Delete .peridot packages",
|
|
301
|
+
"Genera una nueva clave y migra paquetes existentes": "Generate a new key and migrate existing packages",
|
|
302
|
+
"Lista grupos clasificados de configuracion detectables": "List detectable categorized configuration groups",
|
|
303
|
+
"Gestiona perfiles reutilizables": "Manage reusable profiles",
|
|
304
|
+
"Gestiona defaults persistentes de Peridot": "Manage persistent Peridot defaults",
|
|
305
|
+
"Genera o muestra la clave activa": "Generate or show the active key",
|
|
306
|
+
"Lanza el command center visual": "Launch the visual command center",
|
|
307
|
+
"Alias de pack": "Alias for pack",
|
|
308
|
+
"Alias de apply": "Alias for apply",
|
|
309
|
+
"Ruta de la clave AES-GCM (por defecto: {path})": "AES-GCM key path (default: {path})",
|
|
310
|
+
}
|
|
311
|
+
}
|
|
312
|
+
|
|
313
|
+
console = Console()
|
|
314
|
+
PRESET_LIBRARY = {
|
|
315
|
+
"macos-fish": {
|
|
316
|
+
"description": "macOS + fish dotfiles",
|
|
317
|
+
"platform": "macos",
|
|
318
|
+
"shell": "fish",
|
|
319
|
+
"tags": ["dotfiles", "fish", "macos"],
|
|
320
|
+
"paths": [
|
|
321
|
+
"~/.config/fish",
|
|
322
|
+
"~/.gitconfig",
|
|
323
|
+
"~/.ssh",
|
|
324
|
+
"~/.tool-versions",
|
|
325
|
+
],
|
|
326
|
+
},
|
|
327
|
+
"macos-zsh": {
|
|
328
|
+
"description": "macOS + zsh dotfiles",
|
|
329
|
+
"platform": "macos",
|
|
330
|
+
"shell": "zsh",
|
|
331
|
+
"tags": ["dotfiles", "zsh", "macos"],
|
|
332
|
+
"paths": [
|
|
333
|
+
"~/.zshrc",
|
|
334
|
+
"~/.zprofile",
|
|
335
|
+
"~/.gitconfig",
|
|
336
|
+
"~/.ssh",
|
|
337
|
+
],
|
|
338
|
+
},
|
|
339
|
+
"linux-zsh": {
|
|
340
|
+
"description": "Linux + zsh dotfiles",
|
|
341
|
+
"platform": "linux",
|
|
342
|
+
"shell": "zsh",
|
|
343
|
+
"tags": ["dotfiles", "zsh", "linux"],
|
|
344
|
+
"paths": [
|
|
345
|
+
"~/.zshrc",
|
|
346
|
+
"~/.zprofile",
|
|
347
|
+
"~/.config",
|
|
348
|
+
"~/.gitconfig",
|
|
349
|
+
],
|
|
350
|
+
},
|
|
351
|
+
"linux-bash": {
|
|
352
|
+
"description": "Linux + bash dotfiles",
|
|
353
|
+
"platform": "linux",
|
|
354
|
+
"shell": "bash",
|
|
355
|
+
"tags": ["dotfiles", "bash", "linux"],
|
|
356
|
+
"paths": [
|
|
357
|
+
"~/.bashrc",
|
|
358
|
+
"~/.bash_profile",
|
|
359
|
+
"~/.config",
|
|
360
|
+
"~/.gitconfig",
|
|
361
|
+
],
|
|
362
|
+
},
|
|
363
|
+
"windows-powershell": {
|
|
364
|
+
"description": "Windows + PowerShell dotfiles",
|
|
365
|
+
"platform": "windows",
|
|
366
|
+
"shell": "powershell",
|
|
367
|
+
"tags": ["dotfiles", "powershell", "windows"],
|
|
368
|
+
"paths": [
|
|
369
|
+
"~/.gitconfig",
|
|
370
|
+
"~/.wslconfig",
|
|
371
|
+
"~/Documents/PowerShell",
|
|
372
|
+
"~/AppData/Local/Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState",
|
|
373
|
+
],
|
|
374
|
+
},
|
|
375
|
+
}
|
|
376
|
+
|
|
377
|
+
|
|
378
|
+
def utc_now() -> str:
    """Return the current UTC time as an ISO-8601 timestamp string."""
    now = datetime.now(tz=timezone.utc)
    return now.isoformat()
|
|
380
|
+
|
|
381
|
+
|
|
382
|
+
def set_current_language(language: str) -> None:
    """Update the module-wide UI language after normalizing the value."""
    global CURRENT_LANGUAGE
    normalized = sanitize_language(language)
    CURRENT_LANGUAGE = normalized
|
|
385
|
+
|
|
386
|
+
|
|
387
|
+
def detect_runtime_language() -> str:
    """Resolve the UI language: PERIDOT_LANG env var first, then the saved
    settings file, falling back to the built-in default."""
    override = os.environ.get("PERIDOT_LANG")
    if override:
        return sanitize_language(override)
    try:
        store = DEFAULT_SETTINGS_STORE
        if store.exists():
            data = json.loads(store.read_text())
            if isinstance(data, dict):
                return sanitize_language(data.get("language"))
    except Exception:
        # Best-effort probe: any problem reading the settings file simply
        # falls through to the default language.
        pass
    return DEFAULT_SETTINGS["language"]
|
|
400
|
+
|
|
401
|
+
|
|
402
|
+
def tr(text: str) -> str:
    """Translate *text* to English when the active language is "en";
    otherwise return it untouched (Spanish is the source language)."""
    if CURRENT_LANGUAGE != "en":
        return text
    return TRANSLATIONS["en"].get(text, text)
|
|
406
|
+
|
|
407
|
+
|
|
408
|
+
def trf(text: str, **kwargs) -> str:
    """Translate *text* and interpolate *kwargs* into the translated template."""
    template = tr(text)
    return template.format(**kwargs)
|
|
410
|
+
|
|
411
|
+
|
|
412
|
+
def localize_parser(parser: argparse.ArgumentParser) -> None:
    """Recursively translate a parser's description and help strings in place.

    Walks argparse's private ``_actions`` list (there is no public API for
    this), translating each action's help text, the subcommand summaries
    (``_choices_actions``), and recursing into any nested subparsers.
    """
    parser.description = tr(parser.description) if parser.description else parser.description
    for action in parser._actions:
        if getattr(action, "help", None):
            action.help = tr(action.help)
        if getattr(action, "description", None):
            action.description = tr(action.description)
        # Pseudo-actions back the subcommand entries shown in help listings.
        for pseudo_action in getattr(action, "_choices_actions", []):
            if getattr(pseudo_action, "help", None):
                pseudo_action.help = tr(pseudo_action.help)
        # For a subparsers action, "choices" maps subcommand name -> parser.
        subparser_map = getattr(action, "choices", None)
        if isinstance(subparser_map, dict):
            for subparser in subparser_map.values():
                localize_parser(subparser)
|
|
426
|
+
|
|
427
|
+
|
|
428
|
+
def die(message: str) -> None:
    """Print a localized error banner to the console and abort with status 1."""
    prefix = tr('Error')
    console.print(f"[bold red]{prefix}:[/bold red] {message}", style="red")
    raise SystemExit(1)
|
|
431
|
+
|
|
432
|
+
|
|
433
|
+
def normalize_os_name(value: str | None = None) -> str:
|
|
434
|
+
raw = (value or platform.system()).strip().lower()
|
|
435
|
+
mapping = {
|
|
436
|
+
"darwin": "macos",
|
|
437
|
+
"mac": "macos",
|
|
438
|
+
"macos": "macos",
|
|
439
|
+
"linux": "linux",
|
|
440
|
+
"windows": "windows",
|
|
441
|
+
"win32": "windows",
|
|
442
|
+
"msys": "windows",
|
|
443
|
+
"cygwin": "windows",
|
|
444
|
+
}
|
|
445
|
+
return mapping.get(raw, raw or "unknown")
|
|
446
|
+
|
|
447
|
+
|
|
448
|
+
def sanitize_compression_level(value: object) -> int:
    """Coerce *value* to an int clamped into the valid 0..9 range,
    substituting the default level when it cannot be parsed."""
    try:
        level = int(value)
    except (TypeError, ValueError):
        level = DEFAULT_COMPRESSION_LEVEL
    if level < 0:
        return 0
    if level > 9:
        return 9
    return level
|
|
454
|
+
|
|
455
|
+
|
|
456
|
+
def max_reasonable_jobs(cpu_count: int | None = None) -> int:
|
|
457
|
+
"""Return a safe upper bound for parallel jobs.
|
|
458
|
+
|
|
459
|
+
We keep it tied to available CPU to avoid spawning an excessive amount of
|
|
460
|
+
processes/threads on small machines, while still allowing high-core hosts
|
|
461
|
+
to take advantage of their capacity.
|
|
462
|
+
"""
|
|
463
|
+
|
|
464
|
+
cpu = cpu_count if cpu_count is not None else (os.cpu_count() or 2)
|
|
465
|
+
try:
|
|
466
|
+
cpu_int = int(cpu)
|
|
467
|
+
except (TypeError, ValueError):
|
|
468
|
+
cpu_int = 2
|
|
469
|
+
cpu_int = max(1, cpu_int)
|
|
470
|
+
return max(1, min(64, cpu_int * 2))
|
|
471
|
+
|
|
472
|
+
|
|
473
|
+
def sanitize_jobs(value: object) -> int:
    """Coerce *value* to a worker count clamped into 1..max_reasonable_jobs(),
    substituting the default when it cannot be parsed."""
    try:
        jobs = int(value)
    except (TypeError, ValueError):
        jobs = DEFAULT_JOBS
    ceiling = max_reasonable_jobs()
    return min(ceiling, max(1, jobs))
|
|
479
|
+
|
|
480
|
+
|
|
481
|
+
def sanitize_language(value: object) -> str:
    """Normalize language values.

    Accepts exact codes ("es", "en") and common locale variants such as
    "es-ES", "en_US" or "EN-us" by reducing them to the base language.
    Anything unrecognized falls back to the configured default.
    """
    raw = str(value or DEFAULT_SETTINGS["language"]).strip().lower()
    # Drop encoding/modifier suffixes (".UTF-8", "@euro") and unify separators.
    raw = raw.split(".", 1)[0].split("@", 1)[0].replace("_", "-")
    supported = {"es", "en"}
    if raw in supported:
        return raw
    base = raw.split("-", 1)[0] if raw else ""
    if base in supported:
        return base
    return DEFAULT_SETTINGS["language"]
|
|
499
|
+
|
|
500
|
+
|
|
501
|
+
def slugify(value: str) -> str:
    """Turn *value* into a lowercase dash-separated slug.

    Alphanumerics are kept, common separators become dashes, everything
    else is dropped; an empty result yields the fallback "bundle".
    """
    separators = {" ", "-", "_", "."}
    parts: list[str] = []
    for char in value.lower():
        if char.isalnum():
            parts.append(char)
        elif char in separators:
            parts.append("-")
    slug = "".join(parts).strip("-")
    while "--" in slug:
        slug = slug.replace("--", "-")
    return slug if slug else "bundle"
|
|
512
|
+
|
|
513
|
+
|
|
514
|
+
def format_bytes(size: int) -> str:
    """Render a byte count as a human-readable string (B, KB, MB, GB, TB).

    Bytes are shown as an integer; larger units with one decimal place.
    """
    value = float(size)
    for unit in ("B", "KB", "MB", "GB", "TB"):
        if value < 1024 or unit == "TB":
            if unit == "B":
                return f"{int(value)} B"
            return f"{value:.1f} {unit}"
        value /= 1024
    return f"{size} B"
|
|
522
|
+
|
|
523
|
+
|
|
524
|
+
def active_compression_codec() -> str:
    """Return the codec in use: zstd when the module imported, gzip otherwise."""
    if zstd is None:
        return FALLBACK_COMPRESSION
    return PRIMARY_COMPRESSION
|
|
526
|
+
|
|
527
|
+
|
|
528
|
+
def compression_profile_name(level: int) -> str:
    """Classify a 0-9 compression level as "fast", "balanced" or "small"."""
    return "fast" if level <= 2 else ("balanced" if level <= 6 else "small")
|
|
534
|
+
|
|
535
|
+
|
|
536
|
+
def compression_profile_detail(level: int) -> str:
    """Return the (Spanish) tradeoff description for a 0-9 compression level;
    the string doubles as a translation key."""
    if level > 6:
        return "mas compresion, mas lentitud"
    if level > 2:
        return "equilibrado entre tamano y velocidad"
    return "menos compresion, mas velocidad"
|
|
542
|
+
|
|
543
|
+
|
|
544
|
+
def render_level_bar(level: int, maximum: int = 9, width: int = 10) -> str:
    """Draw a textual gauge such as "[###-------]" for level/maximum.

    A zero *maximum* renders a completely filled bar.
    """
    if maximum:
        filled = round((level / maximum) * width)
    else:
        filled = width
    filled = min(width, max(0, filled))
    return "[" + "#" * filled + "-" * (width - filled) + "]"
|
|
548
|
+
|
|
549
|
+
|
|
550
|
+
def render_compression_setting(level: int) -> Panel:
    """Build a Rich panel summarizing the effective compression settings:
    level gauge, codec, mode and the speed/size tradeoff rule."""
    safe_level = sanitize_compression_level(level)
    grid = Table.grid(padding=(0, 1))
    grid.add_row(tr("Level"), f"{safe_level}/9 {render_level_bar(safe_level)}")
    grid.add_row(tr("Codec"), active_compression_codec())
    grid.add_row(tr("Mode"), compression_profile_name(safe_level))
    grid.add_row(tr("Tradeoff"), tr(compression_profile_detail(safe_level)))
    grid.add_row(tr("Rule"), tr("0 = mas rapido y mas grande | 9 = mas lento y mas pequeno"))
    return Panel(grid, title=tr("Compression"), border_style="cyan")
|
|
562
|
+
|
|
563
|
+
|
|
564
|
+
def total_memory_bytes() -> int | None:
    """Best-effort total physical RAM in bytes; None when it cannot be read.

    Tries POSIX sysconf (pages * page size) first, then falls back to
    ``sysctl -n hw.memsize`` on macOS.
    """
    try:
        pages = os.sysconf("SC_PHYS_PAGES")
        page_size = os.sysconf("SC_PAGE_SIZE")
    except (AttributeError, OSError, ValueError):
        pages = page_size = 0
    if isinstance(pages, int) and isinstance(page_size, int) and pages > 0 and page_size > 0:
        return pages * page_size

    if normalize_os_name() != "macos":
        return None
    # macOS fallback: sysconf may not expose SC_PHYS_PAGES there.
    try:
        probe = subprocess.run(["sysctl", "-n", "hw.memsize"], capture_output=True, text=True, check=True)
        return int(probe.stdout.strip())
    except (OSError, ValueError, subprocess.SubprocessError):
        return None
|
|
580
|
+
|
|
581
|
+
|
|
582
|
+
def available_memory_bytes() -> int | None:
    """Best-effort available memory in bytes for the current platform.

    Linux reads ``MemAvailable`` from /proc/meminfo; macOS parses ``vm_stat``
    output and counts free + speculative + inactive pages. Returns None on
    other platforms or whenever the probe fails.
    """
    os_name = normalize_os_name()
    if os_name == "linux":
        try:
            for line in Path("/proc/meminfo").read_text().splitlines():
                if line.startswith("MemAvailable:"):
                    # /proc/meminfo reports the value in kB.
                    return int(line.split()[1]) * 1024
        except (OSError, ValueError, IndexError):
            return None

    if os_name == "macos":
        try:
            result = subprocess.run(["vm_stat"], capture_output=True, text=True, check=True)
        except (OSError, subprocess.SubprocessError):
            return None
        # Default page size used when vm_stat's header cannot be parsed.
        page_size = 4096
        counters: dict[str, int] = {}
        for line in result.stdout.splitlines():
            # Header line looks like: "... (page size of 16384 bytes)".
            if "page size of" in line and "bytes" in line:
                try:
                    page_size = int(line.split("page size of", 1)[1].split("bytes", 1)[0].strip())
                except ValueError:
                    page_size = 4096
                continue
            if ":" not in line:
                continue
            key, raw_value = line.split(":", 1)
            # Counter values are printed with a trailing dot; keep digits only.
            digits = "".join(char for char in raw_value if char.isdigit())
            if digits:
                counters[key.strip()] = int(digits)
        # Pages that are free or can be reclaimed without swapping.
        available_pages = counters.get("Pages free", 0) + counters.get("Pages speculative", 0) + counters.get("Pages inactive", 0)
        if available_pages > 0:
            return available_pages * page_size
    return None
|
|
616
|
+
|
|
617
|
+
|
|
618
|
+
def memory_pressure_ratio() -> float | None:
    """Fraction of RAM currently in use (0.0-1.0), or None when either
    total or available memory cannot be determined."""
    total = total_memory_bytes()
    available = available_memory_bytes()
    if not total or total <= 0 or not available:
        return None
    used_fraction = 1.0 - (available / total)
    return min(1.0, max(0.0, used_fraction))
|
|
625
|
+
|
|
626
|
+
|
|
627
|
+
def estimate_pack_working_set(entries: list["FileEntry"]) -> int:
    """Estimate the per-worker memory (bytes) needed to pack *entries*.

    Based on the largest entry (and the mean of the top few) plus headroom;
    never below 24 MiB, and a flat 32 MiB when there is nothing to pack.
    """
    if not entries:
        return 32 * 1024 * 1024
    ordered = sorted((item.size for item in entries), reverse=True)
    largest = ordered[0]
    top = ordered[: min(4, len(ordered))]
    top_mean = sum(top) // len(top)
    estimate = int(max(largest * 1.35, top_mean * 1.75))
    return max(24 * 1024 * 1024, estimate)
|
|
635
|
+
|
|
636
|
+
|
|
637
|
+
def safe_pack_jobs(entries: list["FileEntry"], requested_jobs: int) -> tuple[int, str]:
    """Clamp the requested pack parallelism to what memory can sustain.

    Returns ``(jobs, reason)`` where *reason* is a short label explaining
    why the request was (or was not) reduced.
    """
    requested = sanitize_jobs(requested_jobs)
    if not entries:
        return requested, "no files"
    available = available_memory_bytes()
    if not available:
        # Cannot measure memory: trust the (sanitized) request.
        return requested, "memory unknown"
    per_job_budget = estimate_pack_working_set(entries)
    # Spend at most half the available memory, but assume at least 256 MiB.
    usable_budget = max(256 * 1024 * 1024, int(available * 0.5))
    memory_based_jobs = max(1, usable_budget // per_job_budget)
    clamped = max(1, min(requested, memory_based_jobs))
    # With >= 1 GiB free, don't collapse a big request down to one worker.
    if requested >= 4 and clamped == 1 and available >= 1024 * 1024 * 1024:
        clamped = 2
    if clamped >= requested:
        return requested, "memory ok"
    return clamped, f"memory budget {format_bytes(usable_budget)} / job ~{format_bytes(per_job_budget)}"
|
|
653
|
+
|
|
654
|
+
|
|
655
|
+
def adaptive_inflight_label(pressure: float | None) -> str:
|
|
656
|
+
if pressure is None:
|
|
657
|
+
return "mem unknown"
|
|
658
|
+
if pressure >= 0.92:
|
|
659
|
+
return "mem critical"
|
|
660
|
+
if pressure >= 0.84:
|
|
661
|
+
return "mem high"
|
|
662
|
+
if pressure >= 0.72:
|
|
663
|
+
return "mem warm"
|
|
664
|
+
if pressure >= 0.60:
|
|
665
|
+
return "mem normal"
|
|
666
|
+
return "mem cool"
|
|
667
|
+
|
|
668
|
+
|
|
669
|
+
def adaptive_next_inflight_limit(current_limit: int, max_jobs: int) -> tuple[int, str]:
    """Pick the next in-flight window size from current memory pressure.

    Shrinks aggressively under high pressure and grows by one or two slots
    when memory is comfortable. Returns the new limit plus the pressure label.
    """
    pressure = memory_pressure_ratio()
    label = adaptive_inflight_label(pressure)
    current = min(max_jobs, max(1, current_limit))
    if pressure is None:
        return current, label
    if pressure >= 0.90:
        # Critical: halve the window.
        return max(1, current // 2), label
    if pressure >= 0.82:
        return max(1, current - 1), label
    if pressure >= 0.72:
        return current, label
    if current < max_jobs:
        # Plenty of headroom grows faster than merely-comfortable memory.
        step = 2 if pressure < 0.60 else 1
        return min(max_jobs, current + step), label
    return current, label
|
|
686
|
+
|
|
687
|
+
|
|
688
|
+
def create_pack_executor(requested_jobs: int):
    """Create the executor used for parallel packing.

    Returns an ``(executor, label)`` pair. A single job always uses a
    thread pool; otherwise a process pool is preferred, falling back to
    threads when the platform cannot spawn worker processes.
    """
    if requested_jobs <= 1:
        return ThreadPoolExecutor(max_workers=1), "threads"
    try:
        return ProcessPoolExecutor(max_workers=requested_jobs), "processes"
    except OSError:
        # PermissionError is an OSError subclass, so a single clause covers
        # both missing OS support and sandboxes that forbid forking.
        return ThreadPoolExecutor(max_workers=requested_jobs), "threads-fallback"
|
|
695
|
+
|
|
696
|
+
|
|
697
|
+
def detect_shell() -> str:
    """Return the lowercased basename of the configured shell, consulting
    SHELL first and COMSPEC (Windows) second; "" when neither is set."""
    configured = os.environ.get("SHELL") or os.environ.get("COMSPEC") or ""
    return Path(configured).name.lower()
|
|
700
|
+
|
|
701
|
+
|
|
702
|
+
def ensure_parent(path: Path) -> None:
    """Create *path*'s parent directory (and any ancestors) if missing."""
    parent = path.parent
    parent.mkdir(parents=True, exist_ok=True)
|
|
704
|
+
|
|
705
|
+
|
|
706
|
+
def write_key(key_path: Path, key: bytes) -> None:
    """Persist *key* at *key_path*, creating parent directories and
    tightening permissions to 0600 when the filesystem allows it."""
    ensure_parent(key_path)
    key_path.write_bytes(key)
    try:
        key_path.chmod(0o600)
    except OSError:
        # Best effort: some filesystems do not support POSIX permission bits.
        pass
|
|
713
|
+
|
|
714
|
+
|
|
715
|
+
def load_profiles(profile_path: Path = DEFAULT_PROFILE_STORE) -> dict:
    """Load the profile store; a missing file yields an empty store.

    Invalid JSON aborts via die().
    """
    if not profile_path.exists():
        return {}
    try:
        stored_text = profile_path.read_text()
        return json.loads(stored_text)
    except json.JSONDecodeError as exc:
        die(f"El store de perfiles es invalido: {exc}")
|
|
722
|
+
|
|
723
|
+
|
|
724
|
+
def save_profiles(data: dict, profile_path: Path = DEFAULT_PROFILE_STORE) -> None:
    """Write the profile store as stable, pretty-printed JSON."""
    ensure_parent(profile_path)
    serialized = json.dumps(data, indent=2, sort_keys=True)
    profile_path.write_text(serialized + "\n")
|
|
727
|
+
|
|
728
|
+
|
|
729
|
+
def load_settings(settings_path: Path = DEFAULT_SETTINGS_STORE) -> dict:
    """Load settings, overlaying the stored values on DEFAULT_SETTINGS.

    A missing store returns the defaults untouched. Invalid JSON or a
    non-object store aborts via die(). Stored compression level, jobs and
    language values are sanitized before being returned.
    """
    settings = dict(DEFAULT_SETTINGS)
    if not settings_path.exists():
        return settings
    try:
        stored = json.loads(settings_path.read_text())
    except json.JSONDecodeError as exc:
        die(f"El store de settings es invalido: {exc}")
    if not isinstance(stored, dict):
        die("El store de settings debe ser un objeto JSON.")
    settings.update(stored)
    settings["compression_level"] = sanitize_compression_level(settings.get("compression_level"))
    settings["jobs"] = sanitize_jobs(settings.get("jobs"))
    settings["language"] = sanitize_language(settings.get("language"))
    return settings
|
|
744
|
+
|
|
745
|
+
|
|
746
|
+
def save_settings(data: dict, settings_path: Path = DEFAULT_SETTINGS_STORE) -> None:
    """Merge *data* over DEFAULT_SETTINGS, sanitize, and write pretty JSON."""
    ensure_parent(settings_path)
    merged = {**DEFAULT_SETTINGS, **data}
    merged["compression_level"] = sanitize_compression_level(merged.get("compression_level"))
    merged["jobs"] = sanitize_jobs(merged.get("jobs"))
    # Encryption details are never persisted in the settings store.
    merged.pop("encryption", None)
    merged["language"] = sanitize_language(merged.get("language"))
    serialized = json.dumps(merged, indent=2, sort_keys=True)
    settings_path.write_text(serialized + "\n")
|
|
755
|
+
|
|
756
|
+
|
|
757
|
+
def save_history_snapshot(package_path: Path, history_dir: Path = DEFAULT_HISTORY_DIR) -> Path | None:
    """Copy an existing package into the per-bundle history directory.

    Snapshots are named by UTC timestamp. Returns the snapshot path, or
    None when the package does not exist yet (nothing to snapshot).
    """
    if not package_path.exists():
        return None
    stamp = datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%SZ")
    destination_dir = history_dir / package_path.stem
    destination_dir.mkdir(parents=True, exist_ok=True)
    destination = destination_dir / f"{stamp}.peridot"
    shutil.copy2(package_path, destination)
    return destination
|
|
767
|
+
|
|
768
|
+
|
|
769
|
+
def fingerprint_key(key: bytes) -> str:
    """Short (16 hex chars) SHA-256 fingerprint used to identify a key."""
    digest = hashlib.sha256(key).hexdigest()
    return digest[:16]
|
|
771
|
+
|
|
772
|
+
|
|
773
|
+
def decode_aesgcm_key_bytes(raw: bytes | str) -> bytes | None:
|
|
774
|
+
"""Decode a 32-byte AES-GCM key from raw bytes, text, or base64url.
|
|
775
|
+
|
|
776
|
+
Accepts:
|
|
777
|
+
- Raw 32 bytes
|
|
778
|
+
- base64url-encoded bytes (with or without padding, with optional whitespace/newlines)
|
|
779
|
+
- A string containing either of the above (UTF-8)
|
|
780
|
+
"""
|
|
781
|
+
|
|
782
|
+
if isinstance(raw, str):
|
|
783
|
+
raw_bytes = raw.encode("utf-8")
|
|
784
|
+
else:
|
|
785
|
+
raw_bytes = raw
|
|
786
|
+
|
|
787
|
+
if len(raw_bytes) == 32:
|
|
788
|
+
return raw_bytes
|
|
789
|
+
|
|
790
|
+
cleaned = b"".join(raw_bytes.split())
|
|
791
|
+
if not cleaned:
|
|
792
|
+
return None
|
|
793
|
+
|
|
794
|
+
# base64 decoders expect padding; allow unpadded base64url keys.
|
|
795
|
+
missing_padding = (-len(cleaned)) % 4
|
|
796
|
+
if missing_padding:
|
|
797
|
+
cleaned += b"=" * missing_padding
|
|
798
|
+
|
|
799
|
+
try:
|
|
800
|
+
decoded = base64.urlsafe_b64decode(cleaned)
|
|
801
|
+
except Exception:
|
|
802
|
+
return None
|
|
803
|
+
|
|
804
|
+
return decoded if len(decoded) == 32 else None
|
|
805
|
+
|
|
806
|
+
|
|
807
|
+
def load_key(key_path: Path, create: bool = False) -> bytes:
    """Load (and normalize) the AES-GCM key stored at *key_path*.

    An existing file may hold raw 32 bytes or a base64url encoding; a
    decodable key is rewritten to disk in raw form (best effort — read-only
    media is tolerated). With ``create=True`` a missing key is generated and
    persisted; otherwise a missing or undecodable key aborts via ``die``.
    """
    if key_path.exists():
        key = key_path.read_bytes()
        decoded = decode_aesgcm_key_bytes(key)
        if decoded is not None:
            if decoded != key:
                # Normalize the on-disk format to raw bytes; ignore write failures.
                try:
                    write_key(key_path, decoded)
                except OSError:
                    pass
            return decoded
        die(f"Clave invalida en {key_path}: se esperaban 32 bytes para AES-GCM.")
    if not create:
        die(f"No se encontro la clave en {key_path}")
    key = AESGCM.generate_key(bit_length=256)
    write_key(key_path, key)
    return key
|
|
824
|
+
|
|
825
|
+
|
|
826
|
+
def manifest_from_zip(package_path: Path) -> dict:
    """Read and validate manifest.json from a .peridot package.

    Aborts via ``die`` when the package is missing, lacks a manifest, cannot
    be opened, or declares a package_version other than PACKAGE_VERSION.
    """
    try:
        with ZipFile(package_path) as zf:
            with zf.open("manifest.json") as handle:
                manifest = json.load(handle)
    # FileNotFoundError must precede OSError: it is an OSError subclass.
    except FileNotFoundError:
        die(f"No existe el paquete {package_path}")
    # ZipFile.open raises KeyError for a member that is not in the archive.
    except KeyError:
        die(f"{package_path} no contiene manifest.json")
    except OSError as exc:
        die(f"No se pudo abrir {package_path}: {exc}")

    if manifest.get("package_version") != PACKAGE_VERSION:
        die(
            f"Version de paquete no soportada: {manifest.get('package_version')}. "
            f"Esperada: {PACKAGE_VERSION}"
        )
    return manifest
|
|
844
|
+
|
|
845
|
+
|
|
846
|
+
@dataclass
class FileEntry:
    """A single on-disk file slated for inclusion in a bundle."""

    # Absolute path on disk the bytes are read from.
    source: Path
    # POSIX-style path stored in the bundle, relative to the export root.
    relative_path: str
    # File size in bytes at collection time.
    size: int
    # Permission bits (stat.S_IMODE) to restore on extraction.
    mode: int
|
|
852
|
+
|
|
853
|
+
|
|
854
|
+
@dataclass(frozen=True)
class ConfigGroup:
    """A named, selectable group of config paths shown in the catalog."""

    # Stable identifier (e.g. "shell-zsh") used for selection state.
    key: str
    # Catalog category heading (e.g. "Shells", "Apps").
    category: str
    # Short human-readable group name.
    label: str
    # One-line description shown in tables and prompts.
    description: str
    # Candidate paths (need not exist); "~" is expanded at lookup time.
    paths: tuple[str, ...]
    # Whether the group starts preselected.
    default: bool = False
|
|
862
|
+
|
|
863
|
+
|
|
864
|
+
def _shared_config_groups() -> list[ConfigGroup]:
    """Config groups offered on every platform."""
    return [
        ConfigGroup(
            "git-ssh",
            "Core",
            "Git + SSH",
            "Git identity, global config and SSH keys/config",
            ("~/.gitconfig", "~/.gitignore", "~/.gitignore_global", "~/.ssh"),
            default=True,
        ),
        ConfigGroup(
            "terminal-tools",
            "Terminal",
            "Terminal tools",
            "tmux, wget and generic terminal helpers",
            ("~/.tmux.conf", "~/.config/kitty", "~/.config/wezterm"),
        ),
        ConfigGroup(
            "editors-vscode",
            "Editors",
            "VS Code",
            "VS Code and compatible editor settings",
            ("~/.vscode",),
        ),
        ConfigGroup(
            "editors-zed",
            "Editors",
            "Zed",
            "Zed editor settings",
            ("~/.config/zed",),
        ),
        ConfigGroup(
            "dev-node",
            "Development",
            "Node tooling",
            "npm, yarn and JS tooling config",
            ("~/.npmrc", "~/.yarnrc", "~/.config/yarn"),
        ),
        ConfigGroup(
            "dev-rust",
            "Development",
            "Rust tooling",
            "Cargo config and rustup-related settings",
            ("~/.cargo/config.toml", "~/.cargo/credentials.toml"),
        ),
        ConfigGroup(
            "dev-asdf",
            "Development",
            "asdf / versions",
            "Version manager files such as .tool-versions",
            ("~/.tool-versions",),
        ),
    ]


def _macos_config_groups() -> list[ConfigGroup]:
    """macOS-specific shell and app groups."""
    return [
        ConfigGroup(
            "shell-fish",
            "Shells",
            "Fish shell",
            "fish config and functions",
            ("~/.config/fish", "~/.local/share/fish"),
            default=detect_shell() == "fish",
        ),
        ConfigGroup(
            "shell-zsh",
            "Shells",
            "Zsh shell",
            "zsh profile, env and rc files",
            ("~/.zshrc", "~/.zprofile", "~/.zshenv"),
            default=detect_shell() == "zsh",
        ),
        ConfigGroup(
            "shell-bash",
            "Shells",
            "Bash shell",
            "bash profile and rc files",
            ("~/.bashrc", "~/.bash_profile", "~/.profile"),
        ),
        ConfigGroup(
            "apps-raycast",
            "Apps",
            "Raycast",
            "Raycast local config stored under .config",
            ("~/.config/raycast",),
        ),
        ConfigGroup(
            "apps-macos-code",
            "Apps",
            "VS Code User",
            "User settings stored in Library/Application Support",
            ("~/Library/Application Support/Code/User",),
        ),
    ]


def _windows_config_groups() -> list[ConfigGroup]:
    """Windows-specific shell and app groups."""
    return [
        ConfigGroup(
            "shell-powershell",
            "Shells",
            "PowerShell",
            "PowerShell profiles and aliases from Documents/PowerShell",
            ("~/Documents/PowerShell", "~/OneDrive/Documents/PowerShell", "~/.config/powershell"),
            default=True,
        ),
        ConfigGroup(
            "shell-wsl",
            "Shells",
            "WSL",
            "WSL integration config",
            ("~/.wslconfig",),
        ),
        ConfigGroup(
            "apps-terminal",
            "Apps",
            "Windows Terminal",
            "Windows Terminal local settings",
            ("~/AppData/Local/Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState",),
        ),
        ConfigGroup(
            "apps-vscode-user",
            "Apps",
            "VS Code User",
            "User settings in AppData/Roaming",
            ("~/AppData/Roaming/Code/User",),
        ),
    ]


def _linux_config_groups() -> list[ConfigGroup]:
    """Linux (and any other platform) shell and app groups."""
    return [
        ConfigGroup(
            "shell-zsh",
            "Shells",
            "Zsh shell",
            "zsh profile, env and rc files",
            ("~/.zshrc", "~/.zprofile", "~/.zshenv"),
            default=detect_shell() == "zsh",
        ),
        ConfigGroup(
            "shell-bash",
            "Shells",
            "Bash shell",
            "bash profile and rc files",
            ("~/.bashrc", "~/.bash_profile", "~/.profile"),
            default=detect_shell() == "bash",
        ),
        ConfigGroup(
            "shell-fish",
            "Shells",
            "Fish shell",
            "fish config and functions",
            ("~/.config/fish", "~/.local/share/fish"),
            default=detect_shell() == "fish",
        ),
        ConfigGroup(
            "apps-vscode-user",
            "Apps",
            "VS Code User",
            "User settings in ~/.config/Code/User",
            ("~/.config/Code/User", "~/.config/VSCodium/User"),
        ),
        ConfigGroup(
            "apps-neovim",
            "Apps",
            "Neovim",
            "Neovim and Vim config",
            ("~/.config/nvim", "~/.vimrc"),
        ),
    ]


def config_groups_for_os(os_name: str) -> list[ConfigGroup]:
    """Return the selectable config-group catalog for *os_name*.

    "macos" and "windows" append their platform-specific shells and apps to
    the shared groups; any other value falls through to the Linux set.
    Shell groups consult detect_shell() so the user's shell is preselected.
    """
    if os_name == "macos":
        return _shared_config_groups() + _macos_config_groups()
    if os_name == "windows":
        return _shared_config_groups() + _windows_config_groups()
    return _shared_config_groups() + _linux_config_groups()
|
|
1032
|
+
|
|
1033
|
+
|
|
1034
|
+
def existing_paths(path_specs: tuple[str, ...] | list[str]) -> list[Path]:
|
|
1035
|
+
resolved: list[Path] = []
|
|
1036
|
+
seen: set[Path] = set()
|
|
1037
|
+
for path_spec in path_specs:
|
|
1038
|
+
path = Path(path_spec).expanduser()
|
|
1039
|
+
if path.exists() and path not in seen:
|
|
1040
|
+
seen.add(path)
|
|
1041
|
+
resolved.append(path)
|
|
1042
|
+
return resolved
|
|
1043
|
+
|
|
1044
|
+
|
|
1045
|
+
def default_export_roots() -> list[Path]:
    """Existing paths from every default-selected group for this OS, deduped."""
    picked: list[Path] = []
    known: set[Path] = set()
    for group in config_groups_for_os(normalize_os_name()):
        if not group.default:
            continue
        for candidate in existing_paths(group.paths):
            if candidate not in known:
                known.add(candidate)
                picked.append(candidate)
    return picked
|
|
1057
|
+
|
|
1058
|
+
|
|
1059
|
+
def get_recommended_preset() -> str:
    """Best preset name for the current OS + shell, falling back to macos-fish."""
    current_os = normalize_os_name()
    shell = detect_shell()
    # Exact OS+shell match first, then sensible per-OS fallbacks.
    candidates = [f"{current_os}-{shell}"]
    if current_os == "windows":
        candidates.append(f"{current_os}-powershell")
    if current_os in {"macos", "linux"}:
        candidates.append(f"{current_os}-zsh")
    if current_os == "linux":
        candidates.append(f"{current_os}-bash")
    for candidate in candidates:
        if candidate and candidate in PRESET_LIBRARY:
            return candidate
    return "macos-fish"
|
|
1072
|
+
|
|
1073
|
+
|
|
1074
|
+
def render_presets_table() -> None:
    """Print the preset library as a rich table on the shared console."""
    table = Table(title=tr("Dot Presets"), header_style="bold cyan")
    for heading, kwargs in (
        ("Preset", {"style": "green"}),
        ("Target", {"style": "white"}),
        ("Paths", {"justify": "right", "style": "magenta"}),
        ("Description", {"style": "dim"}),
    ):
        table.add_column(heading, **kwargs)
    for name, preset in PRESET_LIBRARY.items():
        target = f"{preset['platform']} / {preset['shell']}"
        table.add_row(name, target, str(len(preset["paths"])), preset["description"])
    console.print(table)
|
|
1088
|
+
|
|
1089
|
+
|
|
1090
|
+
def apply_preset(args, preset_name: str, force_paths: bool = False) -> None:
    """Fill unset bundle args from the named preset (mutates *args* in place).

    "custom" only records the preset name; an unknown name aborts via die().
    With force_paths=True the preset's paths replace any already present.
    """
    if preset_name == "custom":
        args.preset = "custom"
        return
    preset = PRESET_LIBRARY.get(preset_name)
    if not preset:
        die(f"Preset desconocido: {preset_name}")

    args.preset = preset_name
    if not args.name:
        host = platform.node() or "my"
        args.name = f"{host}-{preset_name}"
    if not args.description:
        args.description = preset["description"]
    if not args.platform:
        args.platform = preset["platform"]
    if not args.shell:
        args.shell = preset["shell"]
    if not args.tags:
        args.tags = list(preset["tags"])
    if force_paths or not args.paths:
        args.paths = list(preset["paths"])
|
|
1111
|
+
|
|
1112
|
+
|
|
1113
|
+
def render_config_group_table(groups: list[ConfigGroup], selected_keys: set[str], marker_label: str = "Pick") -> None:
    """Render the config-group catalog, marking selected groups with 'x'."""
    table = Table(title=tr("Config Catalog"), header_style="bold cyan")
    table.add_column("#", justify="right", style="dim")
    table.add_column(marker_label, justify="center")
    table.add_column("Category", style="magenta")
    table.add_column("Group", style="green")
    table.add_column("Description", style="white")
    table.add_column("Found", justify="right", style="cyan")
    row_number = 0
    for group in groups:
        row_number += 1
        mark = "x" if group.key in selected_keys else ""
        found_count = len(existing_paths(group.paths))
        table.add_row(str(row_number), mark, group.category, group.label, group.description, str(found_count))
    console.print(table)
|
|
1126
|
+
|
|
1127
|
+
|
|
1128
|
+
def checkbox_prompt(message: str, choices: list, instruction: str | None = None):
    """Show a questionary checkbox; returns None when no interactive UI exists."""
    if QUESTIONARY_AVAILABLE and sys.stdin.isatty():
        hint = instruction if instruction else "Espacio para marcar, Enter para confirmar"
        prompt = questionary.checkbox(tr(message), choices=choices, instruction=tr(hint))
        return prompt.ask()
    return None
|
|
1136
|
+
|
|
1137
|
+
|
|
1138
|
+
def checkbox_unavailable_reason() -> str | None:
    """Why the checkbox UI cannot be shown; None when it is available.

    Checks the TTY first so the message matches the most actionable cause.
    """
    if not sys.stdin.isatty():
        return "no_tty"
    return None if QUESTIONARY_AVAILABLE else "missing_questionary"
|
|
1144
|
+
|
|
1145
|
+
|
|
1146
|
+
def explain_checkbox_unavailable() -> None:
    """Tell the user why the checkbox UI is unavailable and how to fix it."""
    reason = checkbox_unavailable_reason()
    if reason == "no_tty":
        headline = f"[yellow]{tr('Checkbox UI no disponible:')}[/yellow] {tr('esta sesion no tiene un TTY interactivo real.')}"
        console.print(headline)
        console.print(f"[dim]{tr('Ejecuta Peridot directamente en una terminal interactiva.')}[/dim]")
        return
    if reason == "missing_questionary":
        missing_questionary_text = "falta la dependencia 'questionary' en este Python."
        install_hint_text = "Usa el binario instalado con './install.sh' o ejecuta 'python3 -m pip install -r requirements.txt'."
        console.print(f"[yellow]{tr('Checkbox UI no disponible:')}[/yellow] {tr(missing_questionary_text)}")
        console.print(f"[dim]{tr(install_hint_text)}[/dim]")
|
|
1162
|
+
|
|
1163
|
+
|
|
1164
|
+
def interactive_checkbox_paths(paths: list[Path], preselected: list[Path] | None = None) -> list[Path] | None:
    """Checkbox prompt over *paths*; everything preselected unless given.

    Returns the chosen paths, [] for empty input, or None when no UI exists.
    """
    if not paths:
        return []
    checked = set(preselected) if preselected else set(paths)
    options = []
    for path in paths:
        title = str(path)
        if path.exists():
            kind = "dir" if path.is_dir() else "file"
            title = f"{title} [{kind}]"
        options.append(Choice(title=title, value=path, checked=path in checked))
    return checkbox_prompt("Selecciona rutas", options)
|
|
1176
|
+
|
|
1177
|
+
|
|
1178
|
+
def build_path_catalog(os_name: str) -> list[tuple[Path, str]]:
    """All existing catalog paths for *os_name*, tagged '[Category] Label'."""
    catalog: list[tuple[Path, str]] = []
    known: set[Path] = set()
    for group in config_groups_for_os(os_name):
        tag = f"[{group.category}] {group.label}"
        for path in existing_paths(group.paths):
            if path in known:
                continue
            known.add(path)
            catalog.append((path, tag))
    return catalog
|
|
1189
|
+
|
|
1190
|
+
|
|
1191
|
+
def interactive_checkbox_catalog_paths(
    os_name: str,
    preselected: list[Path] | None = None,
) -> list[Path] | None:
    """Checkbox over the full path catalog; None when no interactive UI."""
    if not (QUESTIONARY_AVAILABLE and sys.stdin.isatty()):
        return None
    checked = set(preselected or [])
    options = []
    for path, source_label in build_path_catalog(os_name):
        kind = "dir" if path.is_dir() else "file"
        options.append(
            Choice(title=f"{path} [{kind}] {source_label}", value=path, checked=path in checked)
        )
    return checkbox_prompt("Selecciona rutas para este bundle", options)
|
|
1205
|
+
|
|
1206
|
+
|
|
1207
|
+
def recommended_group_keys(groups: list[ConfigGroup], shell_name: str) -> set[str]:
|
|
1208
|
+
selected = {group.key for group in groups if group.default}
|
|
1209
|
+
shell_map = {
|
|
1210
|
+
"fish": "shell-fish",
|
|
1211
|
+
"zsh": "shell-zsh",
|
|
1212
|
+
"bash": "shell-bash",
|
|
1213
|
+
"powershell": "shell-powershell",
|
|
1214
|
+
}
|
|
1215
|
+
preferred = shell_map.get(shell_name)
|
|
1216
|
+
if preferred:
|
|
1217
|
+
selected.add(preferred)
|
|
1218
|
+
return {group.key for group in groups if group.key in selected}
|
|
1219
|
+
|
|
1220
|
+
|
|
1221
|
+
def interactive_select_config_groups(os_name: str, shell_name: str) -> list[Path]:
    """Let the user pick config groups and return their existing paths.

    Uses a questionary checkbox when an interactive TTY is available;
    otherwise falls back to a numeric toggle loop over a rendered table.
    Starts from the recommended selection for *shell_name* and returns the
    chosen groups flattened into existing paths, deduped, in catalog order.
    """
    groups = config_groups_for_os(os_name)
    selected_keys = recommended_group_keys(groups, shell_name)

    if QUESTIONARY_AVAILABLE and sys.stdin.isatty():
        choices = []
        for group in groups:
            found = len(existing_paths(group.paths))
            title = f"[{group.category}] {group.label} ({found} found) - {group.description}"
            choices.append(Choice(title=title, value=group.key, checked=group.key in selected_keys))
        result = checkbox_prompt("Selecciona grupos de configuracion", choices)
        # A cancelled prompt (None) keeps the recommended selection.
        if result is not None:
            selected_keys = set(result)

    else:
        # Plain-text fallback: redraw the table until the user continues ('c').
        while True:
            render_config_group_table(groups, selected_keys)
            console.print(
                "[dim]Toggle with numbers like '1 4 7'. Commands: [b]a[/b]=all, [b]n[/b]=none, [b]d[/b]=defaults, [b]c[/b]=continue.[/dim]"
            )
            raw = Prompt.ask(tr("Select groups"), default="c").strip().lower()
            if raw == "c":
                break
            if raw == "a":
                selected_keys = {group.key for group in groups}
                continue
            if raw == "n":
                selected_keys = set()
                continue
            if raw == "d":
                selected_keys = recommended_group_keys(groups, shell_name)
                continue

            # Anything else is a list of 1-based indexes to toggle; commas
            # and spaces both separate tokens, non-digits are ignored.
            for token in raw.replace(",", " ").split():
                if not token.isdigit():
                    continue
                index = int(token) - 1
                if 0 <= index < len(groups):
                    group_key = groups[index].key
                    if group_key in selected_keys:
                        selected_keys.remove(group_key)
                    else:
                        selected_keys.add(group_key)

    # Flatten the chosen groups into existing paths, deduped, in order.
    selected_paths: list[Path] = []
    seen: set[Path] = set()
    for group in groups:
        if group.key not in selected_keys:
            continue
        for path in existing_paths(group.paths):
            if path not in seen:
                seen.add(path)
                selected_paths.append(path)
    return selected_paths
|
|
1275
|
+
|
|
1276
|
+
|
|
1277
|
+
def choose_pack_base(os_name: str, preset_name: str | None) -> str:
    """Ask how to seed the bundle: "preset", "catalog" or "empty".

    Prefers a questionary select; without an interactive TTY it falls back
    to a plain text prompt with the same default.
    """
    options = [
        ("Preset or default selection", "preset"),
        ("Categorized config groups", "catalog"),
        ("Start empty", "empty"),
    ]
    fallback = "preset" if preset_name else "catalog"
    if QUESTIONARY_AVAILABLE and sys.stdin.isatty():
        selection = questionary.select(
            tr("Como quieres construir este bundle?"),
            choices=[Choice(title=title, value=value) for title, value in options],
            default=fallback,
        ).ask()
        if selection:
            return selection

    console.print(f"[dim]{tr('Bundle source: preset, catalog or empty')}[/dim]")
    return Prompt.ask(tr("Base selection"), default=fallback)
|
|
1295
|
+
|
|
1296
|
+
|
|
1297
|
+
def source_root_for_path(path: Path) -> Path:
    """Root the path is exported relative to: $HOME when under it, else its parent."""
    home = Path.home()
    try:
        path.relative_to(home)
    except ValueError:
        return path.parent
    return home
|
|
1304
|
+
|
|
1305
|
+
|
|
1306
|
+
def should_exclude_entry(path: Path) -> bool:
    """True when *path* lives under $HOME and matches a default exclude.

    An exclude matches the path itself or anything beneath it.
    """
    try:
        relative = path.relative_to(Path.home()).as_posix()
    except ValueError:
        # Outside the home directory: default excludes do not apply.
        return False
    return any(
        relative == excluded or relative.startswith(f"{excluded}/")
        for excluded in DEFAULT_EXCLUDES
    )
|
|
1317
|
+
|
|
1318
|
+
|
|
1319
|
+
def collect_files(
    paths: list[Path],
    progress_callback: Callable[[int, Path], None] | None = None,
) -> list[FileEntry]:
    """Walk the given paths and build the deduplicated list of files to pack.

    Missing paths and symlinks are skipped with a console warning; paths
    matched by the default excludes are skipped silently. Deduplication is
    by bundle-relative path. ``progress_callback`` receives the running
    file count and the path being scanned; during directory walks it is
    throttled (first 20 files, then every 200th) to keep output cheap.
    Entries are returned sorted by relative path.
    """
    entries: list[FileEntry] = []
    seen: set[str] = set()
    discovered = 0

    for source in paths:
        expanded = source.expanduser()
        if not expanded.exists():
            console.print(f"[yellow]Aviso:[/yellow] se omite {expanded}, no existe.")
            continue
        if expanded.is_symlink():
            console.print(f"[yellow]Aviso:[/yellow] se omite {expanded}, es un symlink.")
            continue
        if should_exclude_entry(expanded):
            continue

        # Paths under $HOME are stored relative to it; others to their parent.
        export_root = source_root_for_path(expanded)
        if expanded.is_file():
            relative = expanded.relative_to(export_root).as_posix()
            if relative not in seen:
                stat_result = expanded.stat()
                seen.add(relative)
                entries.append(FileEntry(expanded, relative, stat_result.st_size, stat.S_IMODE(stat_result.st_mode)))
                discovered += 1
                if progress_callback:
                    progress_callback(discovered, expanded)
            continue

        for root, _, files in os.walk(expanded):
            root_path = Path(root)
            for name in files:
                file_path = root_path / name
                if file_path.is_symlink():
                    continue
                if should_exclude_entry(file_path):
                    continue
                relative = file_path.relative_to(export_root).as_posix()
                if relative in seen:
                    continue
                stat_result = file_path.stat()
                seen.add(relative)
                entries.append(FileEntry(file_path, relative, stat_result.st_size, stat.S_IMODE(stat_result.st_mode)))
                discovered += 1
                # Throttle progress updates on large trees.
                if progress_callback and (discovered <= 20 or discovered % 200 == 0):
                    progress_callback(discovered, root_path)

    return sorted(entries, key=lambda item: item.relative_path)
|
|
1369
|
+
|
|
1370
|
+
|
|
1371
|
+
def filter_entries(entries: list[FileEntry], excludes: list[str] | None = None) -> list[FileEntry]:
|
|
1372
|
+
if not excludes:
|
|
1373
|
+
return entries
|
|
1374
|
+
filtered: list[FileEntry] = []
|
|
1375
|
+
for entry in entries:
|
|
1376
|
+
if any(fnmatch.fnmatch(entry.relative_path, pattern) for pattern in excludes):
|
|
1377
|
+
continue
|
|
1378
|
+
filtered.append(entry)
|
|
1379
|
+
return filtered
|
|
1380
|
+
|
|
1381
|
+
|
|
1382
|
+
def compress_payload(raw: bytes, compression_level: int) -> bytes:
    """Compress *raw* with whichever codec active_compression_codec() reports."""
    if active_compression_codec() == "zstd":
        # The shared 0-9 level scale is shifted up by one for zstd.
        return zstd.ZstdCompressor(level=compression_level + 1).compress(raw)
    # mtime=0 keeps gzip output deterministic for identical input.
    return gzip.compress(raw, compresslevel=compression_level, mtime=0)
|
|
1388
|
+
|
|
1389
|
+
|
|
1390
|
+
def shannon_entropy(sample: bytes) -> float:
    """Return Shannon entropy (bits/byte) for a sample.

    0.0 for an empty or single-valued sample, up to 8.0 for uniformly
    distributed bytes.
    """

    if not sample:
        return 0.0

    import math
    from collections import Counter

    length = len(sample)
    entropy = 0.0
    # Counter iterates only the byte values actually present, instead of
    # walking all 256 buckets and skipping the zero counts.
    for count in Counter(sample).values():
        p = count / length
        entropy -= p * math.log2(p)
    return entropy
|
|
1410
|
+
|
|
1411
|
+
|
|
1412
|
+
def likely_incompressible(raw: bytes, relative_path: str) -> bool:
    """Heuristic: skip compression for known-compressed suffixes or high entropy."""
    if Path(relative_path).suffix.lower() in INCOMPRESSIBLE_SUFFIXES:
        return True
    # Sample up to 32 KiB to estimate entropy; high entropy tends to compress poorly.
    window = raw[: 32 * 1024]
    if len(window) < 1024:
        # Too little data for a meaningful estimate; let compression try.
        return False
    return shannon_entropy(window) > 7.6
|
|
1421
|
+
|
|
1422
|
+
|
|
1423
|
+
def choose_compression(raw: bytes, relative_path: str, compression_level: int) -> tuple[str, bytes]:
    """Pick ("none", raw) or (codec, compressed), whichever is worthwhile.

    Compression is skipped at level <= 0, for likely-incompressible inputs,
    and whenever compressing saves less than ~2% of the original size.
    """
    skip = compression_level <= 0 or likely_incompressible(raw, relative_path)
    if not skip:
        candidate = compress_payload(raw, compression_level)
        if len(candidate) < int(len(raw) * 0.98):
            return active_compression_codec(), candidate
    return "none", raw
|
|
1432
|
+
|
|
1433
|
+
|
|
1434
|
+
def build_payload_record(
    *,
    raw: bytes,
    relative_path: str,
    mode: int,
    payload_name: str,
    key: bytes,
    compression_level: int,
) -> tuple[bytes, dict]:
    """Compress and AES-GCM-encrypt *raw*, describing it for the manifest.

    Returns (ciphertext, manifest_record); the record carries the nonce,
    all relevant sizes, the permission bits, and a SHA-256 of the
    plaintext so extraction can be verified.
    """
    compression, plaintext = choose_compression(raw, relative_path, compression_level)
    nonce = os.urandom(12)
    ciphertext = AESGCM(key).encrypt(nonce, plaintext, None)
    record = {
        "path": relative_path,
        "payload": f"payloads/{payload_name}",
        "size": len(raw),
        "compression": compression,
        "encryption": {"algorithm": ENCRYPTION_ALGORITHM, "nonce": nonce.hex()},
        "compressed_size": len(plaintext),
        "encrypted_size": len(ciphertext),
        "mode": mode,
        "sha256": hashlib.sha256(raw).hexdigest(),
    }
    return ciphertext, record
|
|
1459
|
+
|
|
1460
|
+
|
|
1461
|
+
def detect_sensitive_entries(entries: list[FileEntry]) -> list[FileEntry]:
    """Entries whose file name or bundle path matches a sensitive pattern."""
    flagged: list[FileEntry] = []
    for entry in entries:
        haystacks = (entry.source.name.lower(), entry.relative_path.lower())
        if any(pattern in hay for pattern in SENSITIVE_PATTERNS for hay in haystacks):
            flagged.append(entry)
    return flagged
|
|
1469
|
+
|
|
1470
|
+
|
|
1471
|
+
def inflate_payload(payload: bytes, compression: str | None) -> bytes:
    """Inflate a payload according to the manifest's compression metadata.

    Notes:
        Older/hand-crafted manifests might omit the compression field.
        In that case we try gzip first and fall back to raw bytes.
        An unrecognized compression name aborts via ``die``.
    """

    if compression in {None, ""}:
        try:
            return gzip.decompress(payload)
        except OSError:
            # Older/hand-crafted manifests might omit compression.
            # We try gzip first, then zstd (if available), then fall back to raw bytes.
            if zstd is not None:
                try:
                    return zstd.ZstdDecompressor().decompress(payload)
                except Exception:
                    pass
            return payload

    if compression == "gzip":
        return gzip.decompress(payload)

    if compression == "zstd":
        # zstd is optional at install time; fail loudly if the package needs it.
        if zstd is None:
            die("Este paquete usa zstd pero falta la dependencia 'zstandard' en este Python.")
        return zstd.ZstdDecompressor().decompress(payload)

    if compression == "none":
        return payload

    die(f"Metodo de compresion no soportado: {compression}")
|
|
1504
|
+
|
|
1505
|
+
|
|
1506
|
+
def decrypt_payload(encrypted: bytes, file_entry: dict, key: bytes) -> bytes:
    """Decrypt one payload using the nonce stored in its manifest entry.

    Raises ValueError("invalid key") on an AES-GCM authentication failure
    so callers can prompt for a different key; any other failure (bad
    nonce encoding, etc.) aborts via ``die``.
    """
    encryption_meta = file_entry.get("encryption") or {}
    # Entries written before the algorithm field existed default to AES-GCM.
    algorithm = encryption_meta.get("algorithm") or ENCRYPTION_ALGORITHM
    if algorithm != ENCRYPTION_ALGORITHM:
        die(f"Algoritmo de cifrado no soportado: {algorithm}")
    nonce_hex = encryption_meta.get("nonce")
    if not nonce_hex:
        die(f"Falta nonce para {file_entry.get('path')}")
    try:
        return AESGCM(key).decrypt(bytes.fromhex(nonce_hex), encrypted, None)
    except InvalidTag:
        # Wrong key or tampered ciphertext: surface a sentinel error.
        raise ValueError("invalid key")
    except Exception as exc:
        die(f"No se pudo descifrar {file_entry.get('path')}: {exc}")
|
|
1520
|
+
|
|
1521
|
+
|
|
1522
|
+
def decode_package_payload(package_path: Path, file_entry: dict, key: bytes) -> bytes:
    """Read, decrypt and decompress a single file from a package."""
    with ZipFile(package_path) as bundle:
        ciphertext = bundle.read(file_entry["payload"])
    plaintext = decrypt_payload(ciphertext, file_entry, key)
    return inflate_payload(plaintext, file_entry.get("compression"))
|
|
1527
|
+
|
|
1528
|
+
|
|
1529
|
+
def read_bundle_content(package_path: Path, manifest: dict, key: bytes, selected_paths: set[str] | None = None) -> dict[str, bytes]:
    """Decrypt and decompress bundle files into a path -> bytes mapping.

    A non-empty *selected_paths* restricts the result to those paths; a falsy
    value (None or an empty set) means every file in the manifest.
    """
    decoded: dict[str, bytes] = {}
    with ZipFile(package_path) as archive:
        for entry in manifest["files"]:
            relative = entry["path"]
            if selected_paths and relative not in selected_paths:
                continue
            plaintext = decrypt_payload(archive.read(entry["payload"]), entry, key)
            decoded[relative] = inflate_payload(plaintext, entry.get("compression"))
    return decoded
def build_payload_job(
    source_path: str,
    relative_path: str,
    mode: int,
    payload_name: str,
    key: bytes,
    compression_level: int,
) -> tuple[bytes, dict]:
    """Worker entry point: read a file from disk and build its payload record.

    Takes only picklable arguments so it can run in a process pool.
    """
    return build_payload_record(
        raw=Path(source_path).read_bytes(),
        relative_path=relative_path,
        mode=mode,
        payload_name=payload_name,
        key=key,
        compression_level=compression_level,
    )
def write_bundle_from_raw(
    output: Path,
    bundle_name: str,
    description: str,
    # NOTE(review): this parameter shadows the `platform` stdlib module; the
    # body does not call the module directly, but renaming would be safer.
    platform: str,
    shell: str,
    arch: str,
    tags: list[str],
    notes: str,
    after_steps: list[str],
    files: dict[str, dict],
    key: bytes,
    compression_level: int = DEFAULT_COMPRESSION_LEVEL,
) -> None:
    """Write a bundle archive from in-memory file contents.

    *files* maps relative path -> {"raw": bytes, "mode": int}. Payloads are
    encrypted into a temp dir first, then zipped together with the manifest.
    """
    history_snapshot = save_history_snapshot(output)
    files_manifest: list[dict] = []
    with TemporaryDirectory() as tmp_dir_name:
        payload_root = Path(tmp_dir_name) / "payloads"
        payload_root.mkdir(parents=True, exist_ok=True)
        # Deterministic payload names: ordinal + short hash of the path.
        for index, (path, file_meta) in enumerate(sorted(files.items()), start=1):
            raw = file_meta["raw"]
            payload_name = f"{index:04d}-{hashlib.sha256(path.encode('utf-8')).hexdigest()[:16]}.bin"
            encrypted, record = build_payload_record(
                raw=raw,
                relative_path=path,
                mode=file_meta["mode"],
                payload_name=payload_name,
                key=key,
                compression_level=compression_level,
            )
            (payload_root / payload_name).write_bytes(encrypted)
            files_manifest.append(record)
        # Reuse build_manifest() by faking the argparse namespace it expects.
        args = SimpleNamespace(
            name=bundle_name,
            description=description,
            platform=platform,
            shell=shell,
            arch=arch,
            tags=tags,
            notes=notes,
            after_steps=after_steps,
            sensitive_count=0,
            key_fingerprint=fingerprint_key(key),
        )
        manifest = build_manifest(args, files_manifest, [])
        ensure_parent(output)
        # Payloads are already compressed+encrypted, so store them raw;
        # only the JSON manifest benefits from deflate.
        with ZipFile(output, "w", compression=ZIP_STORED) as bundle:
            bundle.writestr("manifest.json", json.dumps(manifest, indent=2, sort_keys=True) + "\n", compress_type=ZIP_DEFLATED)
            for file_entry in files_manifest:
                bundle.write(payload_root / Path(file_entry["payload"]).name, file_entry["payload"])
    if history_snapshot:
        console.print(f"[dim]Previous snapshot saved to {history_snapshot}[/dim]")
    render_bundle_card(manifest, output)
    console.print(f"[bold green]Created[/bold green] {output}")
def build_manifest(args, files: list[dict], source_paths: list[str]) -> dict:
    """Assemble the top-level manifest dict that is stored as manifest.json.

    ``args`` is any namespace carrying name / description / platform / shell /
    arch / tags / notes / after_steps / sensitive_count / key_fingerprint
    (argparse args or the SimpleNamespace built in write_bundle_from_raw).
    """
    total_size = sum(item["size"] for item in files)
    return {
        "package_version": PACKAGE_VERSION,
        "peridot_version": APP_VERSION,
        "bundle": {
            "name": args.name,
            "slug": slugify(args.name),
            "description": args.description,
            "platform": {
                "os": normalize_os_name(args.platform),
                "shell": args.shell,
                # Fall back to the build machine's architecture when unset.
                "arch": args.arch or platform.machine().lower(),
            },
            "tags": sorted(set(args.tags)),
            "created_at": utc_now(),
            "notes": args.notes,
            "post_apply": args.after_steps,
            # Provenance of the bundle: who built it and from which paths.
            "source": {
                "host": socket.gethostname(),
                "user": os.environ.get("USER") or os.environ.get("USERNAME") or "unknown",
                "home": str(Path.home()),
                "paths": source_paths,
            },
            "stats": {
                "files": len(files),
                "bytes": total_size,
            },
            "security": {
                "sensitive_files": args.sensitive_count,
                "key_fingerprint": args.key_fingerprint,
            },
        },
        "files": files,
    }
def print_banner() -> None:
    """Render the PERIDOT splash panel on the shared console."""
    heading = Text("PERIDOT", style="bold bright_green")
    tagline = Text(tr("Bundles portables de configuracion para humanos"), style="italic cyan")
    console.print(
        Panel(
            Align.center(Text.assemble(heading, "\n", tagline)),
            border_style="bright_green",
            padding=(1, 3),
        )
    )
def normalize_tags(raw_tags: list[str] | str | None) -> list[str]:
|
|
1665
|
+
if raw_tags is None:
|
|
1666
|
+
return []
|
|
1667
|
+
if isinstance(raw_tags, str):
|
|
1668
|
+
candidates = raw_tags.split(",")
|
|
1669
|
+
else:
|
|
1670
|
+
candidates = raw_tags
|
|
1671
|
+
|
|
1672
|
+
tags: list[str] = []
|
|
1673
|
+
for item in candidates:
|
|
1674
|
+
for piece in str(item).split(","):
|
|
1675
|
+
cleaned = piece.strip()
|
|
1676
|
+
if cleaned:
|
|
1677
|
+
tags.append(cleaned)
|
|
1678
|
+
return sorted(set(tags))
|
|
1679
|
+
|
|
1680
|
+
|
|
1681
|
+
def load_profile_into_args(args) -> None:
    """Backfill unset pack arguments from a stored profile, if one was named.

    Explicit (truthy) argument values always win; the profile only fills gaps.
    """
    profile_name = getattr(args, "profile", "")
    if not profile_name:
        return
    profile = load_profiles().get(profile_name)
    if not profile:
        die(f"No existe el perfil '{profile_name}'")
    args.name = args.name or profile.get("name")
    # Remaining fields share the same "CLI value or profile value" rule.
    backfill = (
        ("description", ""),
        ("platform", ""),
        ("shell", ""),
        ("arch", ""),
        ("tags", []),
        ("preset", ""),
        ("paths", []),
        ("exclude", []),
        ("notes", ""),
        ("after_steps", []),
    )
    for attr, fallback in backfill:
        setattr(args, attr, getattr(args, attr) or profile.get(attr, fallback))
def apply_settings_defaults(args) -> None:
    """Resolve compression/jobs/language, preferring explicit args over settings."""
    settings = load_settings()

    def resolved(attr: str):
        # An explicitly supplied (non-None) argument wins; otherwise use the
        # persisted setting of the same name.
        value = getattr(args, attr, None)
        return value if value is not None else settings[attr]

    args.compression_level = sanitize_compression_level(resolved("compression_level"))
    args.jobs = sanitize_jobs(resolved("jobs"))
    args.language = sanitize_language(resolved("language"))
def interactive_pack_setup(args) -> tuple[list[Path], Path]:
    """Run the interactive pack wizard, mutating *args* and returning
    (selected_paths, output_path).

    Flow: preset -> metadata prompts -> path selection (preset / catalog /
    manual, with optional checkbox editing) -> summary panel.
    """
    render_presets_table()
    recommended_preset = args.preset or get_recommended_preset()
    preset_name = Prompt.ask(tr("Preset"), default=recommended_preset)
    if preset_name:
        # Don't clobber explicitly-given paths with the preset's paths.
        apply_preset(args, preset_name, force_paths=False)

    default_name = f"{platform.node() or 'my'}-{normalize_os_name()}-bundle"
    if not args.name:
        args.name = Prompt.ask(tr("Bundle name"), default=default_name)

    current_os = normalize_os_name()
    args.description = Prompt.ask(
        tr("Description"),
        default=args.description or f"Config bundle for {args.name}",
    )
    args.platform = normalize_os_name(
        Prompt.ask(tr("Target OS"), default=args.platform or current_os)
    )
    args.shell = Prompt.ask(
        tr("Primary shell/runtime"),
        default=args.shell or detect_shell() or "any",
    )
    arch_default = args.arch or platform.machine().lower() or "any"
    args.arch = Prompt.ask(tr("Target architecture"), default=arch_default)
    tag_default = ", ".join(args.tags) if args.tags else ""
    entered_tags = Prompt.ask(tr("Tags (comma separated)"), default=tag_default)
    args.tags = normalize_tags(entered_tags)

    # Decide where the initial path suggestion comes from.
    base_mode = choose_pack_base(args.platform, args.preset if getattr(args, "preset", "") else None)
    if base_mode == "preset":
        suggested_paths = [Path(item).expanduser() for item in args.paths] if args.paths else default_export_roots()
    elif base_mode == "catalog":
        suggested_paths = interactive_select_config_groups(args.platform, args.shell or detect_shell())
    else:
        suggested_paths = []

    if suggested_paths:
        # Show at most 8 suggestions, then let the user accept or edit them.
        preview = "\n".join(f"- {item}" for item in suggested_paths[:8])
        if len(suggested_paths) > 8:
            preview += f"\n- ... y {len(suggested_paths) - 8} mas"
        console.print(Panel(preview, title=tr("Selected paths"), border_style="cyan"))
        edit_selection = Confirm.ask(tr("Edit this selection?"), default=False)
        if not edit_selection:
            selected_paths = suggested_paths
        else:
            # Checkbox UI; a None result means the UI is unavailable.
            chosen = interactive_checkbox_catalog_paths(args.platform, suggested_paths)
            if chosen is not None:
                selected_paths = chosen
                if Confirm.ask(tr("Add extra paths manually?"), default=False):
                    raw_paths = Prompt.ask(tr("Extra paths (comma separated)"), default="")
                    extras = [Path(item.strip()).expanduser() for item in raw_paths.split(",") if item.strip()]
                    selected_paths.extend(extras)
            else:
                explain_checkbox_unavailable()
                console.print(f"[yellow]{tr('Usando entrada manual.')}[/yellow]")
                raw_paths = Prompt.ask(tr("Paths to include (comma separated)"))
                selected_paths = [Path(item.strip()).expanduser() for item in raw_paths.split(",") if item.strip()]
    else:
        # No suggestions at all: same checkbox-or-manual fallback flow.
        chosen = interactive_checkbox_catalog_paths(args.platform, [])
        if chosen is not None:
            selected_paths = chosen
            if Confirm.ask(tr("Add extra paths manually?"), default=False):
                raw_paths = Prompt.ask(tr("Extra paths (comma separated)"), default="")
                selected_paths.extend([Path(item.strip()).expanduser() for item in raw_paths.split(",") if item.strip()])
        else:
            explain_checkbox_unavailable()
            console.print(f"[yellow]{tr('Usando entrada manual.')}[/yellow]")
            raw_paths = Prompt.ask(tr("Paths to include (comma separated)"))
            selected_paths = [Path(item.strip()).expanduser() for item in raw_paths.split(",") if item.strip()]

    # De-duplicate while preserving order.
    selected_paths = list(dict.fromkeys(selected_paths))
    if not selected_paths:
        if suggested_paths:
            console.print(f"[yellow]{tr('No has seleccionado ninguna ruta, se usara la seleccion sugerida.')}[/yellow]")
            selected_paths = suggested_paths
        else:
            raw_paths = Prompt.ask(tr("Paths to include (comma separated)"))
            selected_paths = [Path(item.strip()).expanduser() for item in raw_paths.split(",") if item.strip()]

    output_default = str(args.output) if args.output else f"{slugify(args.name)}.peridot"
    args.output = Path(Prompt.ask(tr("Output package"), default=output_default))
    # Final confirmation summary before packing starts.
    summary = Table.grid(padding=(0, 2))
    summary.add_row("Name", args.name)
    summary.add_row("Description", args.description)
    summary.add_row("Target", f"{args.platform} / {args.shell or 'any'} / {args.arch or 'any'}")
    summary.add_row("Tags", ", ".join(args.tags) if args.tags else "none")
    summary.add_row("Output", str(args.output))
    summary.add_row("Paths", str(len(selected_paths)))
    summary.add_row(
        "Compression",
        f"{args.compression_level}/9 ({compression_profile_name(args.compression_level)}: {compression_profile_detail(args.compression_level)})",
    )
    summary.add_row("Encryption", ENCRYPTION_ALGORITHM)
    summary.add_row("Workers", str(args.jobs))
    console.print(Panel(summary, title=tr("Pack Preview"), border_style="bright_green"))
    console.print(render_compression_setting(args.compression_level))
    return selected_paths, args.output
def prepare_pack_inputs(args) -> tuple[list[Path], Path]:
    """Resolve the (paths, output) pair for packing.

    Merges profile/preset values into *args*, fills platform defaults, and
    drops into the interactive wizard only on a TTY when required inputs are
    missing; otherwise derives safe non-interactive defaults.
    """
    load_profile_into_args(args)
    args.tags = normalize_tags(args.tags)
    if getattr(args, "preset", ""):
        # Let the preset supply paths only when none were given explicitly.
        apply_preset(args, args.preset, force_paths=not args.paths)
    if not args.platform:
        args.platform = normalize_os_name()
    if not args.shell:
        args.shell = detect_shell()
    if not args.arch:
        args.arch = platform.machine().lower()

    if sys.stdin.isatty():
        missing_core = not args.name or not args.description or not args.output
        if missing_core or not args.paths:
            return interactive_pack_setup(args)

    # Non-interactive defaults: avoid hard-failing when running from scripts/CI.
    if not args.name:
        args.name = f"{platform.node() or 'my'}-{normalize_os_name(args.platform)}-bundle"
    if args.description is None:
        args.description = ""

    paths = [Path(item).expanduser() for item in args.paths] if args.paths else default_export_roots()
    output = args.output or Path(f"{slugify(args.name)}.peridot")
    return paths, output
def render_bundle_card(manifest: dict, package_path: Path | None = None) -> None:
    """Print the bundle summary panel ("Bundle Card") for a manifest.

    When *package_path* is given, the on-disk package location is included.
    """
    bundle = manifest["bundle"]
    platform_data = bundle["platform"]
    stats = bundle["stats"]
    tags = ", ".join(bundle["tags"]) if bundle["tags"] else "none"
    description = bundle["description"] or "Sin descripcion"
    # Collect the distinct cipher algorithms actually used by the payloads;
    # per-file metadata may be missing, in which case assume the default.
    algorithms = sorted({((entry.get("encryption") or {}).get("algorithm") or ENCRYPTION_ALGORITHM) for entry in manifest["files"]}) or [ENCRYPTION_ALGORITHM]

    table = Table.grid(padding=(0, 2))
    table.add_row("Bundle", f"[bold]{bundle['name']}[/bold]")
    if package_path:
        table.add_row("Package", str(package_path))
    table.add_row("Description", description)
    table.add_row("Target", f"{platform_data['os']} / {platform_data.get('shell') or 'any'} / {platform_data.get('arch') or 'any'}")
    table.add_row("Files", str(stats["files"]))
    table.add_row("Payload", format_bytes(stats["bytes"]))
    table.add_row("Encryption", ", ".join(algorithms))
    table.add_row("Tags", tags)
    table.add_row("Created", bundle["created_at"])
    table.add_row("From", f"{bundle['source']['user']}@{bundle['source']['host']}")
    # Optional rows: only shown when the manifest carries the data.
    if bundle.get("notes"):
        table.add_row("Notes", bundle["notes"])
    if bundle.get("security", {}).get("sensitive_files"):
        table.add_row("Sensitive", str(bundle["security"]["sensitive_files"]))

    console.print(
        Panel(
            table,
            title=f"[bold bright_green]{tr('Bundle Card')}[/bold bright_green]",
            border_style="green",
        )
    )
def render_file_table(manifest: dict, limit: int | None = None) -> None:
    """Print a table of bundle files, optionally truncated to *limit* rows."""
    files = manifest["files"]
    visible = files if limit is None else files[:limit]

    table = Table(title=tr("Files"), header_style="bold cyan")
    table.add_column("Path", style="white")
    table.add_column("Size", justify="right", style="green")
    table.add_column("Mode", justify="right", style="magenta")
    for item in visible:
        table.add_row(item["path"], format_bytes(item["size"]), oct(item["mode"]))
    console.print(table)

    # Tell the user how many rows were cut off by the limit.
    remaining = len(files) - len(visible)
    if remaining > 0:
        console.print(f"[dim]... y {remaining} ficheros mas[/dim]")
def print_manifest_json(manifest: dict) -> None:
    """Pretty-print the raw manifest as syntax-highlighted JSON."""
    console.print_json(data=manifest)
def render_diff_table(rows: list[tuple[str, str]]) -> None:
    """Render (status, path) diff rows, coloring each known status.

    Statuses outside the known set (e.g. "exists" from bundle_diff without a
    key) fall back to white.
    """
    # The original title expression was `"Diff" if CURRENT_LANGUAGE == "es"
    # else "Diff"` — both branches identical, so use the literal directly.
    table = Table(title="Diff", header_style="bold cyan")
    table.add_column("Status")
    table.add_column("Path", style="white")
    status_styles = {"new": "green", "changed": "yellow", "same": "dim", "missing": "red"}
    for status, path in rows:
        style = status_styles.get(status, "white")
        table.add_row(f"[{style}]{status}[/{style}]", path)
    console.print(table)
def bundle_diff(manifest: dict, target_root: Path, key: bytes | None = None, package_path: Path | None = None) -> list[tuple[str, str]]:
    """Compare manifest entries against files under *target_root*.

    Returns (status, path) rows: "new" when the target file is missing,
    "same"/"changed" by sha256 when a key and package are available, and
    "exists" (presence only) otherwise.
    """
    rows: list[tuple[str, str]] = []
    # NOTE(review): bundle_contents is never read below — this decrypts the
    # whole bundle for nothing. It may be kept only as an implicit key check;
    # confirm before removing.
    bundle_contents = read_bundle_content(package_path, manifest, key) if key and package_path else {}
    for file_entry in manifest["files"]:
        target_path = target_root / file_entry["path"]
        if not target_path.exists():
            rows.append(("new", file_entry["path"]))
            continue
        if key and package_path:
            # Hash the local file and compare with the manifest's recorded hash.
            current_hash = hashlib.sha256(target_path.read_bytes()).hexdigest()
            rows.append(("same" if current_hash == file_entry["sha256"] else "changed", file_entry["path"]))
        else:
            rows.append(("exists", file_entry["path"]))
    return rows
def discover_local_bundles(base_dir: Path | None = None) -> list[Path]:
|
|
1920
|
+
root = (base_dir or Path.cwd()).expanduser()
|
|
1921
|
+
return sorted(root.glob("*.peridot"))
|
|
1922
|
+
|
|
1923
|
+
|
|
1924
|
+
def render_local_bundle_table(base_dir: Path | None = None) -> None:
    """Print an indexed table of local .peridot bundles (or a placeholder row)."""
    table = Table(title=tr("Bundles locales"), header_style="bold cyan")
    table.add_column("#", style="dim", justify="right")
    table.add_column("File", style="white")
    table.add_column("Size", justify="right", style="green")

    bundles = discover_local_bundles(base_dir)
    if not bundles:
        table.add_row("-", tr("No hay bundles .peridot en este directorio"), "-")
    else:
        for position, bundle_path in enumerate(bundles, start=1):
            table.add_row(str(position), bundle_path.name, format_bytes(bundle_path.stat().st_size))
    console.print(table)
def choose_bundle_path(action_label: str) -> Path:
    """Prompt for one bundle, accepting a 1-based table index or a file path."""
    bundles = discover_local_bundles()
    default = ""
    if bundles:
        render_local_bundle_table()
        default = bundles[0].name

    answer = Prompt.ask(f"{action_label} package", default=default)
    if answer.isdigit():
        position = int(answer) - 1
        if not (0 <= position < len(bundles)):
            die(f"Indice fuera de rango: {answer}")
        bundle_path = bundles[position]
    else:
        bundle_path = Path(answer).expanduser()
    if not bundle_path.exists():
        die(f"No existe el paquete {bundle_path}")
    return bundle_path
def choose_bundle_paths(action_label: str) -> list[Path]:
    """Select one or more local bundles via checkboxes, or by index input."""
    bundles = discover_local_bundles()
    if not bundles:
        die("No hay paquetes locales en este directorio.")
    render_local_bundle_table()

    # Prefer the interactive checkbox UI when it is usable on a TTY.
    if QUESTIONARY_AVAILABLE and sys.stdin.isatty():
        options = [Choice(title=item.name, value=item, checked=True) for item in bundles]
        picked = checkbox_prompt(f"{action_label} bundles", options)
        if picked is not None:
            return picked

    # Fallback: parse whitespace/comma separated 1-based indices.
    answer = Prompt.ask(f"{action_label} bundles by index (e.g. 1 2 3)", default="1")
    chosen: list[Path] = []
    for token in answer.replace(",", " ").split():
        if not token.isdigit():
            continue
        position = int(token) - 1
        if 0 <= position < len(bundles):
            chosen.append(bundles[position])
    # De-duplicate while preserving the order the user typed.
    return list(dict.fromkeys(chosen))
def render_action_hub() -> None:
    """Print the command-center panel listing every hub action.

    Each row carries (command, English description, Spanish description);
    the active language picks which description is shown.
    """
    commands = Table.grid(padding=(0, 3))
    hub_rows = [
        ("pack", "Create a new bundle with a guided wizard", "Crea un nuevo bundle con asistente guiado"),
        ("catalog", "Browse categorized config groups with found counts", "Explora grupos clasificados con recuento detectado"),
        ("presets", "Browse dotfile presets for macOS, Linux and Windows", "Explora presets de dotfiles para macOS, Linux y Windows"),
        ("inspect", "Open the bundle card and file summary", "Abre la ficha del bundle y el resumen de archivos"),
        ("apply", "Preview or restore a bundle into a target directory", "Previsualiza o restaura un bundle en un directorio destino"),
        ("diff", "Compare a bundle against a target directory", "Compara un bundle contra un directorio destino"),
        ("verify", "Validate bundle structure and integrity", "Valida estructura e integridad del bundle"),
        ("doctor", "Run local environment diagnostics", "Ejecuta diagnostico del entorno local"),
        ("share", "Export a bundle card as markdown or json", "Exporta la ficha del bundle como markdown o json"),
        ("manifest", "Show the raw manifest JSON", "Muestra el manifest JSON crudo"),
        ("history", "List stored snapshots for a bundle name", "Lista snapshots guardados de un bundle"),
        ("profile", "Manage reusable pack profiles", "Gestiona perfiles reutilizables"),
        ("settings", "Tune compression, workers and language defaults", "Ajusta compresion, workers e idioma por defecto"),
        ("keygen", "Create or inspect the active key", "Genera o inspecciona la clave activa"),
        ("rekey", "Generate a new key and migrate package payloads", "Genera una nueva clave y migra los payloads"),
        ("delete", "Delete local bundle files", "Elimina bundles locales"),
        ("quit", "Exit the command center", "Sale del command center"),
    ]
    # Per-command accent color; every command in hub_rows must have an entry.
    colors = {"pack": "green", "catalog": "blue", "presets": "bright_green", "inspect": "cyan", "apply": "yellow", "diff": "yellow", "verify": "white", "doctor": "white", "share": "white", "manifest": "magenta", "history": "white", "profile": "white", "settings": "white", "keygen": "white", "rekey": "white", "delete": "red", "quit": "red"}
    for command, en_desc, es_desc in hub_rows:
        desc = en_desc if CURRENT_LANGUAGE == "en" else es_desc
        commands.add_row(f"[bold {colors[command]}]{command}[/bold {colors[command]}]", desc)
    console.print(Panel(commands, title=f"[bold bright_green]{tr('Centro de acciones')}[/bold bright_green]", border_style="green"))
def prompt_action_choice() -> str:
    """Ask for a hub action; accept either its name or its numeric shortcut."""
    actions = ["pack", "catalog", "presets", "inspect", "apply", "diff", "verify", "doctor", "share", "manifest", "history", "profile", "settings", "keygen", "rekey", "delete", "quit"]
    shortcuts = {str(number): name for number, name in enumerate(actions, start=1)}
    console.print(f"[dim]{tr('Atajos: 1=pack 2=catalog 3=presets 4=inspect 5=apply 6=diff 7=verify 8=doctor 9=share 10=manifest 11=history 12=profile 13=settings 14=keygen 15=rekey 16=delete 17=quit')}[/dim]")
    answer = Prompt.ask(
        tr("Elegir accion"),
        default="pack",
    ).strip().lower()
    if answer in shortcuts:
        return shortcuts[answer]
    if answer in actions:
        return answer
    die(f"Accion desconocida: {answer}")
def check_platform_compatibility(manifest: dict) -> tuple[bool, str]:
    """Compare the bundle's declared OS/arch against the current machine.

    Returns (ok, human-readable reason); "any" or empty targets always match.
    """
    declared = manifest["bundle"]["platform"]
    wanted_os = declared.get("os")
    wanted_arch = (declared.get("arch") or "").lower()
    host_os = normalize_os_name()
    host_arch = platform.machine().lower()

    if wanted_os and wanted_os not in ("any", host_os):
        return False, f"El paquete es para {wanted_os} y esta maquina es {host_os}"
    if wanted_arch and wanted_arch not in ("any", host_arch):
        return False, f"El paquete es para {wanted_arch} y esta maquina es {host_arch}"
    return True, "Compatible con esta maquina"
def cmd_keygen(args) -> None:
    """Create the active key if missing and display its location/fingerprint."""
    key = load_key(args.key, create=True)
    # Short, human-comparable fingerprint of the key material.
    fingerprint = hashlib.sha256(key).hexdigest()[:16]
    # The panel body is localized; the title was `'Keyring' if ... else
    # 'Keyring'` — identical in both branches — so it is now the literal.
    console.print(
        Panel(
            f"{'Key available at' if CURRENT_LANGUAGE == 'en' else 'Clave disponible en'} [bold]{args.key}[/bold]\nFingerprint: [cyan]{fingerprint}[/cyan]",
            title="[bold bright_green]Keyring[/bold bright_green]",
            border_style="green",
        )
    )
def cmd_pack(args) -> None:
|
|
2052
|
+
print_banner()
|
|
2053
|
+
args.exclude = getattr(args, "exclude", [])
|
|
2054
|
+
args.notes = getattr(args, "notes", "")
|
|
2055
|
+
args.after_steps = getattr(args, "after_steps", [])
|
|
2056
|
+
args.profile = getattr(args, "profile", "")
|
|
2057
|
+
args.yes = getattr(args, "yes", False)
|
|
2058
|
+
apply_settings_defaults(args)
|
|
2059
|
+
key = load_key(args.key, create=True)
|
|
2060
|
+
paths, output = prepare_pack_inputs(args)
|
|
2061
|
+
if not paths:
|
|
2062
|
+
die("No hay rutas para empaquetar. Pasa rutas explicitas o prepara tu HOME.")
|
|
2063
|
+
|
|
2064
|
+
with Progress(
|
|
2065
|
+
SpinnerColumn(style="cyan"),
|
|
2066
|
+
TextColumn("[progress.description]{task.description}"),
|
|
2067
|
+
TextColumn("{task.fields[file_count]} files"),
|
|
2068
|
+
TextColumn("[dim]{task.fields[current_path]}[/dim]"),
|
|
2069
|
+
TimeElapsedColumn(),
|
|
2070
|
+
console=console,
|
|
2071
|
+
) as scan_progress:
|
|
2072
|
+
scan_task = scan_progress.add_task(
|
|
2073
|
+
tr("Scanning files"),
|
|
2074
|
+
total=None,
|
|
2075
|
+
file_count=0,
|
|
2076
|
+
current_path="starting",
|
|
2077
|
+
)
|
|
2078
|
+
|
|
2079
|
+
def update_scan(count: int, current_path: Path) -> None:
|
|
2080
|
+
scan_progress.update(
|
|
2081
|
+
scan_task,
|
|
2082
|
+
file_count=count,
|
|
2083
|
+
current_path=str(current_path)[:80],
|
|
2084
|
+
)
|
|
2085
|
+
|
|
2086
|
+
entries = filter_entries(collect_files(paths, progress_callback=update_scan), args.exclude)
|
|
2087
|
+
scan_progress.update(scan_task, description=tr("Scanning files done"), current_path="")
|
|
2088
|
+
if not entries:
|
|
2089
|
+
die("No se encontro ningun archivo exportable.")
|
|
2090
|
+
sensitive_entries = detect_sensitive_entries(entries)
|
|
2091
|
+
if sensitive_entries:
|
|
2092
|
+
preview = "\n".join(f"- {entry.relative_path}" for entry in sensitive_entries[:10])
|
|
2093
|
+
console.print(Panel(preview, title=tr("Sensitive paths detected"), border_style="yellow"))
|
|
2094
|
+
if not args.yes and sys.stdin.isatty():
|
|
2095
|
+
if not Confirm.ask(tr("Include these sensitive paths?"), default=False):
|
|
2096
|
+
entries = [entry for entry in entries if entry not in sensitive_entries]
|
|
2097
|
+
if not entries:
|
|
2098
|
+
die("No quedan archivos tras aplicar exclusiones y filtros de seguridad.")
|
|
2099
|
+
|
|
2100
|
+
files_manifest: list[dict] = []
|
|
2101
|
+
history_snapshot = save_history_snapshot(output)
|
|
2102
|
+
args.sensitive_count = len(sensitive_entries)
|
|
2103
|
+
args.key_fingerprint = fingerprint_key(key)
|
|
2104
|
+
total_bytes = sum(entry.size for entry in entries)
|
|
2105
|
+
requested_jobs = args.jobs
|
|
2106
|
+
initial_jobs, preflight_reason = safe_pack_jobs(entries, args.jobs)
|
|
2107
|
+
args.jobs = initial_jobs
|
|
2108
|
+
if initial_jobs < requested_jobs:
|
|
2109
|
+
console.print(
|
|
2110
|
+
f"[yellow]{tr('Adaptive pack:')}[/yellow] {trf('reduciendo ventana inicial de {requested} a {initial} ({reason}). Puede volver a subir si la memoria acompana.', requested=requested_jobs, initial=initial_jobs, reason=preflight_reason)}"
|
|
2111
|
+
)
|
|
2112
|
+
|
|
2113
|
+
ensure_parent(output)
|
|
2114
|
+
tmp_output = output.with_suffix(output.suffix + ".tmp")
|
|
2115
|
+
|
|
2116
|
+
# Write payloads directly into the ZIP as they are produced.
|
|
2117
|
+
# This avoids writing encrypted payloads to disk first (big speed win on slow I/O).
|
|
2118
|
+
with ZipFile(tmp_output, "w", compression=ZIP_STORED) as bundle:
|
|
2119
|
+
with Progress(
|
|
2120
|
+
SpinnerColumn(style="green"),
|
|
2121
|
+
TextColumn("[progress.description]{task.description}"),
|
|
2122
|
+
BarColumn(bar_width=30),
|
|
2123
|
+
TextColumn("{task.fields[file_count]}/{task.fields[file_total]} files"),
|
|
2124
|
+
TextColumn("[dim]{task.fields[worker_status]}[/dim]"),
|
|
2125
|
+
TextColumn("{task.percentage:>3.0f}%"),
|
|
2126
|
+
TimeElapsedColumn(),
|
|
2127
|
+
TimeRemainingColumn(),
|
|
2128
|
+
console=console,
|
|
2129
|
+
) as progress:
|
|
2130
|
+
task = progress.add_task(
|
|
2131
|
+
"Packing",
|
|
2132
|
+
total=total_bytes or len(entries),
|
|
2133
|
+
file_count=0,
|
|
2134
|
+
file_total=len(entries),
|
|
2135
|
+
worker_status=f"active {initial_jobs}/{requested_jobs} | {preflight_reason}",
|
|
2136
|
+
)
|
|
2137
|
+
|
|
2138
|
+
executor, executor_mode = create_pack_executor(requested_jobs)
|
|
2139
|
+
if executor_mode == "threads-fallback":
|
|
2140
|
+
console.print(
|
|
2141
|
+
f"[yellow]{tr('Adaptive pack:')}[/yellow] {tr('Process pool no disponible en este sistema; usando threads.')}" # noqa: E501
|
|
2142
|
+
)
|
|
2143
|
+
with executor:
|
|
2144
|
+
pending: dict = {}
|
|
2145
|
+
next_index = 1
|
|
2146
|
+
completed_files = 0
|
|
2147
|
+
|
|
2148
|
+
def submit_one(index: int, entry: FileEntry) -> None:
|
|
2149
|
+
payload_name = f"{index:04d}-{hashlib.sha256(entry.relative_path.encode('utf-8')).hexdigest()[:16]}.bin"
|
|
2150
|
+
future = executor.submit(
|
|
2151
|
+
build_payload_job,
|
|
2152
|
+
str(entry.source),
|
|
2153
|
+
entry.relative_path,
|
|
2154
|
+
entry.mode,
|
|
2155
|
+
payload_name,
|
|
2156
|
+
key,
|
|
2157
|
+
args.compression_level,
|
|
2158
|
+
)
|
|
2159
|
+
pending[future] = (entry, payload_name)
|
|
2160
|
+
|
|
2161
|
+
inflight_limit = initial_jobs
|
|
2162
|
+
inflight_limit, pressure_label = adaptive_next_inflight_limit(inflight_limit, requested_jobs)
|
|
2163
|
+
while next_index <= len(entries) and len(pending) < inflight_limit:
|
|
2164
|
+
submit_one(next_index, entries[next_index - 1])
|
|
2165
|
+
next_index += 1
|
|
2166
|
+
|
|
2167
|
+
while pending:
|
|
2168
|
+
done, _ = wait(set(pending.keys()), return_when=FIRST_COMPLETED)
|
|
2169
|
+
for future in done:
|
|
2170
|
+
entry, payload_name = pending.pop(future)
|
|
2171
|
+
encrypted, record = future.result()
|
|
2172
|
+
|
|
2173
|
+
# Write payload directly into the bundle.
|
|
2174
|
+
bundle.writestr(record["payload"], encrypted)
|
|
2175
|
+
|
|
2176
|
+
files_manifest.append(record)
|
|
2177
|
+
completed_files += 1
|
|
2178
|
+
progress.update(
|
|
2179
|
+
task,
|
|
2180
|
+
advance=record["size"],
|
|
2181
|
+
file_count=completed_files,
|
|
2182
|
+
worker_status=f"active {inflight_limit}/{requested_jobs} | {pressure_label}",
|
|
2183
|
+
)
|
|
2184
|
+
|
|
2185
|
+
previous_limit = inflight_limit
|
|
2186
|
+
inflight_limit, pressure_label = adaptive_next_inflight_limit(inflight_limit, requested_jobs)
|
|
2187
|
+
if inflight_limit != previous_limit:
|
|
2188
|
+
if inflight_limit > previous_limit:
|
|
2189
|
+
detail = trf(
|
|
2190
|
+
"subiendo ventana activa {previous} -> {current} ({label}).",
|
|
2191
|
+
previous=previous_limit,
|
|
2192
|
+
current=inflight_limit,
|
|
2193
|
+
label=pressure_label,
|
|
2194
|
+
)
|
|
2195
|
+
else:
|
|
2196
|
+
detail = trf(
|
|
2197
|
+
"bajando ventana activa {previous} -> {current} ({label}).",
|
|
2198
|
+
previous=previous_limit,
|
|
2199
|
+
current=inflight_limit,
|
|
2200
|
+
label=pressure_label,
|
|
2201
|
+
)
|
|
2202
|
+
console.print(f"[dim]{tr('Adaptive pack:')}[/dim] {detail}")
|
|
2203
|
+
while next_index <= len(entries) and len(pending) < inflight_limit:
|
|
2204
|
+
submit_one(next_index, entries[next_index - 1])
|
|
2205
|
+
next_index += 1
|
|
2206
|
+
|
|
2207
|
+
files_manifest.sort(key=lambda item: item["path"])
|
|
2208
|
+
manifest = build_manifest(args, files_manifest, [str(path) for path in paths])
|
|
2209
|
+
bundle.writestr(
|
|
2210
|
+
"manifest.json",
|
|
2211
|
+
json.dumps(manifest, indent=2, sort_keys=True) + "\n",
|
|
2212
|
+
compress_type=ZIP_DEFLATED,
|
|
2213
|
+
)
|
|
2214
|
+
|
|
2215
|
+
# Atomic-ish replace.
|
|
2216
|
+
tmp_output.replace(output)
|
|
2217
|
+
|
|
2218
|
+
render_bundle_card(manifest, output)
|
|
2219
|
+
if history_snapshot:
|
|
2220
|
+
console.print(f"[dim]{trf('Previous snapshot saved to {path}', path=history_snapshot)}[/dim]")
|
|
2221
|
+
console.print(f"[bold green]{trf('Created {output}', output=output)}[/bold green]")
|
|
2222
|
+
|
|
2223
|
+
|
|
2224
|
+
def cmd_bench(args) -> None:
    """Micro-benchmark Peridot pack performance.

    This creates a synthetic dataset of N files and runs `pack` multiple times
    with different compression levels/jobs, reporting wall time and output size.

    Reads from ``args``: runs, files, size_kb, levels (comma-separated ints),
    jobs, key, json, and optionally ``out`` (path to write a JSON report) and
    ``language``.
    """

    print_banner()

    # Clamp user-supplied counts to at least 1 so the benchmark always runs.
    runs = max(1, int(args.runs))
    file_count = max(1, int(args.files))
    size_kb = max(1, int(args.size_kb))

    # Parse compression levels like: "0,1,3,6".
    levels: list[int] = []
    for raw in str(args.levels).split(","):
        raw = raw.strip()
        if not raw:
            continue
        levels.append(sanitize_compression_level(int(raw)))
    if not levels:
        levels = [DEFAULT_COMPRESSION_LEVEL]

    # Ensure the key file exists up front; cmd_pack re-loads it by path, so
    # the returned value itself is intentionally unused here.
    key = load_key(args.key, create=True)

    results: list[dict] = []

    with TemporaryDirectory() as tmp:
        root = Path(tmp)
        data_dir = root / "data"
        data_dir.mkdir(parents=True, exist_ok=True)

        # Deterministic-ish payload to make runs comparable.
        block = (b"peridot" * 128)  # 768 bytes
        target_size = size_kb * 1024

        for i in range(file_count):
            # Repeat the block and truncate to the exact requested size.
            content = (block * ((target_size // len(block)) + 1))[:target_size]
            (data_dir / f"file-{i:04d}.txt").write_bytes(content)

        # One run per config.
        for level in levels:
            for jobs in [int(args.jobs)]:
                for run_idx in range(runs):
                    out = root / f"bench-l{level}-j{jobs}-r{run_idx}.peridot"
                    t0 = datetime.now(timezone.utc)
                    start = datetime.now(timezone.utc).timestamp()

                    # Synthesize a namespace mirroring the `pack` CLI options.
                    pack_args = SimpleNamespace(
                        key=args.key,
                        name=f"bench-l{level}-j{jobs}",
                        paths=[str(data_dir)],
                        output=out,
                        description="",
                        platform=normalize_os_name("linux"),
                        shell="bash",
                        arch=platform.machine().lower(),
                        tags=[],
                        preset="",
                        profile="",
                        exclude=[],
                        notes="",
                        after_steps=[],
                        compression_level=level,
                        jobs=jobs,
                        yes=True,
                        language=getattr(args, "language", None),
                    )

                    cmd_pack(pack_args)

                    end = datetime.now(timezone.utc).timestamp()
                    dt = end - start
                    results.append(
                        {
                            "level": level,
                            "jobs": jobs,
                            "run": run_idx,
                            "seconds": round(dt, 4),
                            "output_bytes": out.stat().st_size,
                            "started_at": t0.isoformat(),
                        }
                    )

    # Total uncompressed input size, used for throughput/ratio figures.
    input_bytes = file_count * size_kb * 1024

    # Print a compact summary.
    console.print("\n[bold]" + tr("Bench results") + "[/bold]")
    for row in results:
        # Guard against divide-by-zero on extremely fast runs.
        mb_s = (input_bytes / 1_000_000) / max(0.0001, float(row["seconds"]))
        ratio = float(row["output_bytes"]) / max(1.0, float(input_bytes))
        console.print(
            f"- level={row['level']} jobs={row['jobs']} run={row['run']} -> {row['seconds']}s | in={format_bytes(int(input_bytes))} | out={format_bytes(int(row['output_bytes']))} | {mb_s:.1f} MB/s | ratio={ratio:.2f}"
        )

    # Optional machine-readable report (stdout and/or file).
    if args.json or getattr(args, "out", None):
        payload = {
            "input_bytes": input_bytes,
            "files": file_count,
            "size_kb": size_kb,
            "results": results,
        }
        rendered = json.dumps(payload, indent=2)
        if getattr(args, "out", None):
            out_path = Path(args.out).expanduser()
            ensure_parent(out_path)
            out_path.write_text(rendered + "\n", encoding="utf-8")
            console.print(f"[dim]Saved bench JSON to {out_path}[/dim]")
        if args.json:
            print(rendered)
|
|
2334
|
+
|
|
2335
|
+
|
|
2336
|
+
def cmd_inspect(args) -> None:
    """Show the bundle card, platform compatibility and, optionally, its files."""
    print_banner()
    manifest = manifest_from_zip(args.package)
    render_bundle_card(manifest, args.package)

    is_compatible, compat_message = check_platform_compatibility(manifest)
    if is_compatible:
        border = "green"
    else:
        border = "yellow"
    rendered_message = compat_message if CURRENT_LANGUAGE == "es" else tr(compat_message)
    console.print(Panel(rendered_message, title=tr("Compatibility"), border_style=border))

    if args.files:
        limit = None if args.all else 20
        render_file_table(manifest, limit=limit)

    if args.json:
        print_manifest_json(manifest)
|
|
2349
|
+
|
|
2350
|
+
|
|
2351
|
+
def backup_existing_file(source: Path, backup_dir: Path, home_target: Path) -> Path:
    """Copy *source* (with metadata) into *backup_dir*, mirroring its layout under *home_target*."""
    destination = backup_dir / source.relative_to(home_target)
    ensure_parent(destination)
    shutil.copy2(source, destination)
    return destination
|
|
2357
|
+
|
|
2358
|
+
|
|
2359
|
+
@dataclass
class ApplyChange:
    """Record of one file write performed by `cmd_apply`, kept for rollback.

    Captures what is needed to undo the write: where it went, whether a file
    already existed there, and where the previous copy was backed up (if any).
    """

    # Destination path the bundle entry was written to.
    target_path: Path
    # True if a file already existed at target_path before the write.
    existed: bool
    # Backup copy of the pre-existing file; None when the file was new or
    # no backup directory was in use.
    backup_path: Path | None
|
|
2364
|
+
|
|
2365
|
+
|
|
2366
|
+
def cmd_apply(args) -> None:
    """Apply a bundle to a target directory.

    Flow: show the bundle card, check platform compatibility, optionally let
    the user select a subset of paths, then decrypt and write each file.
    With ``--dry-run`` only a diff is rendered. When ``transactional`` is on
    (default), every write is recorded and rolled back on failure; when
    ``verify`` is on, each written file is re-read and its SHA-256 compared
    to the manifest.
    """
    print_banner()
    manifest = manifest_from_zip(args.package)
    render_bundle_card(manifest, args.package)

    compatible, message = check_platform_compatibility(manifest)
    # Incompatibility is fatal unless the user overrides it or is only dry-running.
    if not compatible and not args.ignore_platform and not args.dry_run:
        die(message)
    if not compatible:
        console.print(f"[yellow]{'Warning' if CURRENT_LANGUAGE == 'en' else 'Aviso'}:[/yellow] {message if CURRENT_LANGUAGE == 'es' else tr(message)}")
    else:
        console.print(f"[green]{message if CURRENT_LANGUAGE == 'es' else tr(message)}[/green]")

    # Optional interactive subset selection (only on a tty, without --yes).
    # NOTE(review): both branches of the ternary below are the same English
    # string; the "es" branch was presumably meant to be Spanish — confirm.
    selected_paths = set(args.select or [])
    if not selected_paths and sys.stdin.isatty() and not args.yes and QUESTIONARY_AVAILABLE:
        if Confirm.ask("Select only some bundle paths?" if CURRENT_LANGUAGE == "es" else "Select only some bundle paths?", default=False):
            choices = [Choice(title=entry["path"], value=entry["path"], checked=True) for entry in manifest["files"]]
            chosen = checkbox_prompt("Bundle paths to apply", choices)
            if chosen is not None:
                selected_paths = set(chosen)

    # Empty selection means "apply everything".
    filtered_manifest = {
        **manifest,
        "files": [entry for entry in manifest["files"] if not selected_paths or entry["path"] in selected_paths],
    }

    if args.dry_run:
        render_file_table(filtered_manifest, limit=None)
        diff_rows = bundle_diff(filtered_manifest, args.target.expanduser(), key=load_key(args.key, create=False), package_path=args.package)
        render_diff_table(diff_rows)
        console.print(f"[bold cyan]{tr('Dry run: no se ha escrito nada.')}[/bold cyan]")
        return

    # Final interactive confirmation before touching the filesystem.
    if not args.yes and sys.stdin.isatty():
        if not Confirm.ask(tr("Apply this bundle?"), default=False):
            console.print(f"[yellow]{tr('Operacion cancelada.')}[/yellow]")
            return

    key = load_key(args.key, create=False)
    target_root = args.target.expanduser()
    backup_dir = args.backup_dir.expanduser() if args.backup_dir else None

    transactional = getattr(args, "transactional", True)
    verify_write = getattr(args, "verify", True)

    overwritten = 0
    restored = 0
    changes: list[ApplyChange] = []

    def rollback(reason: str) -> None:
        """Undo every recorded write, newest first (best effort)."""
        if not changes:
            return
        console.print(f"[yellow]{'Aviso' if CURRENT_LANGUAGE == 'es' else 'Warning'}:[/yellow] rollback: {reason}")
        # Reverse order: last write first.
        for change in reversed(changes):
            try:
                if change.existed and change.backup_path and change.backup_path.exists():
                    # Restore the pre-existing file from its backup.
                    ensure_parent(change.target_path)
                    shutil.copy2(change.backup_path, change.target_path)
                elif not change.existed:
                    # The file was newly created by us: remove it.
                    if change.target_path.exists():
                        change.target_path.unlink()
            except Exception:
                # Best effort rollback.
                pass

    total_bytes = sum(entry["size"] for entry in filtered_manifest["files"])

    # If transactional and user didn't request backups, use a temporary backup dir.
    temp_backup_ctx = TemporaryDirectory() if transactional and not backup_dir else None
    try:
        if temp_backup_ctx is not None:
            backup_dir = Path(temp_backup_ctx.name)

        if backup_dir:
            backup_dir.mkdir(parents=True, exist_ok=True)

        with ZipFile(args.package) as bundle:
            with Progress(
                SpinnerColumn(style="green"),
                TextColumn("[progress.description]{task.description}"),
                BarColumn(bar_width=30),
                TextColumn("{task.fields[file_count]}/{task.fields[file_total]} files"),
                TextColumn("{task.percentage:>3.0f}%"),
                TimeElapsedColumn(),
                TimeRemainingColumn(),
                console=console,
            ) as progress:
                task = progress.add_task(
                    "Applying",
                    total=total_bytes or len(filtered_manifest["files"]),
                    file_count=0,
                    file_total=len(filtered_manifest["files"]),
                )
                for file_entry in filtered_manifest["files"]:
                    target_path = target_root / file_entry["path"]
                    ensure_parent(target_path)

                    existed = target_path.exists()
                    backup_path = None
                    if existed and backup_dir:
                        backup_path = backup_existing_file(target_path, backup_dir, target_root)
                        overwritten += 1

                    # Track this change so we can rollback on failure.
                    changes.append(ApplyChange(target_path=target_path, existed=existed, backup_path=backup_path))

                    try:
                        encrypted = bundle.read(file_entry["payload"])
                        payload = decrypt_payload(encrypted, file_entry, key)
                        raw = inflate_payload(payload, file_entry.get("compression"))
                    except ValueError:
                        # Decryption failure: wrong key for this package.
                        if transactional:
                            rollback(tr("La clave no coincide con el paquete."))
                        die(tr("La clave no coincide con el paquete."))

                    try:
                        target_path.write_bytes(raw)
                    except Exception as exc:
                        if transactional:
                            rollback(f"write failed: {exc}")
                        raise

                    if verify_write:
                        # Read the file back and compare its hash to the manifest.
                        try:
                            written = target_path.read_bytes()
                        except Exception as exc:
                            if transactional:
                                rollback(f"verify read failed: {exc}")
                            raise
                        if hashlib.sha256(written).hexdigest() != file_entry["sha256"]:
                            msg = f"Hash mismatch after write: {file_entry['path']}"
                            if transactional:
                                rollback(msg)
                            die(msg)

                    # Restoring the original mode may fail on some filesystems;
                    # that is tolerated.
                    try:
                        target_path.chmod(file_entry["mode"])
                    except OSError:
                        pass
                    restored += 1
                    progress.update(task, advance=file_entry["size"], file_count=restored)

    except Exception as exc:
        if transactional:
            rollback(f"exception: {exc}")
        raise
    finally:
        if temp_backup_ctx is not None:
            temp_backup_ctx.cleanup()

    # Summary panel plus the bundle's post-apply checklist, if any.
    footer = Table.grid(padding=(0, 2))
    footer.add_row("Target", str(target_root))
    footer.add_row("Restored", str(restored))
    footer.add_row("Backups", str(overwritten if backup_dir else 0))
    if backup_dir:
        footer.add_row("Backup dir", str(backup_dir))
    post_apply = manifest["bundle"].get("post_apply") or []
    if post_apply:
        footer.add_row("Post apply", str(len(post_apply)))
    console.print(Panel(footer, title=f"[bold bright_green]{tr('Apply Summary')}[/bold bright_green]", border_style="green"))
    if post_apply:
        console.print(f"[bold cyan]{tr('Post-apply checklist')}[/bold cyan]")
        for step in post_apply:
            console.print(f"- {step}")
|
|
2531
|
+
|
|
2532
|
+
|
|
2533
|
+
def cmd_diff(args) -> None:
    """Compare a bundle against a target directory and show the differences."""
    print_banner()
    manifest = manifest_from_zip(args.package)
    key = load_key(args.key, create=False) if not args.no_hash else None
    rows = bundle_diff(
        manifest,
        args.target.expanduser(),
        key=key,
        package_path=args.package if key else None,
    )
    if getattr(args, "json", False):
        payload = [{"status": status, "path": path} for status, path in rows]
        print(json.dumps(payload, indent=2))
    else:
        render_diff_table(rows)
|
|
2544
|
+
|
|
2545
|
+
|
|
2546
|
+
def cmd_verify(args) -> None:
    """Verify a bundle's integrity.

    Shallow check: every payload referenced by the manifest exists inside the
    ZIP. Deep check (``--deep``): decrypt every payload and compare its
    SHA-256 against the manifest. Exits with status 1 when any issue is found;
    ``--json`` prints a machine-readable result instead of the styled output.
    """
    manifest = manifest_from_zip(args.package)
    issues: list[str] = []
    try:
        with ZipFile(args.package) as bundle:
            # Hoisted out of the loop: namelist() rebuilds the full list on
            # every call, so test membership against a set built once (O(1)
            # per entry instead of O(n)).
            payload_names = set(bundle.namelist())
            for file_entry in manifest["files"]:
                if file_entry["payload"] not in payload_names:
                    issues.append(f"Missing payload: {file_entry['payload']}")
    except OSError as exc:
        issues.append(str(exc))

    if args.deep:
        key = load_key(args.key, create=False)
        try:
            contents = read_bundle_content(args.package, manifest, key)
            for file_entry in manifest["files"]:
                payload = contents[file_entry["path"]]
                if hashlib.sha256(payload).hexdigest() != file_entry["sha256"]:
                    issues.append(f"Hash mismatch: {file_entry['path']}")
        except ValueError:
            # Decryption failed: wrong key for this package.
            issues.append("La clave no coincide con el paquete.")

    result = {"ok": not issues, "issues": issues}
    if getattr(args, "json", False):
        print(json.dumps(result, indent=2))
        if issues:
            raise SystemExit(1)
        return
    if issues:
        console.print(f"[bold red]{tr('Verificacion fallida')}[/bold red]")
        for issue in issues:
            console.print(f"- {issue}")
        raise SystemExit(1)
    console.print(f"[bold green]{tr('Verificacion OK')}[/bold green]")
|
|
2580
|
+
|
|
2581
|
+
|
|
2582
|
+
def render_settings_table(settings: dict) -> None:
    """Render the current Peridot settings as a table plus a compression summary."""
    compression_level = sanitize_compression_level(settings.get("compression_level"))
    jobs = sanitize_jobs(settings.get("jobs"))
    language = sanitize_language(settings.get("language"))

    table = Table(title=tr("Settings"), header_style="bold cyan")
    for column_name in ("Key", "Value", "Detail"):
        table.add_column(column_name)

    level_detail = (
        f"{render_level_bar(compression_level)} {active_compression_codec()} | "
        f"{tr(compression_profile_detail(compression_level))}"
    )
    table.add_row("compression_level", f"{compression_level}/9", level_detail)
    table.add_row("jobs", str(jobs), tr("workers para pack; mas puede ir mas rapido si hay CPU libre"))
    table.add_row("encryption", ENCRYPTION_ALGORITHM, tr("cifrado fijo: rapido, moderno y estandar"))
    table.add_row("language", language, tr("preparado para internacionalizacion CLI"))

    console.print(table)
    console.print(render_compression_setting(compression_level))
|
|
2600
|
+
|
|
2601
|
+
|
|
2602
|
+
def interactive_settings_editor(settings_path: Path = DEFAULT_SETTINGS_STORE) -> dict:
    """Interactively edit and persist Peridot settings.

    Prompts for compression level, pack workers and language. Returns the
    saved settings, or the untouched on-disk settings if the user declines
    to save.
    """
    settings = load_settings(settings_path)
    print_banner()
    render_settings_table(settings)
    console.print(f"[dim]{tr('Compression: 0 = mas rapido y mas grande, 9 = mas lento y mas pequeno.')}[/dim]")
    raw_level = Prompt.ask(tr("Compression level"), default=str(settings["compression_level"]))
    settings["compression_level"] = sanitize_compression_level(raw_level)
    console.print(render_compression_setting(settings["compression_level"]))
    cpu_total = os.cpu_count() or DEFAULT_JOBS
    raw_jobs = Prompt.ask(tr("Pack workers"), default=str(settings["jobs"]))
    settings["jobs"] = sanitize_jobs(raw_jobs)
    console.print(f"[dim]{trf('CPU detectada: {cpu} | workers activos: {jobs}', cpu=cpu_total, jobs=settings['jobs'])}[/dim]")
    settings["language"] = sanitize_language(
        Prompt.ask(tr("Language"), choices=["es", "en"], default=settings["language"])
    )
    if not Confirm.ask(tr("Save settings?"), default=True):
        # Discard edits: re-read whatever is on disk.
        console.print(f"[yellow]{tr('Operacion cancelada.')}[/yellow]")
        return load_settings(settings_path)
    save_settings(settings, settings_path)
    # Apply the (possibly changed) language immediately to this process.
    set_current_language(settings["language"])
    console.print(f"[bold green]{trf('Settings saved {path}', path=settings_path)}[/bold green]")
    return settings
|
|
2624
|
+
|
|
2625
|
+
|
|
2626
|
+
def cmd_settings(args) -> None:
    """Settings command: show, set key=value pairs, or open the interactive editor.

    ``--show`` renders the current settings; ``--set clave=valor`` updates one
    of the supported keys (compression_level, jobs, language); with neither,
    the interactive editor runs.
    """
    settings_path = getattr(args, "settings_path", DEFAULT_SETTINGS_STORE)
    if getattr(args, "show", False):
        render_settings_table(load_settings(settings_path))
        return

    if getattr(args, "set", []):
        settings = load_settings(settings_path)
        for item in args.set:
            if "=" not in item:
                die(f"Formato invalido en setting '{item}'. Usa clave=valor.")
            key, value = item.split("=", 1)
            key = key.strip()
            value = value.strip()
            # Each supported key has its own sanitizer; anything else is fatal.
            if key == "compression_level":
                settings[key] = sanitize_compression_level(value)
            elif key == "jobs":
                settings[key] = sanitize_jobs(value)
            elif key == "language":
                settings[key] = sanitize_language(value)
            else:
                die(f"Setting no soportado: {key}")
        save_settings(settings, settings_path)
        # Apply the language change immediately to this process.
        set_current_language(settings["language"])
        render_settings_table(settings)
        console.print(f"[bold green]{trf('Settings updated {path}', path=settings_path)}[/bold green]")
        return

    interactive_settings_editor(settings_path)
|
|
2655
|
+
|
|
2656
|
+
|
|
2657
|
+
def cmd_init(args) -> None:
    """Initialize Peridot local state (key + settings) with sane defaults."""

    print_banner()

    key_path: Path = getattr(args, "key", DEFAULT_KEY)
    settings_path: Path = DEFAULT_SETTINGS_STORE

    # Ensure key exists.
    key = load_key(key_path, create=True)

    # Ensure settings exist (or overwrite with --force).
    if settings_path.exists() and not getattr(args, "force", False):
        settings = load_settings(settings_path)
        console.print(f"[dim]Settings already exist at {settings_path}[/dim]")
    else:
        ensure_parent(settings_path)
        # Copy the defaults so the stored dict is independent of DEFAULT_SETTINGS.
        save_settings({**DEFAULT_SETTINGS}, settings_path)
        settings = load_settings(settings_path)
        console.print(f"[green]Created settings at {settings_path}[/green]")

    # Summary panel of the freshly initialized state.
    footer = Table.grid(padding=(0, 2))
    footer.add_row("Key", str(key_path))
    footer.add_row("Fingerprint", fingerprint_key(key))
    footer.add_row("Settings", str(settings_path))
    footer.add_row("Language", str(settings.get("language")))
    footer.add_row("Compression", f"{settings.get('compression_level')}/9 ({active_compression_codec()})")
    footer.add_row("Jobs", str(settings.get("jobs")))

    console.print(Panel(footer, title=tr("Peridot initialized"), border_style="green"))

    # Suggested follow-up commands for a new user.
    console.print("\n" + tr("Next steps") + ":")
    console.print("- peridot pack --help")
    console.print("- peridot ui")
    console.print("- peridot bench --files 200 --size-kb 4 --levels 0,1,3 --runs 1")
|
|
2692
|
+
|
|
2693
|
+
|
|
2694
|
+
def cmd_doctor(args) -> None:
    """Run environment health checks and render them as a status table.

    Each check is a (name, status, detail) tuple; status is one of
    ok/missing/warn/high/empty/default and drives the row color. With
    ``--json`` the rows are printed as JSON instead.
    """
    rows = []
    rows.append(("key", "ok" if args.key.exists() else "missing", str(args.key)))
    bundles = discover_local_bundles()
    rows.append(("bundles", "ok" if bundles else "empty", str(len(bundles))))
    rows.append(("profiles", "ok" if DEFAULT_PROFILE_STORE.exists() else "empty", str(DEFAULT_PROFILE_STORE)))
    settings = load_settings()
    rows.append(("settings", "ok" if DEFAULT_SETTINGS_STORE.exists() else "default", str(DEFAULT_SETTINGS_STORE)))
    rows.append(("compression_level", "ok", f"{settings['compression_level']}/9"))
    # zstd being absent is only a warning: a fallback codec is reported.
    rows.append(("compression_codec", "ok" if zstd is not None else "warn", active_compression_codec()))
    rows.append(("jobs", "ok", str(settings["jobs"])))
    rows.append(("encryption", "ok", ENCRYPTION_ALGORITHM))
    rows.append(("language", "ok", settings["language"]))
    # Memory checks; the helpers may return None/0 when unavailable.
    total_mem = total_memory_bytes()
    avail_mem = available_memory_bytes()
    pressure = memory_pressure_ratio()
    if total_mem:
        rows.append(("memory_total", "ok", format_bytes(total_mem)))
    if avail_mem:
        # Less than 512 MiB available is flagged.
        rows.append(("memory_available", "ok" if avail_mem > 512 * 1024 * 1024 else "warn", format_bytes(avail_mem)))
    if pressure is None:
        rows.append(("memory_pressure", "warn", "no se pudo calcular"))
    else:
        # Bucket the pressure ratio into cool/normal/warm/high/critical bands.
        if pressure < 0.60:
            status = "ok"
            detail = f"{pressure * 100:.0f}% (cool)"
        elif pressure < 0.72:
            status = "ok"
            detail = f"{pressure * 100:.0f}% (normal)"
        elif pressure < 0.84:
            status = "warn"
            detail = f"{pressure * 100:.0f}% (warm)"
        elif pressure < 0.92:
            status = "high"
            detail = f"{pressure * 100:.0f}% (high)"
        else:
            status = "high"
            detail = f"{pressure * 100:.0f}% (critical)"
        rows.append(("memory_pressure", status, detail))
    # Exact-match scan of PATH entries for ~/.local/bin.
    path_ok = any(str(Path.home() / ".local" / "bin") == part for part in os.environ.get("PATH", "").split(os.pathsep))
    rows.append(("path", "ok" if path_ok else "warn", "~/.local/bin in PATH" if path_ok else "~/.local/bin not in PATH"))
    checkbox_reason = checkbox_unavailable_reason()
    if checkbox_reason is None:
        rows.append(("checkbox_ui", "ok", "questionary + tty available"))
    elif checkbox_reason == "missing_questionary":
        rows.append(("checkbox_ui", "warn", "missing questionary in current Python"))
    else:
        rows.append(("checkbox_ui", "warn", "no interactive tty"))
    if getattr(args, "json", False):
        print(json.dumps([{"check": name, "status": status, "detail": detail} for name, status, detail in rows], indent=2))
        return
    table = Table(title=tr("Doctor"), header_style="bold cyan")
    table.add_column(tr("Check"))
    table.add_column(tr("Status"))
    table.add_column(tr("Detail"))
    for name, status, detail in rows:
        # Map each status to a display color; unknown statuses render white.
        style = {"ok": "green", "missing": "red", "warn": "yellow", "high": "red", "empty": "yellow"}.get(status, "white")
        table.add_row(name, f"[{style}]{status}[/{style}]", detail)
    console.print(table)
|
|
2753
|
+
|
|
2754
|
+
|
|
2755
|
+
def cmd_share(args) -> None:
    """Export a bundle's manifest as JSON or Markdown, to a file or stdout.

    Only metadata is exported — no payload content leaves the package.
    """
    manifest = manifest_from_zip(args.package)
    bundle = manifest["bundle"]
    if args.format == "json":
        output = json.dumps(manifest, indent=2, sort_keys=True) + "\n"
    elif args.format == "md":
        # Build a small Markdown summary: header, target, stats, file list.
        lines = [
            f"# {bundle['name']}",
            "",
            bundle.get("description") or "",
            "",
            f"- Target: {bundle['platform']['os']} / {bundle['platform'].get('shell') or 'any'} / {bundle['platform'].get('arch') or 'any'}",
            f"- Files: {bundle['stats']['files']}",
            f"- Payload: {format_bytes(bundle['stats']['bytes'])}",
            "",
            "## Files",
        ]
        lines.extend(f"- `{entry['path']}`" for entry in manifest["files"])
        output = "\n".join(lines) + "\n"
    else:
        die("Formato no soportado en modo 100% CLI. Usa json o md.")

    if args.output:
        args.output.write_text(output)
        console.print(f"[bold green]Exported[/bold green] {args.output}")
    else:
        # `output` already ends with a newline; avoid printing a second one.
        print(output, end="")
|
|
2782
|
+
|
|
2783
|
+
|
|
2784
|
+
def cmd_history(args) -> None:
    """List the history snapshots stored for a bundle, with their sizes."""
    history_root = DEFAULT_HISTORY_DIR / args.bundle
    table = Table(title=tr("History"), header_style="bold cyan")
    table.add_column("Snapshot")
    table.add_column("Size", justify="right")
    if not history_root.exists():
        table.add_row("No snapshots", "-")
    else:
        for snapshot in sorted(history_root.glob("*.peridot")):
            table.add_row(snapshot.name, format_bytes(snapshot.stat().st_size))
    console.print(table)
|
|
2795
|
+
|
|
2796
|
+
|
|
2797
|
+
def cmd_merge(args) -> None:
    """Merge several bundles into one new bundle encrypted with the same key.

    On path collisions, later packages win: entries overwrite the
    ``merged_files`` dict in argument order.
    """
    key = load_key(args.key, create=False)
    merged_files: dict[str, dict] = {}
    bundle_name = args.name or "merged-bundle"
    notes = []
    for package in args.packages:
        manifest = manifest_from_zip(package)
        notes.append(f"merged from {package.name}")
        for file_entry in manifest["files"]:
            # Decrypt each payload back to raw bytes so it can be re-packed.
            raw = decode_package_payload(package, file_entry, key)
            merged_files[file_entry["path"]] = {
                "raw": raw,
                "mode": file_entry["mode"],
            }
    if not merged_files:
        die("No hay archivos para fusionar.")
    # CLI overrides take precedence; otherwise fall back to host detection.
    write_bundle_from_raw(
        output=args.output,
        bundle_name=bundle_name,
        description=args.description or "Merged bundle",
        platform=args.platform or normalize_os_name(),
        shell=args.shell or detect_shell(),
        arch=args.arch or platform.machine().lower(),
        tags=normalize_tags(args.tags),
        notes="\n".join(notes),
        after_steps=[],
        files=merged_files,
        key=key,
    )
|
|
2826
|
+
|
|
2827
|
+
|
|
2828
|
+
def cmd_split(args) -> None:
    """Extract a subset of a bundle (by path prefix) into a new bundle.

    Files whose path starts with any of the given prefixes are decrypted and
    re-packed; the new bundle inherits the source bundle's metadata.
    """
    key = load_key(args.key, create=False)
    manifest = manifest_from_zip(args.package)
    # Keep an entry if it matches at least one of the requested prefixes.
    selected = [entry for entry in manifest["files"] if any(entry["path"].startswith(prefix) for prefix in args.prefix)]
    if not selected:
        die("No se ha seleccionado ningun archivo para extraer.")
    files: dict[str, dict] = {}
    for file_entry in selected:
        files[file_entry["path"]] = {
            "raw": decode_package_payload(args.package, file_entry, key),
            "mode": file_entry["mode"],
        }
    # Metadata is carried over from the source bundle unless overridden.
    write_bundle_from_raw(
        output=args.output,
        bundle_name=args.name or f"{manifest['bundle']['name']} split",
        description=args.description or "Split bundle",
        platform=manifest["bundle"]["platform"]["os"],
        shell=manifest["bundle"]["platform"].get("shell") or "",
        arch=manifest["bundle"]["platform"].get("arch") or "",
        tags=manifest["bundle"].get("tags") or [],
        notes=manifest["bundle"].get("notes") or "",
        after_steps=manifest["bundle"].get("post_apply") or [],
        files=files,
        key=key,
    )
|
|
2853
|
+
|
|
2854
|
+
|
|
2855
|
+
def cmd_profile_save(args) -> None:
    """Persist the given CLI options as a named, reusable pack profile."""
    profiles = load_profiles()
    # Build the profile record first, then store it under the chosen name.
    record = {
        "name": args.bundle_name,
        "description": args.description,
        "platform": args.platform,
        "shell": args.shell,
        "arch": args.arch,
        "tags": normalize_tags(args.tags),
        "preset": args.preset,
        "paths": args.paths,
        "exclude": args.exclude,
        "notes": args.notes,
        "after_steps": args.after_steps,
    }
    profiles[args.name] = record
    save_profiles(profiles)
    console.print(f"[bold green]{trf('Profile saved {name}', name=args.name)}[/bold green]")
|
|
2872
|
+
|
|
2873
|
+
|
|
2874
|
+
def cmd_profile_list(args) -> None:
    """Render every saved profile in a table; shows a placeholder row when empty."""
    profiles = load_profiles()
    table = Table(title=tr("Profiles"), header_style="bold cyan")
    for heading in ("Name", "Target", "Preset"):
        table.add_column(heading)
    if profiles:
        # Stable alphabetical listing by profile name.
        for profile_name in sorted(profiles):
            entry = profiles[profile_name]
            target = f"{entry.get('platform') or 'any'} / {entry.get('shell') or 'any'}"
            table.add_row(profile_name, target, entry.get("preset") or "-")
    else:
        table.add_row("No profiles", "-", "-")
    console.print(table)
|
|
2885
|
+
|
|
2886
|
+
|
|
2887
|
+
def cmd_profile_show(args) -> None:
    """Print a saved profile as JSON.

    Exits via die() when the profile does not exist. Uses a membership test
    (not truthiness) so a profile saved with all-empty fields is still shown,
    consistent with cmd_profile_delete's existence check.
    """
    profiles = load_profiles()
    if args.name not in profiles:
        die(f"No existe el perfil '{args.name}'")
    console.print_json(data=profiles[args.name])
|
|
2893
|
+
|
|
2894
|
+
|
|
2895
|
+
def cmd_profile_delete(args) -> None:
    """Remove a saved profile by name; exits via die() when it is absent."""
    profiles = load_profiles()
    if args.name not in profiles:
        die(f"No existe el perfil '{args.name}'")
    profiles.pop(args.name)
    save_profiles(profiles)
    console.print(f"[bold green]{trf('Profile deleted {name}', name=args.name)}[/bold green]")
|
|
2902
|
+
|
|
2903
|
+
|
|
2904
|
+
def resolve_package_list(package_inputs: list[str] | None, use_local: bool = False) -> list[Path]:
|
|
2905
|
+
packages: list[Path] = []
|
|
2906
|
+
seen: set[Path] = set()
|
|
2907
|
+
if use_local:
|
|
2908
|
+
for bundle in discover_local_bundles():
|
|
2909
|
+
if bundle not in seen:
|
|
2910
|
+
seen.add(bundle)
|
|
2911
|
+
packages.append(bundle)
|
|
2912
|
+
for package_input in package_inputs or []:
|
|
2913
|
+
candidate = Path(package_input).expanduser()
|
|
2914
|
+
if package_input.isdigit():
|
|
2915
|
+
bundles = discover_local_bundles()
|
|
2916
|
+
index = int(package_input) - 1
|
|
2917
|
+
if 0 <= index < len(bundles):
|
|
2918
|
+
candidate = bundles[index]
|
|
2919
|
+
if candidate not in seen:
|
|
2920
|
+
seen.add(candidate)
|
|
2921
|
+
packages.append(candidate)
|
|
2922
|
+
return packages
|
|
2923
|
+
|
|
2924
|
+
|
|
2925
|
+
def reencrypt_package(package_path: Path, old_key: bytes, new_key: bytes) -> None:
    """Re-encrypt every payload of a .peridot package under ``new_key``.

    Each entry is decrypted with ``old_key``, re-encrypted with a fresh nonce
    under ``new_key``, the manifest's per-file nonce metadata is updated, and
    the rebuilt archive atomically replaces the original.

    Raises on decryption failure (e.g. wrong ``old_key``); in that case the
    original package is left untouched and the partial temp file is removed
    (the original code leaked the stale ``.tmp`` file beside the package).
    """
    temp_path = package_path.with_suffix(package_path.suffix + ".tmp")
    try:
        with ZipFile(package_path) as source_zip:
            manifest = json.loads(source_zip.read("manifest.json"))
            # Payloads are already compressed+encrypted, so store them raw.
            with ZipFile(temp_path, "w", compression=ZIP_STORED) as target_zip:
                for file_entry in manifest["files"]:
                    encrypted = source_zip.read(file_entry["payload"])
                    payload = decrypt_payload(encrypted, file_entry, old_key)
                    # Fresh 96-bit nonce per payload: AES-GCM nonces must
                    # never be reused under the same key.
                    nonce = os.urandom(12)
                    reencrypted = AESGCM(new_key).encrypt(nonce, payload, None)
                    file_entry["encryption"] = {"algorithm": ENCRYPTION_ALGORITHM, "nonce": nonce.hex()}
                    target_zip.writestr(file_entry["payload"], reencrypted)
                target_zip.writestr("manifest.json", json.dumps(manifest, indent=2, sort_keys=True) + "\n", compress_type=ZIP_DEFLATED)
    except BaseException:
        # Do not leave a stale/partial .tmp archive behind on failure.
        temp_path.unlink(missing_ok=True)
        raise
    temp_path.replace(package_path)
|
|
2939
|
+
|
|
2940
|
+
|
|
2941
|
+
def cmd_delete(args) -> None:
    """Delete the selected .peridot packages, confirming interactively unless -y."""
    packages = resolve_package_list(args.packages, use_local=args.all_local)
    if not packages:
        die("No hay paquetes para eliminar.")
    interactive = not args.yes and sys.stdin.isatty()
    if interactive:
        render_local_bundle_table()
        if not Confirm.ask(f"Delete {len(packages)} package(s)?", default=False):
            console.print(f"[yellow]{tr('Operacion cancelada.')}[/yellow]")
            return
    removed = 0
    for package in packages:
        # Already-missing packages are skipped silently.
        if package.exists():
            package.unlink()
            removed += 1
    console.print(f"[bold green]{trf('Deleted {count} package(s)', count=removed)}[/bold green]")
|
|
2956
|
+
|
|
2957
|
+
|
|
2958
|
+
def cmd_rekey(args) -> None:
    """Generate a fresh AES-GCM key and re-encrypt the selected packages with it.

    The old key is backed up to ``<key>.bak`` unless ``--no-backup``; the new
    key replaces ``args.key`` only after every package migrated successfully.

    Fix over the original: if migration raised mid-loop, the freshly generated
    key was never persisted anywhere, leaving the already-reencrypted packages
    permanently undecryptable. On failure after at least one migration, the
    new key is now saved to ``<key>.new`` before re-raising.
    """
    packages = resolve_package_list(args.packages, use_local=args.all_local)
    if not packages:
        die("No hay paquetes para migrar. Pasa paquetes o usa --all-local.")

    old_key = load_key(args.key, create=False)
    new_key = AESGCM.generate_key(bit_length=256)
    backup_key_path = args.key.with_suffix(args.key.suffix + ".bak")

    if not args.yes and sys.stdin.isatty():
        render_local_bundle_table()
        if not Confirm.ask(f"Re-encrypt {len(packages)} package(s) with a new key?", default=False):
            console.print(f"[yellow]{tr('Operacion cancelada.')}[/yellow]")
            return

    migrated = 0
    try:
        with Progress(
            SpinnerColumn(style="green"),
            TextColumn("[progress.description]{task.description}"),
            BarColumn(bar_width=30),
            TextColumn("{task.completed}/{task.total}"),
            TimeElapsedColumn(),
            console=console,
        ) as progress:
            task = progress.add_task("Rekeying", total=len(packages))
            for package in packages:
                reencrypt_package(package, old_key, new_key)
                migrated += 1
                progress.advance(task)
    except BaseException:
        if migrated:
            # Some packages already use the new key: persist it so they stay
            # recoverable even though the migration did not complete.
            recovery_path = args.key.with_suffix(args.key.suffix + ".new")
            write_key(recovery_path, new_key)
            console.print(
                f"[red]Rekey interrupted after {migrated} package(s); new key saved to {recovery_path}[/red]"
            )
        raise

    # Persist keys only after a fully successful migration.
    if not args.no_backup:
        write_key(backup_key_path, old_key)
    write_key(args.key, new_key)

    table = Table.grid(padding=(0, 2))
    table.add_row("Packages", str(migrated))
    table.add_row("New key", str(args.key))
    table.add_row("Backup key", str(backup_key_path if not args.no_backup else "disabled"))
    console.print(Panel(table, title=f"[bold bright_green]{tr('Rekey Summary')}[/bold bright_green]", border_style="green"))
|
|
2997
|
+
|
|
2998
|
+
|
|
2999
|
+
def cmd_manifest(args) -> None:
    """Print the manifest of a .peridot package as JSON."""
    print_manifest_json(manifest_from_zip(args.package))
|
|
3002
|
+
|
|
3003
|
+
|
|
3004
|
+
def cmd_catalog(args) -> None:
    """List the detectable config groups for a platform, marking the defaults."""
    print_banner()
    os_name = normalize_os_name(args.platform or normalize_os_name())
    shell_name = args.shell or detect_shell()
    groups = config_groups_for_os(os_name)
    recommended = recommended_group_keys(groups, shell_name)
    render_config_group_table(groups, recommended, marker_label="Default")
    catalog_hint = "'x' marca los grupos recomendados por defecto para esta plataforma/shell. No significa que ya vayan dentro de ningun bundle."
    console.print(f"[dim]{tr(catalog_hint)}[/dim]")
|
|
3012
|
+
|
|
3013
|
+
|
|
3014
|
+
def cmd_ui(args) -> None:
    """Interactive command center: loop of render → prompt → dispatch.

    Each iteration clears the screen, redraws the bundle table and action
    hub, then routes the chosen action to the corresponding cmd_* handler
    with a SimpleNamespace standing in for parsed argparse args. SystemExit
    from a handler (e.g. die()) is swallowed so the UI keeps running.
    """
    while True:
        console.clear()
        print_banner()
        render_local_bundle_table()
        render_action_hub()
        action = prompt_action_choice()
        # When True, skip the "press enter" pause at the bottom of the loop.
        skip_return_prompt = False

        try:
            if action == "quit":
                console.print(f"[dim]{tr('Leaving Peridot UI.')}[/dim]")
                return
            if action == "catalog":
                cmd_catalog(SimpleNamespace(platform="", shell=""))
            # NOTE(review): this `if` breaks the elif chain above — harmless
            # today (no action matches two branches) but likely meant `elif`.
            if action == "presets":
                render_presets_table()
            elif action == "pack":
                # Empty/None fields let cmd_pack fall back to its own
                # interactive prompts and settings defaults.
                cmd_pack(
                    SimpleNamespace(
                        key=args.key,
                        name=None,
                        paths=[],
                        output=None,
                        description="",
                        platform="",
                        shell="",
                        arch="",
                        tags=[],
                        preset="",
                        profile="",
                        exclude=[],
                        notes="",
                        after_steps=[],
                        compression_level=None,
                        jobs=None,
                        language=None,
                        yes=False,
                    )
                )
            elif action == "inspect":
                package = choose_bundle_path(tr("Inspeccionar"))
                show_files = Confirm.ask(tr("Mostrar lista de ficheros?"), default=True)
                show_json = Confirm.ask(tr("Mostrar manifest JSON?"), default=False)
                cmd_inspect(SimpleNamespace(package=package, files=show_files, all=True, json=show_json))
            elif action == "apply":
                package = choose_bundle_path(tr("Aplicar"))
                dry_run = Confirm.ask(tr("Hacer dry-run primero?"), default=True)
                target = Path(Prompt.ask(tr("Directorio destino"), default=str(Path.home()))).expanduser()
                backup_enabled = Confirm.ask(tr("Guardar backups antes de sobrescribir?"), default=True)
                backup_dir = None
                if backup_enabled:
                    backup_dir = Path(
                        Prompt.ask(
                            tr("Directorio de backups"),
                            default=str(Path.home() / ".peridot-backups"),
                        )
                    ).expanduser()
                ignore_platform = Confirm.ask(tr("Ignorar mismatch de plataforma?"), default=False)
                # yes=True: confirmation already happened via the prompts above.
                cmd_apply(
                    SimpleNamespace(
                        package=package,
                        target=target,
                        backup_dir=backup_dir,
                        dry_run=dry_run,
                        ignore_platform=ignore_platform,
                        yes=True,
                        key=args.key,
                    )
                )
            elif action == "diff":
                package = choose_bundle_path(tr("Diff"))
                target = Path(Prompt.ask(tr("Directorio destino"), default=str(Path.home()))).expanduser()
                cmd_diff(SimpleNamespace(package=package, target=target, no_hash=False, json=False, key=args.key))
            elif action == "verify":
                package = choose_bundle_path(tr("Verificar"))
                deep = Confirm.ask(tr("Verificacion profunda (descifrar)?"), default=True)
                cmd_verify(SimpleNamespace(package=package, deep=deep, json=False, key=args.key))
            elif action == "doctor":
                cmd_doctor(SimpleNamespace(key=args.key, json=False))
            elif action == "share":
                package = choose_bundle_path(tr("Compartir"))
                fmt = Prompt.ask(tr("Formato"), choices=["md", "json"], default="md")
                output_raw = Prompt.ask(tr("Fichero de salida (vacio = imprimir)"), default="")
                # Empty answer means "print to console" rather than a file.
                output = Path(output_raw).expanduser() if output_raw else None
                cmd_share(SimpleNamespace(package=package, format=fmt, output=output))
            elif action == "manifest":
                package = choose_bundle_path("Manifest")
                cmd_manifest(SimpleNamespace(package=package))
            elif action == "history":
                # Default to the first locally discovered bundle name, if any.
                bundle_name = Prompt.ask(tr("Nombre del bundle"), default=(discover_local_bundles()[0].stem if discover_local_bundles() else "bundle"))
                cmd_history(SimpleNamespace(bundle=bundle_name))
            elif action == "profile":
                profile_action = Prompt.ask(tr("Accion de perfil"), choices=["list", "show", "delete"], default="list")
                if profile_action == "list":
                    cmd_profile_list(SimpleNamespace())
                elif profile_action == "show":
                    name = Prompt.ask(tr("Nombre del perfil"))
                    cmd_profile_show(SimpleNamespace(name=name))
                else:
                    name = Prompt.ask(tr("Nombre del perfil"))
                    cmd_profile_delete(SimpleNamespace(name=name))
            elif action == "settings":
                cmd_settings(SimpleNamespace(settings_path=DEFAULT_SETTINGS_STORE, show=False, set=[]))
                # Settings runs its own interactive flow; no extra pause needed.
                skip_return_prompt = True
            elif action == "keygen":
                cmd_keygen(SimpleNamespace(key=args.key))
            elif action == "rekey":
                all_local = Confirm.ask(tr("Rekey todos los bundles locales?"), default=True)
                packages = [] if all_local else [str(path) for path in choose_bundle_paths(tr("Rekey"))]
                cmd_rekey(SimpleNamespace(key=args.key, packages=packages, all_local=all_local, no_backup=False, yes=True))
            elif action == "delete":
                all_local = Confirm.ask(tr("Borrar todos los bundles locales?"), default=False)
                packages = [] if all_local else [str(path) for path in choose_bundle_paths(tr("Borrar"))]
                cmd_delete(SimpleNamespace(packages=packages, all_local=all_local, yes=True))
        except SystemExit:
            # die() raises SystemExit; stay in the UI instead of exiting.
            pass

        if skip_return_prompt:
            continue
        Prompt.ask(tr("Press enter to return to the command center"), default="")
|
|
3135
|
+
|
|
3136
|
+
|
|
3137
|
+
def build_parser() -> argparse.ArgumentParser:
    """Build the complete peridot argparse CLI.

    Defines the global --key/--version options, every subcommand (pack,
    bench, inspect, apply, diff, verify, doctor, share, merge, split,
    history, manifest, delete, rekey, catalog, profile, settings, keygen,
    init, ui) plus the export/import aliases of pack/apply, then localizes
    all help text before returning the parser. Each subparser binds its
    handler via set_defaults(func=...), dispatched by main().
    """
    parser = argparse.ArgumentParser(
        prog="peridot",
        description=tr("Empaqueta, inspecciona y aplica bundles de configuracion .peridot"),
    )
    # Global options shared by every subcommand.
    parser.add_argument(
        "--key",
        type=Path,
        default=DEFAULT_KEY,
        help=trf("Ruta de la clave AES-GCM (por defecto: {path})", path=DEFAULT_KEY),
    )
    parser.add_argument(
        "-V",
        "--version",
        action="version",
        version=f"%(prog)s {APP_VERSION}",
        help=tr("Muestra la version y sale"),
    )

    subparsers = parser.add_subparsers(dest="command", required=True)

    # --- pack: create a bundle ---
    pack_parser = subparsers.add_parser("pack", help="Crea un paquete .peridot")
    pack_parser.add_argument("name", nargs="?", help="Nombre del bundle")
    pack_parser.add_argument("paths", nargs="*", help="Rutas a incluir. Si no se indican, usa las rutas por defecto del sistema.")
    pack_parser.add_argument("--output", type=Path, help="Ruta del paquete de salida")
    pack_parser.add_argument("--description", default="", help="Descripcion corta del bundle")
    pack_parser.add_argument("--platform", default="", help="SO objetivo: macos, linux, windows o any")
    pack_parser.add_argument("--shell", default="", help="Shell o runtime principal: fish, zsh, powershell, bash...")
    pack_parser.add_argument("--arch", default="", help="Arquitectura objetivo: arm64, x86_64 o any")
    pack_parser.add_argument("--tag", dest="tags", action="append", default=[], help="Tag del bundle. Repetible.")
    pack_parser.add_argument("--preset", default="", help="Preset de dotfiles: macos-fish, macos-zsh, linux-zsh, linux-bash, windows-powershell")
    pack_parser.add_argument("--profile", default="", help="Perfil guardado para reutilizar configuracion")
    pack_parser.add_argument("--exclude", action="append", default=[], help="Glob a excluir. Repetible.")
    pack_parser.add_argument("--notes", default="", help="Notas del bundle")
    pack_parser.add_argument("--after-step", dest="after_steps", action="append", default=[], help="Paso post-apply. Repetible.")
    pack_parser.add_argument("--compression-level", type=int, default=None, choices=range(0, 10), help="Nivel de compresion Peridot: zstd si esta disponible, gzip como fallback. 0 rapido, 9 pequeno. Si no se pasa, usa settings.")
    pack_parser.add_argument("--jobs", type=int, default=None, help="Numero de workers para pack. Si no se pasa, usa settings.")
    pack_parser.add_argument("-y", "--yes", action="store_true", help="Aceptar avisos sensibles sin confirmacion")
    pack_parser.set_defaults(func=cmd_pack)

    # --- bench: synthetic pack benchmark ---
    bench_parser = subparsers.add_parser("bench", help="Benchmark rapido de pack (tiempo + tamano)")
    bench_parser.add_argument("--files", type=int, default=200, help="Numero de ficheros sinteticos")
    bench_parser.add_argument("--size-kb", type=int, default=4, help="Tamano por fichero (KB)")
    bench_parser.add_argument("--runs", type=int, default=1, help="Repeticiones por configuracion")
    bench_parser.add_argument("--levels", default="0,1,3", help="Niveles de compresion separados por coma")
    bench_parser.add_argument("--jobs", type=int, default=DEFAULT_JOBS, help="Workers para pack")
    bench_parser.add_argument("--json", action="store_true", help="Imprime tambien JSON con resultados")
    bench_parser.add_argument("--out", type=Path, help="Guarda el JSON en un fichero")
    bench_parser.set_defaults(func=cmd_bench)

    # --- inspect: show package summary ---
    inspect_parser = subparsers.add_parser("inspect", help="Muestra la ficha de un paquete")
    inspect_parser.add_argument("package", type=Path, help="Ruta del paquete .peridot")
    inspect_parser.add_argument("--files", action="store_true", help="Muestra la lista de ficheros")
    inspect_parser.add_argument("--all", action="store_true", help="Muestra todos los ficheros")
    inspect_parser.add_argument("--json", action="store_true", help="Imprime tambien el manifest en JSON")
    inspect_parser.set_defaults(func=cmd_inspect)

    # --- apply: restore a bundle onto the filesystem ---
    apply_parser = subparsers.add_parser("apply", help="Aplica un paquete .peridot")
    apply_parser.add_argument("package", type=Path, help="Ruta del paquete .peridot")
    apply_parser.add_argument("--target", type=Path, default=Path.home(), help="Directorio destino para la restauracion")
    apply_parser.add_argument("--backup-dir", type=Path, help="Si existe el fichero, guarda una copia antes de sobrescribir")
    # Paired on/off flags sharing one dest (default on).
    apply_parser.add_argument("--transactional", dest="transactional", action="store_true", default=True, help="Rollback best-effort si falla a mitad (por defecto activado)")
    apply_parser.add_argument("--no-transactional", dest="transactional", action="store_false", help="Desactiva rollback transaccional")
    apply_parser.add_argument("--verify", dest="verify", action="store_true", default=True, help="Verifica hash tras escribir (por defecto activado)")
    apply_parser.add_argument("--no-verify", dest="verify", action="store_false", help="Desactiva verificacion post-escritura")
    apply_parser.add_argument("--dry-run", action="store_true", help="Muestra lo que se haria sin escribir")
    apply_parser.add_argument("--ignore-platform", action="store_true", help="Aplica incluso si el target del bundle no coincide con la maquina actual")
    apply_parser.add_argument("--select", action="append", default=[], help="Path exacto dentro del bundle a restaurar. Repetible.")
    apply_parser.add_argument("-y", "--yes", action="store_true", help="No pedir confirmacion interactiva")
    apply_parser.set_defaults(func=cmd_apply)

    # --- diff: compare bundle vs target directory ---
    diff_parser = subparsers.add_parser("diff", help="Compara un bundle con un directorio destino")
    diff_parser.add_argument("package", type=Path, help="Ruta del paquete .peridot")
    diff_parser.add_argument("--target", type=Path, default=Path.home(), help="Directorio destino")
    diff_parser.add_argument("--no-hash", action="store_true", help="No descifrar payloads; solo comprobar presencia")
    diff_parser.add_argument("--json", action="store_true", help="Salida estructurada en JSON")
    diff_parser.set_defaults(func=cmd_diff)

    # --- verify: integrity check ---
    verify_parser = subparsers.add_parser("verify", help="Verifica integridad del bundle")
    verify_parser.add_argument("package", type=Path, help="Ruta del paquete .peridot")
    verify_parser.add_argument("--deep", action="store_true", help="Verifica hashes descifrando payloads con la clave")
    verify_parser.add_argument("--json", action="store_true", help="Salida estructurada en JSON")
    verify_parser.set_defaults(func=cmd_verify)

    # --- doctor: environment diagnostics ---
    doctor_parser = subparsers.add_parser("doctor", help="Diagnostico del entorno local")
    doctor_parser.add_argument("--json", action="store_true", help="Salida estructurada en JSON")
    doctor_parser.set_defaults(func=cmd_doctor)

    # --- share: export a shareable bundle card ---
    share_parser = subparsers.add_parser("share", help="Exporta una ficha CLI-friendly del bundle")
    share_parser.add_argument("package", type=Path, help="Ruta del paquete .peridot")
    share_parser.add_argument("--format", choices=["json", "md"], default="md", help="Formato de salida")
    share_parser.add_argument("--output", type=Path, help="Ruta de salida opcional")
    share_parser.set_defaults(func=cmd_share)

    # --- merge: combine several bundles ---
    merge_parser = subparsers.add_parser("merge", help="Fusiona varios bundles en uno")
    merge_parser.add_argument("packages", nargs="+", type=Path, help="Bundles a fusionar")
    merge_parser.add_argument("--output", type=Path, required=True, help="Bundle de salida")
    merge_parser.add_argument("--name", default="", help="Nombre del bundle resultante")
    merge_parser.add_argument("--description", default="", help="Descripcion del bundle resultante")
    merge_parser.add_argument("--platform", default="", help="Plataforma del bundle resultante")
    merge_parser.add_argument("--shell", default="", help="Shell del bundle resultante")
    merge_parser.add_argument("--arch", default="", help="Arquitectura del bundle resultante")
    merge_parser.add_argument("--tag", dest="tags", action="append", default=[], help="Tag repetible")
    merge_parser.set_defaults(func=cmd_merge)

    # --- split: extract a subset into a new bundle ---
    split_parser = subparsers.add_parser("split", help="Extrae un subset de un bundle en otro bundle")
    split_parser.add_argument("package", type=Path, help="Bundle origen")
    split_parser.add_argument("--prefix", action="append", required=True, help="Prefijo/path a extraer. Repetible.")
    split_parser.add_argument("--output", type=Path, required=True, help="Bundle de salida")
    split_parser.add_argument("--name", default="", help="Nombre del bundle resultante")
    split_parser.add_argument("--description", default="", help="Descripcion del bundle resultante")
    split_parser.set_defaults(func=cmd_split)

    # --- history: list historical snapshots ---
    history_parser = subparsers.add_parser("history", help="Lista snapshots historicos de un bundle")
    history_parser.add_argument("bundle", help="Nombre base del bundle sin extension")
    history_parser.set_defaults(func=cmd_history)

    # --- manifest: print raw manifest ---
    manifest_parser = subparsers.add_parser("manifest", help="Imprime el manifest de un paquete")
    manifest_parser.add_argument("package", type=Path, help="Ruta del paquete .peridot")
    manifest_parser.set_defaults(func=cmd_manifest)

    # --- delete: remove packages ---
    delete_parser = subparsers.add_parser("delete", help="Elimina paquetes .peridot")
    delete_parser.add_argument("packages", nargs="*", help="Paquetes a eliminar o indices locales")
    delete_parser.add_argument("--all-local", action="store_true", help="Elimina todos los .peridot del directorio actual")
    delete_parser.add_argument("-y", "--yes", action="store_true", help="No pedir confirmacion")
    delete_parser.set_defaults(func=cmd_delete)

    # --- rekey: rotate the encryption key ---
    rekey_parser = subparsers.add_parser("rekey", help="Genera una nueva clave y migra paquetes existentes")
    rekey_parser.add_argument("packages", nargs="*", help="Paquetes a migrar o indices locales")
    rekey_parser.add_argument("--all-local", action="store_true", help="Migra todos los .peridot del directorio actual")
    rekey_parser.add_argument("--no-backup", action="store_true", help="No guardar copia de la clave antigua")
    rekey_parser.add_argument("-y", "--yes", action="store_true", help="No pedir confirmacion")
    rekey_parser.set_defaults(func=cmd_rekey)

    # --- catalog: list detectable config groups ---
    catalog_parser = subparsers.add_parser("catalog", help="Lista grupos clasificados de configuracion detectables")
    catalog_parser.add_argument("--platform", default="", help="Plataforma a inspeccionar: macos, linux o windows")
    catalog_parser.add_argument("--shell", default="", help="Shell para recomendaciones por defecto")
    catalog_parser.set_defaults(func=cmd_catalog)

    # --- profile: nested save/list/show/delete subcommands ---
    profile_parser = subparsers.add_parser("profile", help="Gestiona perfiles reutilizables")
    profile_subparsers = profile_parser.add_subparsers(dest="profile_command", required=True)

    profile_save_parser = profile_subparsers.add_parser("save", help="Guarda un perfil")
    profile_save_parser.add_argument("name", help="Nombre del perfil")
    profile_save_parser.add_argument("--bundle-name", default="", help="Nombre de bundle por defecto")
    profile_save_parser.add_argument("--description", default="", help="Descripcion")
    profile_save_parser.add_argument("--platform", default="", help="Plataforma")
    profile_save_parser.add_argument("--shell", default="", help="Shell")
    profile_save_parser.add_argument("--arch", default="", help="Arquitectura")
    profile_save_parser.add_argument("--tag", dest="tags", action="append", default=[], help="Tag repetible")
    profile_save_parser.add_argument("--preset", default="", help="Preset base")
    profile_save_parser.add_argument("--path", dest="paths", action="append", default=[], help="Path repetible")
    profile_save_parser.add_argument("--exclude", action="append", default=[], help="Glob a excluir")
    profile_save_parser.add_argument("--notes", default="", help="Notas")
    profile_save_parser.add_argument("--after-step", dest="after_steps", action="append", default=[], help="Paso post-apply")
    profile_save_parser.set_defaults(func=cmd_profile_save)

    profile_list_parser = profile_subparsers.add_parser("list", help="Lista perfiles")
    profile_list_parser.set_defaults(func=cmd_profile_list)

    profile_show_parser = profile_subparsers.add_parser("show", help="Muestra un perfil")
    profile_show_parser.add_argument("name", help="Nombre del perfil")
    profile_show_parser.set_defaults(func=cmd_profile_show)

    profile_delete_parser = profile_subparsers.add_parser("delete", help="Elimina un perfil")
    profile_delete_parser.add_argument("name", help="Nombre del perfil")
    profile_delete_parser.set_defaults(func=cmd_profile_delete)

    # --- settings: persistent defaults ---
    settings_parser = subparsers.add_parser("settings", help="Gestiona defaults persistentes de Peridot")
    settings_parser.add_argument("--show", action="store_true", help="Muestra los settings efectivos")
    settings_parser.add_argument("--set", action="append", default=[], help="Actualiza un setting con clave=valor. Repetible.")
    settings_parser.add_argument("--settings-path", type=Path, default=DEFAULT_SETTINGS_STORE, help="Ruta del store de settings")
    settings_parser.set_defaults(func=cmd_settings)

    # --- keygen / init / ui ---
    keygen_parser = subparsers.add_parser("keygen", help="Genera o muestra la clave activa")
    keygen_parser.set_defaults(func=cmd_keygen)

    init_parser = subparsers.add_parser("init", help="Inicializa Peridot (key + settings)")
    init_parser.add_argument("--force", action="store_true", help="Sobrescribe settings existentes")
    init_parser.set_defaults(func=cmd_init)

    ui_parser = subparsers.add_parser("ui", help="Lanza el command center visual")
    ui_parser.set_defaults(func=cmd_ui)

    # --- export: alias of pack (same options, same handler) ---
    export_parser = subparsers.add_parser("export", help="Alias de pack")
    export_parser.add_argument("name", nargs="?", help="Nombre del bundle")
    export_parser.add_argument("paths", nargs="*", help="Rutas a incluir")
    export_parser.add_argument("--output", type=Path, help="Ruta del paquete de salida")
    export_parser.add_argument("--description", default="", help="Descripcion corta del bundle")
    export_parser.add_argument("--platform", default="", help="SO objetivo")
    export_parser.add_argument("--shell", default="", help="Shell o runtime principal")
    export_parser.add_argument("--arch", default="", help="Arquitectura objetivo")
    export_parser.add_argument("--tag", dest="tags", action="append", default=[], help="Tag del bundle. Repetible.")
    export_parser.add_argument("--preset", default="", help="Preset de dotfiles")
    export_parser.add_argument("--profile", default="", help="Perfil guardado")
    export_parser.add_argument("--exclude", action="append", default=[], help="Glob a excluir")
    export_parser.add_argument("--notes", default="", help="Notas del bundle")
    export_parser.add_argument("--after-step", dest="after_steps", action="append", default=[], help="Paso post-apply")
    export_parser.add_argument("--compression-level", type=int, default=None, choices=range(0, 10), help="Nivel de compresion Peridot: zstd si esta disponible, gzip como fallback. Si no se pasa, usa settings.")
    export_parser.add_argument("--jobs", type=int, default=None, help="Numero de workers para pack. Si no se pasa, usa settings.")
    export_parser.add_argument("-y", "--yes", action="store_true", help="Aceptar avisos sensibles")
    export_parser.set_defaults(func=cmd_pack)

    # --- import: alias of apply (reduced option set) ---
    import_parser = subparsers.add_parser("import", help="Alias de apply")
    import_parser.add_argument("package", type=Path, help="Ruta del paquete .peridot")
    import_parser.add_argument("--target", type=Path, default=Path.home(), help="Directorio destino para la restauracion")
    import_parser.add_argument("--backup-dir", type=Path, help="Directorio donde guardar backups")
    import_parser.add_argument("--dry-run", action="store_true", help="No escribir nada")
    import_parser.add_argument("--ignore-platform", action="store_true", help="Ignora el target del paquete")
    import_parser.add_argument("--select", action="append", default=[], help="Path exacto dentro del bundle")
    import_parser.add_argument("-y", "--yes", action="store_true", help="No pedir confirmacion")
    import_parser.set_defaults(func=cmd_apply)

    # Translate all help strings into the active runtime language.
    localize_parser(parser)
    return parser
|
|
3352
|
+
|
|
3353
|
+
|
|
3354
|
+
def main(argv: Iterable[str] | None = None) -> None:
    """CLI entry point: configure localization, parse args, dispatch handler."""
    set_current_language(detect_runtime_language())
    parsed = build_parser().parse_args(argv)
    # Every subparser bound its handler via set_defaults(func=...).
    parsed.func(parsed)
|
|
3359
|
+
|
|
3360
|
+
|
|
3361
|
+
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()
|