clonebox 0.1.22__py3-none-any.whl → 0.1.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
clonebox/cloner.py CHANGED
@@ -3,6 +3,7 @@
3
3
  SelectiveVMCloner - Creates isolated VMs with only selected apps/paths/services.
4
4
  """
5
5
 
6
+ import json
6
7
  import os
7
8
  import subprocess
8
9
  import tempfile
@@ -13,6 +14,12 @@ from dataclasses import dataclass, field
13
14
  from pathlib import Path
14
15
  from typing import Optional
15
16
 
17
+ try:
18
+ from dotenv import load_dotenv
19
+ load_dotenv()
20
+ except ImportError:
21
+ pass # dotenv is optional
22
+
16
23
  try:
17
24
  import libvirt
18
25
  except ImportError:
@@ -33,21 +40,23 @@ DEFAULT_SNAP_INTERFACES = ['desktop', 'desktop-legacy', 'x11', 'home', 'network'
33
40
  class VMConfig:
34
41
  """Configuration for the VM to create."""
35
42
 
36
- name: str = "clonebox-vm"
37
- ram_mb: int = 4096
38
- vcpus: int = 4
39
- disk_size_gb: int = 20
40
- gui: bool = True
41
- base_image: Optional[str] = None
43
+ name: str = field(default_factory=lambda: os.getenv("VM_NAME", "clonebox-vm"))
44
+ ram_mb: int = field(default_factory=lambda: int(os.getenv("VM_RAM_MB", "8192")))
45
+ vcpus: int = field(default_factory=lambda: int(os.getenv("VM_VCPUS", "4")))
46
+ disk_size_gb: int = field(default_factory=lambda: int(os.getenv("VM_DISK_SIZE_GB", "20")))
47
+ gui: bool = field(default_factory=lambda: os.getenv("VM_GUI", "true").lower() == "true")
48
+ base_image: Optional[str] = field(default_factory=lambda: os.getenv("VM_BASE_IMAGE") or None)
42
49
  paths: dict = field(default_factory=dict)
43
50
  packages: list = field(default_factory=list)
44
51
  snap_packages: list = field(default_factory=list) # Snap packages to install
45
52
  services: list = field(default_factory=list)
46
53
  post_commands: list = field(default_factory=list) # Commands to run after setup
47
- user_session: bool = False # Use qemu:///session instead of qemu:///system
48
- network_mode: str = "auto" # auto|default|user
49
- username: str = "ubuntu" # VM default username
50
- password: str = "ubuntu" # VM default password
54
+ user_session: bool = field(default_factory=lambda: os.getenv("VM_USER_SESSION", "false").lower() == "true") # Use qemu:///session instead of qemu:///system
55
+ network_mode: str = field(default_factory=lambda: os.getenv("VM_NETWORK_MODE", "auto")) # auto|default|user
56
+ username: str = field(default_factory=lambda: os.getenv("VM_USERNAME", "ubuntu")) # VM default username
57
+ password: str = field(default_factory=lambda: os.getenv("VM_PASSWORD", "ubuntu")) # VM default password
58
+ autostart_apps: bool = field(default_factory=lambda: os.getenv("VM_AUTOSTART_APPS", "true").lower() == "true") # Auto-start GUI apps after login (desktop autostart)
59
+ web_services: list = field(default_factory=list) # Web services to start (uvicorn, etc.)
51
60
 
52
61
  def to_dict(self) -> dict:
53
62
  return {
@@ -63,15 +72,6 @@ class SelectiveVMCloner:
63
72
  Uses bind mounts instead of full disk cloning.
64
73
  """
65
74
 
66
- # Default images directories
67
- SYSTEM_IMAGES_DIR = Path("/var/lib/libvirt/images")
68
- USER_IMAGES_DIR = Path.home() / ".local/share/libvirt/images"
69
-
70
- DEFAULT_BASE_IMAGE_URL = (
71
- "https://cloud-images.ubuntu.com/jammy/current/jammy-server-cloudimg-amd64.img"
72
- )
73
- DEFAULT_BASE_IMAGE_FILENAME = "clonebox-ubuntu-jammy-amd64.qcow2"
74
-
75
75
  def __init__(self, conn_uri: str = None, user_session: bool = False):
76
76
  self.user_session = user_session
77
77
  if conn_uri:
@@ -81,6 +81,28 @@ class SelectiveVMCloner:
81
81
  self.conn = None
82
82
  self._connect()
83
83
 
84
+ @property
85
+ def SYSTEM_IMAGES_DIR(self) -> Path:
86
+ return Path(os.getenv("CLONEBOX_SYSTEM_IMAGES_DIR", "/var/lib/libvirt/images"))
87
+
88
+ @property
89
+ def USER_IMAGES_DIR(self) -> Path:
90
+ return Path(os.getenv("CLONEBOX_USER_IMAGES_DIR", str(Path.home() / ".local/share/libvirt/images"))).expanduser()
91
+
92
+ @property
93
+ def DEFAULT_BASE_IMAGE_URL(self) -> str:
94
+ return os.getenv(
95
+ "CLONEBOX_BASE_IMAGE_URL",
96
+ "https://cloud-images.ubuntu.com/jammy/current/jammy-server-cloudimg-amd64.img"
97
+ )
98
+
99
+ @property
100
+ def DEFAULT_BASE_IMAGE_FILENAME(self) -> str:
101
+ return os.getenv(
102
+ "CLONEBOX_BASE_IMAGE_FILENAME",
103
+ "clonebox-ubuntu-jammy-amd64.qcow2"
104
+ )
105
+
84
106
  def _connect(self):
85
107
  """Connect to libvirt."""
86
108
  if libvirt is None:
@@ -584,12 +606,76 @@ connect_interfaces() {{
584
606
  }}
585
607
 
586
608
  test_launch() {{
587
- case "$1" in
588
- pycharm-community) /snap/pycharm-community/current/jbr/bin/java -version &>/dev/null ;;
589
- chromium) timeout 10 chromium --headless=new --dump-dom about:blank &>/dev/null ;;
590
- firefox) timeout 10 firefox --headless --screenshot /tmp/ff-test.png about:blank &>/dev/null; rm -f /tmp/ff-test.png ;;
591
- docker) docker info &>/dev/null ;;
592
- *) command -v "$1" &>/dev/null ;;
609
+ local app="$1"
610
+ local temp_output="/tmp/$app-test.log"
611
+ local error_detail="/tmp/$app-error.log"
612
+
613
+ case "$app" in
614
+ pycharm-community)
615
+ if timeout 10 /snap/pycharm-community/current/jbr/bin/java -version &>"$temp_output"; then
616
+ return 0
617
+ else
618
+ echo "PyCharm Java test failed:" >> "$error_detail"
619
+ cat "$temp_output" >> "$error_detail" 2>&1 || true
620
+ return 1
621
+ fi
622
+ ;;
623
+ chromium)
624
+ # First check if chromium can run at all
625
+ if ! command -v chromium >/dev/null 2>&1; then
626
+ echo "ERROR: chromium not found in PATH" >> "$error_detail"
627
+ echo "PATH=$PATH" >> "$error_detail"
628
+ return 1
629
+ fi
630
+
631
+ # Try with different approaches
632
+ if timeout 10 chromium --headless=new --dump-dom about:blank &>"$temp_output" 2>&1; then
633
+ return 0
634
+ else
635
+ echo "Chromium headless test failed:" >> "$error_detail"
636
+ cat "$temp_output" >> "$error_detail"
637
+
638
+ # Try basic version check
639
+ echo "Trying chromium --version:" >> "$error_detail"
640
+ timeout 5 chromium --version >> "$error_detail" 2>&1 || true
641
+
642
+ # Check display
643
+ echo "Display check:" >> "$error_detail"
644
+ echo "DISPLAY=${{DISPLAY:-unset}}" >> "$error_detail"
645
+ echo "XDG_RUNTIME_DIR=${{XDG_RUNTIME_DIR:-unset}}" >> "$error_detail"
646
+ ls -la /tmp/.X11-unix/ >> "$error_detail" 2>&1 || true
647
+
648
+ return 1
649
+ fi
650
+ ;;
651
+ firefox)
652
+ if timeout 10 firefox --headless --screenshot /tmp/ff-test.png about:blank &>/dev/null; then
653
+ rm -f /tmp/ff-test.png
654
+ return 0
655
+ else
656
+ echo "Firefox headless test failed" >> "$error_detail"
657
+ timeout 5 firefox --version >> "$error_detail" 2>&1 || true
658
+ return 1
659
+ fi
660
+ ;;
661
+ docker)
662
+ if docker info &>/dev/null; then
663
+ return 0
664
+ else
665
+ echo "Docker info failed:" >> "$error_detail"
666
+ docker info >> "$error_detail" 2>&1 || true
667
+ return 1
668
+ fi
669
+ ;;
670
+ *)
671
+ if command -v "$1" &>/dev/null; then
672
+ return 0
673
+ else
674
+ echo "Command not found: $1" >> "$error_detail"
675
+ echo "PATH=$PATH" >> "$error_detail"
676
+ return 1
677
+ fi
678
+ ;;
593
679
  esac
594
680
  }}
595
681
 
@@ -599,8 +685,48 @@ write_status "starting" "boot diagnostic starting"
599
685
  APT_PACKAGES=({apt_packages})
600
686
  SNAP_PACKAGES=({snap_packages})
601
687
  SERVICES=({services})
688
+ VM_USER="${{SUDO_USER:-ubuntu}}"
689
+ VM_HOME="/home/$VM_USER"
690
+
691
+ # ═══════════════════════════════════════════════════════════════════════════════
692
+ # Section 0: Fix permissions for GNOME directories (runs first!)
693
+ # ═══════════════════════════════════════════════════════════════════════════════
694
+ section "0/7" "Fixing directory permissions..."
695
+ write_status "fixing_permissions" "fixing directory permissions"
696
+
697
+ GNOME_DIRS=(
698
+ "$VM_HOME/.config"
699
+ "$VM_HOME/.config/pulse"
700
+ "$VM_HOME/.config/dconf"
701
+ "$VM_HOME/.config/ibus"
702
+ "$VM_HOME/.cache"
703
+ "$VM_HOME/.cache/ibus"
704
+ "$VM_HOME/.cache/tracker3"
705
+ "$VM_HOME/.cache/mesa_shader_cache"
706
+ "$VM_HOME/.local"
707
+ "$VM_HOME/.local/share"
708
+ "$VM_HOME/.local/share/applications"
709
+ "$VM_HOME/.local/share/keyrings"
710
+ )
711
+
712
+ for dir in "${{GNOME_DIRS[@]}}"; do
713
+ if [ ! -d "$dir" ]; then
714
+ mkdir -p "$dir" 2>/dev/null && log " Created $dir" || true
715
+ fi
716
+ done
717
+
718
+ # Fix ownership for all critical directories
719
+ chown -R 1000:1000 "$VM_HOME/.config" "$VM_HOME/.cache" "$VM_HOME/.local" 2>/dev/null || true
720
+ chmod 700 "$VM_HOME/.config" "$VM_HOME/.cache" 2>/dev/null || true
602
721
 
603
- section "1/5" "Checking APT packages..."
722
+ # Fix snap directories ownership
723
+ for snap_dir in "$VM_HOME/snap"/*; do
724
+ [ -d "$snap_dir" ] && chown -R 1000:1000 "$snap_dir" 2>/dev/null || true
725
+ done
726
+
727
+ ok "Directory permissions fixed"
728
+
729
+ section "1/7" "Checking APT packages..."
604
730
  write_status "checking_apt" "checking APT packages"
605
731
  for pkg in "${{APT_PACKAGES[@]}}"; do
606
732
  [ -z "$pkg" ] && continue
@@ -617,7 +743,7 @@ for pkg in "${{APT_PACKAGES[@]}}"; do
617
743
  fi
618
744
  done
619
745
 
620
- section "2/5" "Checking Snap packages..."
746
+ section "2/7" "Checking Snap packages..."
621
747
  write_status "checking_snaps" "checking snap packages"
622
748
  timeout 120 snap wait system seed.loaded 2>/dev/null || true
623
749
  for pkg in "${{SNAP_PACKAGES[@]}}"; do
@@ -635,7 +761,7 @@ for pkg in "${{SNAP_PACKAGES[@]}}"; do
635
761
  fi
636
762
  done
637
763
 
638
- section "3/5" "Connecting Snap interfaces..."
764
+ section "3/7" "Connecting Snap interfaces..."
639
765
  write_status "connecting_interfaces" "connecting snap interfaces"
640
766
  for pkg in "${{SNAP_PACKAGES[@]}}"; do
641
767
  [ -z "$pkg" ] && continue
@@ -643,7 +769,7 @@ for pkg in "${{SNAP_PACKAGES[@]}}"; do
643
769
  done
644
770
  systemctl restart snapd 2>/dev/null || true
645
771
 
646
- section "4/5" "Testing application launch..."
772
+ section "4/7" "Testing application launch..."
647
773
  write_status "testing_launch" "testing application launch"
648
774
  APPS_TO_TEST=()
649
775
  for pkg in "${{SNAP_PACKAGES[@]}}"; do
@@ -689,10 +815,15 @@ for app in "${{APPS_TO_TEST[@]}}"; do
689
815
  ok "$app launches OK"
690
816
  else
691
817
  fail "$app launch test FAILED"
818
+ # Show error details in main log
819
+ if [ -f "/tmp/$app-error.log" ]; then
820
+ echo " Error details:" | tee -a "$LOG"
821
+ head -10 "/tmp/$app-error.log" | sed 's/^/ /' | tee -a "$LOG" || true
822
+ fi
692
823
  fi
693
824
  done
694
825
 
695
- section "5/6" "Checking mount points..."
826
+ section "5/7" "Checking mount points..."
696
827
  write_status "checking_mounts" "checking mount points"
697
828
  while IFS= read -r line; do
698
829
  tag=$(echo "$line" | awk '{{print $1}}')
@@ -713,7 +844,7 @@ while IFS= read -r line; do
713
844
  fi
714
845
  done < /etc/fstab
715
846
 
716
- section "6/6" "Checking services..."
847
+ section "6/7" "Checking services..."
717
848
  write_status "checking_services" "checking services"
718
849
  for svc in "${{SERVICES[@]}}"; do
719
850
  [ -z "$svc" ] && continue
@@ -1003,10 +1134,60 @@ fi
1003
1134
 
1004
1135
  # Add GUI setup if enabled - runs AFTER package installation completes
1005
1136
  if config.gui:
1137
+ # Create directories that GNOME services need BEFORE GUI starts
1138
+ # These may conflict with mounted host directories, so ensure they exist with correct perms
1006
1139
  runcmd_lines.extend([
1140
+ " - mkdir -p /home/ubuntu/.config/pulse /home/ubuntu/.cache/ibus /home/ubuntu/.local/share",
1141
+ " - mkdir -p /home/ubuntu/.config/dconf /home/ubuntu/.cache/tracker3",
1142
+ " - mkdir -p /home/ubuntu/.config/autostart",
1143
+ " - chown -R 1000:1000 /home/ubuntu/.config /home/ubuntu/.cache /home/ubuntu/.local",
1144
+ " - chmod 700 /home/ubuntu/.config /home/ubuntu/.cache",
1007
1145
  " - systemctl set-default graphical.target",
1008
1146
  " - systemctl enable gdm3 || systemctl enable gdm || true",
1009
1147
  ])
1148
+
1149
+ # Create autostart entries for GUI apps
1150
+ autostart_apps = {
1151
+ 'pycharm-community': ('PyCharm Community', '/snap/bin/pycharm-community', 'pycharm-community'),
1152
+ 'firefox': ('Firefox', '/snap/bin/firefox', 'firefox'),
1153
+ 'chromium': ('Chromium', '/snap/bin/chromium', 'chromium'),
1154
+ 'google-chrome': ('Google Chrome', 'google-chrome-stable', 'google-chrome'),
1155
+ }
1156
+
1157
+ for snap_pkg in config.snap_packages:
1158
+ if snap_pkg in autostart_apps:
1159
+ name, exec_cmd, icon = autostart_apps[snap_pkg]
1160
+ desktop_entry = f'''[Desktop Entry]
1161
+ Type=Application
1162
+ Name={name}
1163
+ Exec={exec_cmd}
1164
+ Icon={icon}
1165
+ X-GNOME-Autostart-enabled=true
1166
+ X-GNOME-Autostart-Delay=5
1167
+ Comment=CloneBox autostart
1168
+ '''
1169
+ import base64
1170
+ desktop_b64 = base64.b64encode(desktop_entry.encode()).decode()
1171
+ runcmd_lines.append(f" - echo '{desktop_b64}' | base64 -d > /home/ubuntu/.config/autostart/{snap_pkg}.desktop")
1172
+
1173
+ # Check if google-chrome is in paths (app_data_paths)
1174
+ wants_chrome = any('/google-chrome' in str(p) for p in (config.paths or {}).values())
1175
+ if wants_chrome:
1176
+ name, exec_cmd, icon = autostart_apps['google-chrome']
1177
+ desktop_entry = f'''[Desktop Entry]
1178
+ Type=Application
1179
+ Name={name}
1180
+ Exec={exec_cmd}
1181
+ Icon={icon}
1182
+ X-GNOME-Autostart-enabled=true
1183
+ X-GNOME-Autostart-Delay=5
1184
+ Comment=CloneBox autostart
1185
+ '''
1186
+ desktop_b64 = base64.b64encode(desktop_entry.encode()).decode()
1187
+ runcmd_lines.append(f" - echo '{desktop_b64}' | base64 -d > /home/ubuntu/.config/autostart/google-chrome.desktop")
1188
+
1189
+ # Fix ownership of autostart directory
1190
+ runcmd_lines.append(" - chown -R 1000:1000 /home/ubuntu/.config/autostart")
1010
1191
 
1011
1192
  # Run user-defined post commands
1012
1193
  if config.post_commands:
@@ -1076,6 +1257,635 @@ echo ""'''
1076
1257
  runcmd_lines.append(f" - echo '{motd_b64}' | base64 -d > /etc/update-motd.d/99-clonebox")
1077
1258
  runcmd_lines.append(" - chmod +x /etc/update-motd.d/99-clonebox")
1078
1259
 
1260
+ # Create user-friendly clonebox-repair script
1261
+ repair_script = r'''#!/bin/bash
1262
+ # CloneBox Repair - User-friendly repair utility for CloneBox VMs
1263
+ # Usage: clonebox-repair [--auto|--status|--logs|--help]
1264
+
1265
+ set -uo pipefail
1266
+
1267
+ RED='\033[0;31m' GREEN='\033[0;32m' YELLOW='\033[1;33m' CYAN='\033[0;36m' NC='\033[0m' BOLD='\033[1m'
1268
+
1269
+ show_help() {
1270
+ echo -e "${BOLD}${CYAN}CloneBox Repair Utility${NC}"
1271
+ echo ""
1272
+ echo "Usage: clonebox-repair [OPTION]"
1273
+ echo ""
1274
+ echo "Options:"
1275
+ echo " --auto Run full automatic repair (same as boot diagnostic)"
1276
+ echo " --status Show current CloneBox status"
1277
+ echo " --logs Show recent repair logs"
1278
+ echo " --perms Fix directory permissions only"
1279
+ echo " --audio Fix audio (PulseAudio) and restart"
1280
+ echo " --keyring Reset GNOME Keyring (fixes password mismatch)"
1281
+ echo " --snaps Reconnect all snap interfaces only"
1282
+ echo " --mounts Remount all 9p filesystems only"
1283
+ echo " --all Run all fixes (perms + audio + snaps + mounts)"
1284
+ echo " --help Show this help message"
1285
+ echo ""
1286
+ echo "Without options, shows interactive menu."
1287
+ }
1288
+
1289
+ show_status() {
1290
+ echo -e "${BOLD}${CYAN}═══════════════════════════════════════════════════════════${NC}"
1291
+ echo -e "${BOLD}${CYAN} CloneBox VM Status${NC}"
1292
+ echo -e "${BOLD}${CYAN}═══════════════════════════════════════════════════════════${NC}"
1293
+
1294
+ if [ -f /var/run/clonebox-status ]; then
1295
+ source /var/run/clonebox-status
1296
+ if [ "${failed:-0}" -eq 0 ]; then
1297
+ echo -e " ${GREEN}✅ All systems operational${NC}"
1298
+ else
1299
+ echo -e " ${RED}⚠️ $failed checks failed${NC}"
1300
+ fi
1301
+ echo -e " Passed: ${passed:-0} | Repaired: ${repaired:-0} | Failed: ${failed:-0}"
1302
+ else
1303
+ echo -e " ${YELLOW}No status information available${NC}"
1304
+ fi
1305
+ echo ""
1306
+ echo -e " Last boot diagnostic: $(stat -c %y /var/log/clonebox-boot.log 2>/dev/null || echo 'never')"
1307
+ echo -e "${BOLD}${CYAN}═══════════════════════════════════════════════════════════${NC}"
1308
+ }
1309
+
1310
+ show_logs() {
1311
+ echo -e "${BOLD}Recent repair logs:${NC}"
1312
+ echo ""
1313
+ tail -n 50 /var/log/clonebox-boot.log 2>/dev/null || echo "No logs found"
1314
+ }
1315
+
1316
+ fix_permissions() {
1317
+ echo -e "${CYAN}Fixing directory permissions...${NC}"
1318
+ VM_USER="${SUDO_USER:-ubuntu}"
1319
+ VM_HOME="/home/$VM_USER"
1320
+
1321
+ DIRS_TO_CREATE=(
1322
+ "$VM_HOME/.config"
1323
+ "$VM_HOME/.config/pulse"
1324
+ "$VM_HOME/.config/dconf"
1325
+ "$VM_HOME/.config/ibus"
1326
+ "$VM_HOME/.cache"
1327
+ "$VM_HOME/.cache/ibus"
1328
+ "$VM_HOME/.cache/tracker3"
1329
+ "$VM_HOME/.cache/mesa_shader_cache"
1330
+ "$VM_HOME/.local"
1331
+ "$VM_HOME/.local/share"
1332
+ "$VM_HOME/.local/share/applications"
1333
+ "$VM_HOME/.local/share/keyrings"
1334
+ )
1335
+
1336
+ for dir in "${DIRS_TO_CREATE[@]}"; do
1337
+ if [ ! -d "$dir" ]; then
1338
+ mkdir -p "$dir" 2>/dev/null && echo " Created $dir"
1339
+ fi
1340
+ done
1341
+
1342
+ chown -R 1000:1000 "$VM_HOME/.config" "$VM_HOME/.cache" "$VM_HOME/.local" 2>/dev/null
1343
+ chmod 700 "$VM_HOME/.config" "$VM_HOME/.cache" 2>/dev/null
1344
+
1345
+ for snap_dir in "$VM_HOME/snap"/*; do
1346
+ [ -d "$snap_dir" ] && chown -R 1000:1000 "$snap_dir" 2>/dev/null
1347
+ done
1348
+
1349
+ echo -e "${GREEN}✅ Permissions fixed${NC}"
1350
+ }
1351
+
1352
+ fix_audio() {
1353
+ echo -e "${CYAN}Fixing audio (PulseAudio/PipeWire)...${NC}"
1354
+ VM_USER="${SUDO_USER:-ubuntu}"
1355
+ VM_HOME="/home/$VM_USER"
1356
+
1357
+ # Create pulse config directory with correct permissions
1358
+ mkdir -p "$VM_HOME/.config/pulse" 2>/dev/null
1359
+ chown -R 1000:1000 "$VM_HOME/.config/pulse" 2>/dev/null
1360
+ chmod 700 "$VM_HOME/.config/pulse" 2>/dev/null
1361
+
1362
+ # Kill and restart audio services as user
1363
+ if [ -n "$SUDO_USER" ]; then
1364
+ sudo -u "$SUDO_USER" pulseaudio --kill 2>/dev/null || true
1365
+ sleep 1
1366
+ sudo -u "$SUDO_USER" pulseaudio --start 2>/dev/null || true
1367
+ echo " Restarted PulseAudio for $SUDO_USER"
1368
+ else
1369
+ pulseaudio --kill 2>/dev/null || true
1370
+ sleep 1
1371
+ pulseaudio --start 2>/dev/null || true
1372
+ echo " Restarted PulseAudio"
1373
+ fi
1374
+
1375
+ # Restart pipewire if available
1376
+ systemctl --user restart pipewire pipewire-pulse 2>/dev/null || true
1377
+
1378
+ echo -e "${GREEN}✅ Audio fixed${NC}"
1379
+ }
1380
+
1381
+ fix_keyring() {
1382
+ echo -e "${CYAN}Resetting GNOME Keyring...${NC}"
1383
+ VM_USER="${SUDO_USER:-ubuntu}"
1384
+ VM_HOME="/home/$VM_USER"
1385
+ KEYRING_DIR="$VM_HOME/.local/share/keyrings"
1386
+
1387
+ echo -e "${YELLOW}⚠️ This will delete existing keyrings and create a new one on next login${NC}"
1388
+ echo -e "${YELLOW} Stored passwords (WiFi, Chrome, etc.) will be lost!${NC}"
1389
+
1390
+ if [ -t 0 ]; then
1391
+ read -rp "Continue? [y/N] " confirm
1392
+ [[ "$confirm" != [yY]* ]] && { echo "Cancelled"; return; }
1393
+ fi
1394
+
1395
+ # Backup old keyrings
1396
+ if [ -d "$KEYRING_DIR" ] && [ "$(ls -A "$KEYRING_DIR" 2>/dev/null)" ]; then
1397
+ backup_dir="$VM_HOME/.local/share/keyrings.backup.$(date +%Y%m%d%H%M%S)"
1398
+ mv "$KEYRING_DIR" "$backup_dir" 2>/dev/null
1399
+ echo " Backed up to $backup_dir"
1400
+ fi
1401
+
1402
+ # Create fresh keyring directory
1403
+ mkdir -p "$KEYRING_DIR" 2>/dev/null
1404
+ chown -R 1000:1000 "$KEYRING_DIR" 2>/dev/null
1405
+ chmod 700 "$KEYRING_DIR" 2>/dev/null
1406
+
1407
+ # Kill gnome-keyring-daemon to force restart on next login
1408
+ pkill -u "$VM_USER" gnome-keyring-daemon 2>/dev/null || true
1409
+
1410
+ echo -e "${GREEN}✅ Keyring reset - log out and back in to create new keyring${NC}"
1411
+ }
1412
+
1413
+ fix_ibus() {
1414
+ echo -e "${CYAN}Fixing IBus input method...${NC}"
1415
+ VM_USER="${SUDO_USER:-ubuntu}"
1416
+ VM_HOME="/home/$VM_USER"
1417
+
1418
+ # Create ibus cache directory
1419
+ mkdir -p "$VM_HOME/.cache/ibus" 2>/dev/null
1420
+ chown -R 1000:1000 "$VM_HOME/.cache/ibus" 2>/dev/null
1421
+ chmod 700 "$VM_HOME/.cache/ibus" 2>/dev/null
1422
+
1423
+ # Restart ibus
1424
+ if [ -n "$SUDO_USER" ]; then
1425
+ sudo -u "$SUDO_USER" ibus restart 2>/dev/null || true
1426
+ else
1427
+ ibus restart 2>/dev/null || true
1428
+ fi
1429
+
1430
+ echo -e "${GREEN}✅ IBus fixed${NC}"
1431
+ }
1432
+
1433
+ fix_snaps() {
1434
+ echo -e "${CYAN}Reconnecting snap interfaces...${NC}"
1435
+ IFACES="desktop desktop-legacy x11 wayland home network audio-playback audio-record camera opengl"
1436
+
1437
+ for snap in $(snap list --color=never 2>/dev/null | tail -n +2 | awk '{print $1}'); do
1438
+ [[ "$snap" =~ ^(core|snapd|gnome-|gtk-|mesa-) ]] && continue
1439
+ echo -e " ${YELLOW}$snap${NC}"
1440
+ for iface in $IFACES; do
1441
+ snap connect "$snap:$iface" ":$iface" 2>/dev/null && echo " ✓ $iface" || true
1442
+ done
1443
+ done
1444
+
1445
+ systemctl restart snapd 2>/dev/null || true
1446
+ echo -e "${GREEN}✅ Snap interfaces reconnected${NC}"
1447
+ }
1448
+
1449
+ fix_mounts() {
1450
+ echo -e "${CYAN}Remounting filesystems...${NC}"
1451
+
1452
+ while IFS= read -r line; do
1453
+ tag=$(echo "$line" | awk '{print $1}')
1454
+ mp=$(echo "$line" | awk '{print $2}')
1455
+ if [[ "$tag" =~ ^mount[0-9]+$ ]] && [[ "$mp" == /* ]]; then
1456
+ if ! mountpoint -q "$mp" 2>/dev/null; then
1457
+ mkdir -p "$mp" 2>/dev/null
1458
+ if mount "$mp" 2>/dev/null; then
1459
+ echo -e " ${GREEN}✓${NC} $mp"
1460
+ else
1461
+ echo -e " ${RED}✗${NC} $mp (failed)"
1462
+ fi
1463
+ else
1464
+ echo -e " ${GREEN}✓${NC} $mp (already mounted)"
1465
+ fi
1466
+ fi
1467
+ done < /etc/fstab
1468
+
1469
+ echo -e "${GREEN}✅ Mounts checked${NC}"
1470
+ }
1471
+
1472
+ fix_all() {
1473
+ echo -e "${BOLD}${CYAN}Running all fixes...${NC}"
1474
+ echo ""
1475
+ fix_permissions
1476
+ echo ""
1477
+ fix_audio
1478
+ echo ""
1479
+ fix_ibus
1480
+ echo ""
1481
+ fix_snaps
1482
+ echo ""
1483
+ fix_mounts
1484
+ echo ""
1485
+ echo -e "${BOLD}${GREEN}All fixes completed!${NC}"
1486
+ }
1487
+
1488
+ interactive_menu() {
1489
+ while true; do
1490
+ echo ""
1491
+ echo -e "${BOLD}${CYAN}═══════════════════════════════════════════════════════════${NC}"
1492
+ echo -e "${BOLD}${CYAN} CloneBox Repair Menu${NC}"
1493
+ echo -e "${BOLD}${CYAN}═══════════════════════════════════════════════════════════${NC}"
1494
+ echo ""
1495
+ echo " 1) Run full automatic repair (boot diagnostic)"
1496
+ echo " 2) Run all quick fixes (perms + audio + snaps + mounts)"
1497
+ echo " 3) Fix permissions only"
1498
+ echo " 4) Fix audio (PulseAudio) only"
1499
+ echo " 5) Reset GNOME Keyring (⚠️ deletes saved passwords)"
1500
+ echo " 6) Reconnect snap interfaces only"
1501
+ echo " 7) Remount filesystems only"
1502
+ echo " 8) Show status"
1503
+ echo " 9) Show logs"
1504
+ echo " q) Quit"
1505
+ echo ""
1506
+ read -rp "Select option: " choice
1507
+
1508
+ case "$choice" in
1509
+ 1) sudo /usr/local/bin/clonebox-boot-diagnostic ;;
1510
+ 2) fix_all ;;
1511
+ 3) fix_permissions ;;
1512
+ 4) fix_audio ;;
1513
+ 5) fix_keyring ;;
1514
+ 6) fix_snaps ;;
1515
+ 7) fix_mounts ;;
1516
+ 8) show_status ;;
1517
+ 9) show_logs ;;
1518
+ q|Q) exit 0 ;;
1519
+ *) echo -e "${RED}Invalid option${NC}" ;;
1520
+ esac
1521
+ done
1522
+ }
1523
+
1524
+ # Main
1525
+ case "${1:-}" in
1526
+ --auto) exec sudo /usr/local/bin/clonebox-boot-diagnostic ;;
1527
+ --all) fix_all ;;
1528
+ --status) show_status ;;
1529
+ --logs) show_logs ;;
1530
+ --perms) fix_permissions ;;
1531
+ --audio) fix_audio ;;
1532
+ --keyring) fix_keyring ;;
1533
+ --snaps) fix_snaps ;;
1534
+ --mounts) fix_mounts ;;
1535
+ --help|-h) show_help ;;
1536
+ "") interactive_menu ;;
1537
+ *) show_help; exit 1 ;;
1538
+ esac
1539
+ '''
1540
+ repair_b64 = base64.b64encode(repair_script.encode()).decode()
1541
+ runcmd_lines.append(f" - echo '{repair_b64}' | base64 -d > /usr/local/bin/clonebox-repair")
1542
+ runcmd_lines.append(" - chmod +x /usr/local/bin/clonebox-repair")
1543
+ runcmd_lines.append(" - ln -sf /usr/local/bin/clonebox-repair /usr/local/bin/cb-repair")
1544
+
1545
+ # === AUTOSTART: Systemd user services + Desktop autostart files ===
1546
+ # Create directories for user systemd services and autostart
1547
+ runcmd_lines.append(f" - mkdir -p /home/{config.username}/.config/systemd/user")
1548
+ runcmd_lines.append(f" - mkdir -p /home/{config.username}/.config/autostart")
1549
+
1550
+ # Enable lingering for the user (allows user services to run without login)
1551
+ runcmd_lines.append(f" - loginctl enable-linger {config.username}")
1552
+
1553
+ # Add environment variables for monitoring
1554
+ runcmd_lines.extend([
1555
+ " - echo 'CLONEBOX_ENABLE_MONITORING=true' >> /etc/environment",
1556
+ " - echo 'CLONEBOX_MONITOR_INTERVAL=30' >> /etc/environment",
1557
+ " - echo 'CLONEBOX_AUTO_REPAIR=true' >> /etc/environment",
1558
+ " - echo 'CLONEBOX_WATCH_APPS=true' >> /etc/environment",
1559
+ " - echo 'CLONEBOX_WATCH_SERVICES=true' >> /etc/environment",
1560
+ ])
1561
+
1562
+ # Generate autostart configurations based on installed apps (if enabled)
1563
+ autostart_apps = []
1564
+
1565
+ if getattr(config, 'autostart_apps', True):
1566
+ # Detect apps from snap_packages
1567
+ for snap_pkg in (config.snap_packages or []):
1568
+ if snap_pkg == "pycharm-community":
1569
+ autostart_apps.append({
1570
+ "name": "pycharm-community",
1571
+ "display_name": "PyCharm Community",
1572
+ "exec": "/snap/bin/pycharm-community %U",
1573
+ "type": "snap",
1574
+ "after": "graphical-session.target",
1575
+ })
1576
+ elif snap_pkg == "chromium":
1577
+ autostart_apps.append({
1578
+ "name": "chromium",
1579
+ "display_name": "Chromium Browser",
1580
+ "exec": "/snap/bin/chromium %U",
1581
+ "type": "snap",
1582
+ "after": "graphical-session.target",
1583
+ })
1584
+ elif snap_pkg == "firefox":
1585
+ autostart_apps.append({
1586
+ "name": "firefox",
1587
+ "display_name": "Firefox",
1588
+ "exec": "/snap/bin/firefox %U",
1589
+ "type": "snap",
1590
+ "after": "graphical-session.target",
1591
+ })
1592
+ elif snap_pkg == "code":
1593
+ autostart_apps.append({
1594
+ "name": "code",
1595
+ "display_name": "Visual Studio Code",
1596
+ "exec": "/snap/bin/code --new-window",
1597
+ "type": "snap",
1598
+ "after": "graphical-session.target",
1599
+ })
1600
+
1601
+ # Detect apps from packages (APT)
1602
+ for apt_pkg in (config.packages or []):
1603
+ if apt_pkg == "firefox":
1604
+ # Only add if not already added from snap
1605
+ if not any(a["name"] == "firefox" for a in autostart_apps):
1606
+ autostart_apps.append({
1607
+ "name": "firefox",
1608
+ "display_name": "Firefox",
1609
+ "exec": "/usr/bin/firefox %U",
1610
+ "type": "apt",
1611
+ "after": "graphical-session.target",
1612
+ })
1613
+
1614
+ # Check for google-chrome from app_data_paths
1615
+ for host_path, guest_path in (config.paths or {}).items():
1616
+ if guest_path == "/home/ubuntu/.config/google-chrome":
1617
+ autostart_apps.append({
1618
+ "name": "google-chrome",
1619
+ "display_name": "Google Chrome",
1620
+ "exec": "/usr/bin/google-chrome-stable %U",
1621
+ "type": "deb",
1622
+ "after": "graphical-session.target",
1623
+ })
1624
+ break
1625
+
1626
+ # Generate systemd user services for each app
1627
+ for app in autostart_apps:
1628
+ service_content = f'''[Unit]
1629
+ Description={app["display_name"]} Autostart
1630
+ After={app["after"]}
1631
+
1632
+ [Service]
1633
+ Type=simple
1634
+ Environment=DISPLAY=:0
1635
+ Environment=XDG_RUNTIME_DIR=/run/user/1000
1636
+ ExecStart={app["exec"]}
1637
+ Restart=on-failure
1638
+ RestartSec=5
1639
+
1640
+ [Install]
1641
+ WantedBy=default.target
1642
+ '''
1643
+ service_b64 = base64.b64encode(service_content.encode()).decode()
1644
+ service_path = f"/home/{config.username}/.config/systemd/user/{app['name']}.service"
1645
+ runcmd_lines.append(f" - echo '{service_b64}' | base64 -d > {service_path}")
1646
+
1647
+ # Generate desktop autostart files for GUI apps (alternative to systemd user services)
1648
+ for app in autostart_apps:
1649
+ desktop_content = f'''[Desktop Entry]
1650
+ Type=Application
1651
+ Name={app["display_name"]}
1652
+ Exec={app["exec"]}
1653
+ Hidden=false
1654
+ NoDisplay=false
1655
+ X-GNOME-Autostart-enabled=true
1656
+ X-GNOME-Autostart-Delay=5
1657
+ '''
1658
+ desktop_b64 = base64.b64encode(desktop_content.encode()).decode()
1659
+ desktop_path = f"/home/{config.username}/.config/autostart/{app['name']}.desktop"
1660
+ runcmd_lines.append(f" - echo '{desktop_b64}' | base64 -d > {desktop_path}")
1661
+
1662
+ # Fix ownership of all autostart files
1663
+ runcmd_lines.append(f" - chown -R 1000:1000 /home/{config.username}/.config/systemd")
1664
+ runcmd_lines.append(f" - chown -R 1000:1000 /home/{config.username}/.config/autostart")
1665
+
1666
+ # Enable systemd user services (must run as user)
1667
+ if autostart_apps:
1668
+ services_to_enable = " ".join(f"{app['name']}.service" for app in autostart_apps)
1669
+ runcmd_lines.append(f" - sudo -u {config.username} XDG_RUNTIME_DIR=/run/user/1000 systemctl --user daemon-reload || true")
1670
+ # Note: We don't enable services by default as desktop autostart is more reliable for GUI apps
1671
+ # User can enable them manually with: systemctl --user enable <service>
1672
+
1673
+ # === WEB SERVICES: System-wide services for uvicorn, nginx, etc. ===
1674
+ web_services = getattr(config, 'web_services', []) or []
1675
+ for svc in web_services:
1676
+ svc_name = svc.get("name", "clonebox-web")
1677
+ svc_desc = svc.get("description", f"CloneBox {svc_name}")
1678
+ svc_workdir = svc.get("workdir", "/mnt/project0")
1679
+ svc_exec = svc.get("exec", "uvicorn app:app --host 0.0.0.0 --port 8000")
1680
+ svc_user = svc.get("user", config.username)
1681
+ svc_after = svc.get("after", "network.target")
1682
+ svc_env = svc.get("environment", [])
1683
+
1684
+ env_lines = "\n".join(f"Environment={e}" for e in svc_env) if svc_env else ""
1685
+
1686
+ web_service_content = f'''[Unit]
1687
+ Description={svc_desc}
1688
+ After={svc_after}
1689
+
1690
+ [Service]
1691
+ Type=simple
1692
+ User={svc_user}
1693
+ WorkingDirectory={svc_workdir}
1694
+ {env_lines}
1695
+ ExecStart={svc_exec}
1696
+ Restart=always
1697
+ RestartSec=10
1698
+
1699
+ [Install]
1700
+ WantedBy=multi-user.target
1701
+ '''
1702
+ web_svc_b64 = base64.b64encode(web_service_content.encode()).decode()
1703
+ runcmd_lines.append(f" - echo '{web_svc_b64}' | base64 -d > /etc/systemd/system/{svc_name}.service")
1704
+ runcmd_lines.append(" - systemctl daemon-reload")
1705
+ runcmd_lines.append(f" - systemctl enable {svc_name}.service")
1706
+ runcmd_lines.append(f" - systemctl start {svc_name}.service || true")
1707
+
1708
+ # Install CloneBox Monitor for continuous monitoring and self-healing
1709
+ scripts_dir = Path(__file__).resolve().parent.parent.parent / "scripts"
1710
+ try:
1711
+ with open(scripts_dir / "clonebox-monitor.sh") as f:
1712
+ monitor_script = f.read()
1713
+ with open(scripts_dir / "clonebox-monitor.service") as f:
1714
+ monitor_service = f.read()
1715
+ with open(scripts_dir / "clonebox-monitor.default") as f:
1716
+ monitor_config = f.read()
1717
+ except (FileNotFoundError, OSError):
1718
+ # Fallback to embedded scripts if files not found
1719
+ monitor_script = '''#!/bin/bash
1720
+ # CloneBox Monitor - Fallback embedded version
1721
+ set -euo pipefail
1722
+ LOG_FILE="/var/log/clonebox-monitor.log"
1723
+ log() { echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" | tee -a "$LOG_FILE"; }
1724
+ log_info() { log "[INFO] $1"; }
1725
+ log_warn() { log "[WARN] $1"; }
1726
+ log_error() { log "[ERROR] $1"; }
1727
+ log_success() { log "[SUCCESS] $1"; }
1728
+ while true; do
1729
+ log_info "CloneBox Monitor running..."
1730
+ sleep 60
1731
+ done
1732
+ '''
1733
+ monitor_service = '''[Unit]
1734
+ Description=CloneBox Monitor
1735
+ After=graphical-session.target
1736
+ [Service]
1737
+ Type=simple
1738
+ User=ubuntu
1739
+ ExecStart=/usr/local/bin/clonebox-monitor
1740
+ Restart=always
1741
+ [Install]
1742
+ WantedBy=default.target
1743
+ '''
1744
+ monitor_config = '''# CloneBox Monitor Configuration
1745
+ CLONEBOX_MONITOR_INTERVAL=30
1746
+ CLONEBOX_AUTO_REPAIR=true
1747
+ '''
1748
+
1749
+ # Install monitor script
1750
+ monitor_b64 = base64.b64encode(monitor_script.encode()).decode()
1751
+ runcmd_lines.append(f" - echo '{monitor_b64}' | base64 -d > /usr/local/bin/clonebox-monitor")
1752
+ runcmd_lines.append(" - chmod +x /usr/local/bin/clonebox-monitor")
1753
+
1754
+ # Install monitor configuration
1755
+ config_b64 = base64.b64encode(monitor_config.encode()).decode()
1756
+ runcmd_lines.append(f" - echo '{config_b64}' | base64 -d > /etc/default/clonebox-monitor")
1757
+
1758
+ # Install systemd user service
1759
+ service_b64 = base64.b64encode(monitor_service.encode()).decode()
1760
+ runcmd_lines.append(f" - echo '{service_b64}' | base64 -d > /etc/systemd/user/clonebox-monitor.service")
1761
+
1762
+ # Enable lingering and start monitor
1763
+ runcmd_lines.extend([
1764
+ " - loginctl enable-linger ubuntu",
1765
+ " - sudo -u ubuntu systemctl --user daemon-reload",
1766
+ " - sudo -u ubuntu systemctl --user enable clonebox-monitor.service",
1767
+ " - sudo -u ubuntu systemctl --user start clonebox-monitor.service || true",
1768
+ ])
1769
+
1770
+ # Create Python monitor service for continuous diagnostics (legacy)
1771
+ monitor_script = f'''#!/usr/bin/env python3
1772
+ """CloneBox Monitor - Continuous diagnostics and app restart service."""
1773
+ import subprocess
1774
+ import time
1775
+ import os
1776
+ import sys
1777
+ import json
1778
+ from pathlib import Path
1779
+
1780
+ REQUIRED_APPS = {json.dumps([app["name"] for app in autostart_apps])}
1781
+ CHECK_INTERVAL = 60 # seconds
1782
+ LOG_FILE = "/var/log/clonebox-monitor.log"
1783
+ STATUS_FILE = "/var/run/clonebox-monitor-status.json"
1784
+
1785
+ def log(msg):
1786
+ timestamp = time.strftime("%Y-%m-%d %H:%M:%S")
1787
+ line = f"[{{timestamp}}] {{msg}}"
1788
+ print(line)
1789
+ try:
1790
+ with open(LOG_FILE, "a") as f:
1791
+ f.write(line + "\\n")
1792
+ except:
1793
+ pass
1794
+
1795
+ def get_running_processes():
1796
+ try:
1797
+ result = subprocess.run(["ps", "aux"], capture_output=True, text=True, timeout=10)
1798
+ return result.stdout
1799
+ except:
1800
+ return ""
1801
+
1802
+ def is_app_running(app_name, ps_output):
1803
+ patterns = {{
1804
+ "pycharm-community": ["pycharm", "idea"],
1805
+ "chromium": ["chromium"],
1806
+ "firefox": ["firefox", "firefox-esr"],
1807
+ "google-chrome": ["chrome", "google-chrome"],
1808
+ "code": ["code", "vscode"],
1809
+ }}
1810
+ for pattern in patterns.get(app_name, [app_name]):
1811
+ if pattern.lower() in ps_output.lower():
1812
+ return True
1813
+ return False
1814
+
1815
+ def restart_app(app_name):
1816
+ log(f"Restarting {{app_name}}...")
1817
+ try:
1818
+ subprocess.run(
1819
+ ["sudo", "-u", "{config.username}", "systemctl", "--user", "restart", f"{{app_name}}.service"],
1820
+ timeout=30, capture_output=True
1821
+ )
1822
+ return True
1823
+ except Exception as e:
1824
+ log(f"Failed to restart {{app_name}}: {{e}}")
1825
+ return False
1826
+
1827
+ def check_mounts():
1828
+ try:
1829
+ with open("/etc/fstab", "r") as f:
1830
+ fstab = f.read()
1831
+ for line in fstab.split("\\n"):
1832
+ parts = line.split()
1833
+ if len(parts) >= 2 and parts[0].startswith("mount"):
1834
+ mp = parts[1]
1835
+ result = subprocess.run(["mountpoint", "-q", mp], capture_output=True)
1836
+ if result.returncode != 0:
1837
+ log(f"Mount {{mp}} not active, attempting remount...")
1838
+ subprocess.run(["mount", mp], capture_output=True)
1839
+ except Exception as e:
1840
+ log(f"Mount check failed: {{e}}")
1841
+
1842
+ def write_status(status):
1843
+ try:
1844
+ with open(STATUS_FILE, "w") as f:
1845
+ json.dump(status, f)
1846
+ except:
1847
+ pass
1848
+
1849
+ def main():
1850
+ log("CloneBox Monitor started")
1851
+
1852
+ while True:
1853
+ status = {{"timestamp": time.strftime("%Y-%m-%dT%H:%M:%S"), "apps": {{}}, "mounts_ok": True}}
1854
+
1855
+ # Check mounts
1856
+ check_mounts()
1857
+
1858
+ # Check apps (only if GUI session is active)
1859
+ if os.path.exists("/run/user/1000"):
1860
+ ps_output = get_running_processes()
1861
+ for app in REQUIRED_APPS:
1862
+ running = is_app_running(app, ps_output)
1863
+ status["apps"][app] = "running" if running else "stopped"
1864
+ # Don't auto-restart apps - user may have closed them intentionally
1865
+
1866
+ write_status(status)
1867
+ time.sleep(CHECK_INTERVAL)
1868
+
1869
+ if __name__ == "__main__":
1870
+ main()
1871
+ '''
1872
+ # Note: The bash monitor is already installed above, no need to install Python monitor
1873
+
1874
+ # Create logs disk for host access
1875
+ runcmd_lines.extend([
1876
+ " - mkdir -p /mnt/logs",
1877
+ " - truncate -s 1G /var/lib/libvirt/images/clonebox-logs.qcow2",
1878
+ " - mkfs.ext4 -F /var/lib/libvirt/images/clonebox-logs.qcow2",
1879
+ " - echo '/var/lib/libvirt/images/clonebox-logs.qcow2 /mnt/logs ext4 loop,defaults 0 0' >> /etc/fstab",
1880
+ " - mount -a",
1881
+ " - mkdir -p /mnt/logs/var/log",
1882
+ " - mkdir -p /mnt/logs/tmp",
1883
+ " - cp -r /var/log/clonebox*.log /mnt/logs/var/log/ 2>/dev/null || true",
1884
+ " - cp -r /tmp/*-error.log /mnt/logs/tmp/ 2>/dev/null || true",
1885
+ " - echo 'Logs disk mounted at /mnt/logs - accessible from host as /var/lib/libvirt/images/clonebox-logs.qcow2'",
1886
+ " - echo 'To view logs on host: sudo mount -o loop /var/lib/libvirt/images/clonebox-logs.qcow2 /mnt/clonebox-logs'",
1887
+ ])
1888
+
1079
1889
  # Add reboot command at the end if GUI is enabled
1080
1890
  if config.gui:
1081
1891
  runcmd_lines.append(" - echo 'Rebooting in 10 seconds to start GUI...'")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: clonebox
3
- Version: 0.1.22
3
+ Version: 0.1.24
4
4
  Summary: Clone your workstation environment to an isolated VM with selective apps, paths and services
5
5
  Author: CloneBox Team
6
6
  License: Apache-2.0
@@ -31,6 +31,7 @@ Requires-Dist: questionary>=2.0.0
31
31
  Requires-Dist: psutil>=5.9.0
32
32
  Requires-Dist: pyyaml>=6.0
33
33
  Requires-Dist: pydantic>=2.0.0
34
+ Requires-Dist: python-dotenv>=1.0.0
34
35
  Provides-Extra: dev
35
36
  Requires-Dist: pytest>=7.0.0; extra == "dev"
36
37
  Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
@@ -81,6 +82,9 @@ CloneBox lets you create isolated virtual machines with only the applications, d
81
82
  - ⚡ **Fast creation** - No full disk cloning, VMs are ready in seconds
82
83
  - 📥 **Auto-download** - Automatically downloads and caches Ubuntu cloud images (stored in ~/Downloads)
83
84
  - 📊 **Health monitoring** - Built-in health checks for packages, services, and mounts
85
+ - 🔄 **Self-healing** - Automatic monitoring and repair of apps and services
86
+ - 📈 **Live monitoring** - Real-time dashboard for running applications and services
87
+ - 🔧 **Repair tools** - One-click fix for common VM issues (audio, permissions, mounts)
84
88
  - 🔄 **VM migration** - Export/import VMs with data between workstations
85
89
  - 🧪 **Configuration testing** - Validate VM settings and functionality
86
90
  - 📁 **App data sync** - Include browser profiles, IDE settings, and app configs
@@ -96,10 +100,12 @@ CloneBox excels in scenarios where developers need:
96
100
  ## What's Next
97
101
 
98
102
  Project roadmap includes:
99
- - Container runtime integration (Podman/Docker lightweight mode)
100
- - Local dashboard for VM and container management
101
- - Profile system for reusable configuration presets
102
- - Proxmox export capabilities for production migration
103
+ - **v0.2.0**: `clonebox exec` command, VM snapshots, web dashboard MVP
104
+ - **v0.3.0**: Container runtime integration (Podman/Docker), multi-VM orchestration
105
+ - **v0.4.0**: Cloud provider support (AWS, GCP, Azure), Windows WSL2 support
106
+ - **v1.0.0**: Production-ready with full monitoring, backup/restore, enterprise features
107
+
108
+ See [TODO.md](TODO.md) for detailed roadmap and [CONTRIBUTING.md](CONTRIBUTING.md) for contribution guidelines.
103
109
 
104
110
 
105
111
 
@@ -111,6 +117,8 @@ Kluczowe komendy:
111
117
  - `clonebox` – interaktywny wizard (detect + create + start)
112
118
  - `clonebox detect` – skanuje usługi/apps/ścieżki
113
119
  - `clonebox clone . --user --run` – szybki klon bieżącego katalogu z użytkownikiem i autostartem
120
+ - `clonebox watch . --user` – monitoruj na żywo aplikacje i usługi w VM
121
+ - `clonebox repair . --user` – napraw problemy z uprawnieniami, audio, usługami
114
122
  - `clonebox container up|ps|stop|rm` – lekki runtime kontenerowy (podman/docker)
115
123
  - `clonebox dashboard` – lokalny dashboard (VM + containers)
116
124
 
@@ -496,6 +504,111 @@ clonebox status . --user --health
496
504
  # Or rebuild: clonebox clone . --user --run --replace
497
505
  ```
498
506
 
507
+ ## 📊 Monitoring and Self-Healing
508
+
509
+ CloneBox includes continuous monitoring and automatic self-healing capabilities for both GUI applications and system services.
510
+
511
+ ### Monitor Running Applications and Services
512
+
513
+ ```bash
514
+ # Watch real-time status of apps and services
515
+ clonebox watch . --user
516
+
517
+ # Output shows live dashboard:
518
+ # ╔══════════════════════════════════════════════════════════╗
519
+ # ║ CloneBox Live Monitor ║
520
+ # ╠══════════════════════════════════════════════════════════╣
521
+ # ║ 🖥️ GUI Apps: ║
522
+ # ║ ✅ pycharm-community PID: 1234 Memory: 512MB ║
523
+ # ║ ✅ firefox PID: 5678 Memory: 256MB ║
524
+ # ║ ❌ chromium Not running ║
525
+ # ║ ║
526
+ # ║ 🔧 System Services: ║
527
+ # ║ ✅ docker Active: 2h 15m ║
528
+ # ║ ✅ nginx Active: 1h 30m ║
529
+ # ║ ✅ uvicorn Active: 45m (port 8000) ║
530
+ # ║ ║
531
+ # ║ 📊 Last check: 2024-01-31 13:25:30 ║
532
+ # ║ 🔄 Next check in: 25 seconds ║
533
+ # ╚══════════════════════════════════════════════════════════╝
534
+
535
+ # Check detailed status with logs
536
+ clonebox status . --user --verbose
537
+
538
+ # View monitor logs from host
539
+ ./scripts/clonebox-logs.sh # Interactive log viewer
540
+ # Or via SSH:
541
+ ssh ubuntu@<IP_VM> "tail -f /var/log/clonebox-monitor.log"
542
+ ```
543
+
544
+ ### Repair and Troubleshooting
545
+
546
+ ```bash
547
+ # Run automatic repair from host
548
+ clonebox repair . --user
549
+
550
+ # This triggers the repair script inside VM which:
551
+ # - Fixes directory permissions (pulse, ibus, dconf)
552
+ # - Restarts audio services (PulseAudio/PipeWire)
553
+ # - Reconnects snap interfaces
554
+ # - Remounts missing filesystems
555
+ # - Resets GNOME keyring if needed
556
+
557
+ # Interactive repair menu (via SSH)
558
+ ssh ubuntu@<IP_VM> "clonebox-repair"
559
+
560
+ # Manual repair options from host:
561
+ clonebox repair . --user --auto # Full automatic repair
562
+ clonebox repair . --user --perms # Fix permissions only
563
+ clonebox repair . --user --audio # Fix audio only
564
+ clonebox repair . --user --snaps # Reconnect snaps only
565
+ clonebox repair . --user --mounts # Remount filesystems only
566
+
567
+ # Check repair status (via SSH)
568
+ ssh ubuntu@<IP_VM> "cat /var/run/clonebox-status"
569
+
570
+ # View repair logs
571
+ ./scripts/clonebox-logs.sh # Interactive viewer
572
+ # Or via SSH:
573
+ ssh ubuntu@<IP_VM> "tail -n 50 /var/log/clonebox-boot.log"
574
+ ```
575
+
576
+ ### Monitor Configuration
577
+
578
+ The monitoring system is configured through environment variables in `.env`:
579
+
580
+ ```bash
581
+ # Enable/disable monitoring
582
+ CLONEBOX_ENABLE_MONITORING=true
583
+ CLONEBOX_MONITOR_INTERVAL=30 # Check every 30 seconds
584
+ CLONEBOX_AUTO_REPAIR=true # Auto-restart failed services
585
+ CLONEBOX_WATCH_APPS=true # Monitor GUI apps
586
+ CLONEBOX_WATCH_SERVICES=true # Monitor system services
587
+ ```
588
+
589
+ ### Inside the VM - Manual Controls
590
+
591
+ ```bash
592
+ # Check monitor service status
593
+ systemctl --user status clonebox-monitor
594
+
595
+ # View monitor logs
596
+ journalctl --user -u clonebox-monitor -f
597
+ tail -f /var/log/clonebox-monitor.log
598
+
599
+ # Stop/start monitoring
600
+ systemctl --user stop clonebox-monitor
601
+ systemctl --user start clonebox-monitor
602
+
603
+ # Check last status
604
 + cat /var/run/clonebox-monitor-status.json
605
+
606
+ # Run repair manually
607
+ clonebox-repair --all # Run all fixes
608
+ clonebox-repair --status # Show current status
609
+ clonebox-repair --logs # Show recent logs
610
+ ```
611
+
499
612
  ### Export/Import Workflow
500
613
 
501
614
  ```bash
@@ -536,6 +649,11 @@ virt-viewer --connect qemu:///session clone-clonebox
536
649
  # Check VM details:
537
650
  clonebox list # List all VMs
538
651
  virsh --connect qemu:///session dominfo clone-clonebox
652
+
653
+ # Restart VM if needed:
654
+ clonebox stop . --user && clonebox start . --user # Soft reboot
655
+ virsh --connect qemu:///session reboot clone-clonebox # Direct reboot
656
+ virsh --connect qemu:///session reset clone-clonebox # Hard reset if frozen
539
657
  ```
540
658
 
541
659
  ## Legacy Examples (Manual Config)
@@ -1,7 +1,7 @@
1
1
  clonebox/__init__.py,sha256=CyfHVVq6KqBr4CNERBpXk_O6Q5B35q03YpdQbokVvvI,408
2
2
  clonebox/__main__.py,sha256=Fcoyzwwyz5-eC_sBlQk5a5RbKx8uodQz5sKJ190U0NU,135
3
3
  clonebox/cli.py,sha256=vbJ65ShdXG1nGkQteCaFtDTas0L2RNV--aay2Qx-6F0,110765
4
- clonebox/cloner.py,sha256=dX6K56goT3qZD3GOYjZBuAPMrAI0PriyFJWsJpQvyKc,46320
4
+ clonebox/cloner.py,sha256=qIcSgG-W28-qBhEVSL8d2gFx2-BI7ygh1Jyh9BjAIBI,79100
5
5
  clonebox/container.py,sha256=tiYK1ZB-DhdD6A2FuMA0h_sRNkUI7KfYcJ0tFOcdyeM,6105
6
6
  clonebox/dashboard.py,sha256=RhSPvR6kWglqXeLkCWesBZQid7wv2WpJa6w78mXbPjY,4268
7
7
  clonebox/detector.py,sha256=aS_QlbG93-DE3hsjRt88E7O-PGC2TUBgUbP9wqT9g60,23221
@@ -9,9 +9,9 @@ clonebox/models.py,sha256=yBRUlJejpeJHZjvCYMGq1nXPFcmhLFxN-LqkEyveWsA,7913
9
9
  clonebox/profiles.py,sha256=VaKVuxCrgyMxx-8_WOTcw7E8irwGxUPhZHVY6RxYYiE,2034
10
10
  clonebox/validator.py,sha256=z4YuIgVnX6ZqfIdJtjKIFwZ-iWlRUnpX7gmWwq-Jr88,35352
11
11
  clonebox/templates/profiles/ml-dev.yaml,sha256=MT7Wu3xGBnYIsO5mzZ2GDI4AAEFGOroIx0eU3XjNARg,140
12
- clonebox-0.1.22.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
13
- clonebox-0.1.22.dist-info/METADATA,sha256=MuI44ArtnU0ql1rF99Hf_4frTRHe7_AikJK9w2jk6tI,41591
14
- clonebox-0.1.22.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
15
- clonebox-0.1.22.dist-info/entry_points.txt,sha256=FES95Vi3btfViLEEoHdb8nikNxTqzaooi9ehZw9ZfWI,47
16
- clonebox-0.1.22.dist-info/top_level.txt,sha256=LdMo2cvCrEcRGH2M8JgQNVsCoszLV0xug6kx1JnaRjo,9
17
- clonebox-0.1.22.dist-info/RECORD,,
12
+ clonebox-0.1.24.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
13
+ clonebox-0.1.24.dist-info/METADATA,sha256=ua1S-CkW_inx_jQYxZLCEGvvRW6o3qY6HIZqJsark_U,46452
14
+ clonebox-0.1.24.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
15
+ clonebox-0.1.24.dist-info/entry_points.txt,sha256=FES95Vi3btfViLEEoHdb8nikNxTqzaooi9ehZw9ZfWI,47
16
+ clonebox-0.1.24.dist-info/top_level.txt,sha256=LdMo2cvCrEcRGH2M8JgQNVsCoszLV0xug6kx1JnaRjo,9
17
+ clonebox-0.1.24.dist-info/RECORD,,