xenfra 0.4.2__py3-none-any.whl → 0.4.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- xenfra/commands/__init__.py +3 -3
- xenfra/commands/auth.py +144 -144
- xenfra/commands/auth_device.py +164 -164
- xenfra/commands/deployments.py +1133 -912
- xenfra/commands/intelligence.py +503 -412
- xenfra/commands/projects.py +204 -204
- xenfra/commands/security_cmd.py +233 -233
- xenfra/main.py +76 -75
- xenfra/utils/__init__.py +3 -3
- xenfra/utils/auth.py +374 -374
- xenfra/utils/codebase.py +169 -169
- xenfra/utils/config.py +459 -432
- xenfra/utils/errors.py +116 -116
- xenfra/utils/file_sync.py +286 -0
- xenfra/utils/security.py +336 -336
- xenfra/utils/validation.py +234 -234
- xenfra-0.4.4.dist-info/METADATA +113 -0
- xenfra-0.4.4.dist-info/RECORD +21 -0
- {xenfra-0.4.2.dist-info → xenfra-0.4.4.dist-info}/WHEEL +2 -2
- xenfra-0.4.2.dist-info/METADATA +0 -118
- xenfra-0.4.2.dist-info/RECORD +0 -20
- {xenfra-0.4.2.dist-info → xenfra-0.4.4.dist-info}/entry_points.txt +0 -0
xenfra/utils/errors.py
CHANGED
|
@@ -1,116 +1,116 @@
|
|
|
1
|
-
"""Human-friendly error messages with actionable solutions."""
|
|
2
|
-
|
|
3
|
-
from rich.console import Console
|
|
4
|
-
|
|
5
|
-
console = Console()
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
ERROR_SOLUTIONS = {
|
|
9
|
-
"port_in_use": {
|
|
10
|
-
"message": "Port {port} is already in use on the droplet",
|
|
11
|
-
"solution": "Change the port in xenfra.yaml or stop the conflicting service",
|
|
12
|
-
"command": "ssh root@{{ip}} 'lsof -i :{port}' # Find process using port",
|
|
13
|
-
},
|
|
14
|
-
"missing_dependency": {
|
|
15
|
-
"message": "Missing dependency: {package}",
|
|
16
|
-
"solution": "Add {package} to dependencies in {file}",
|
|
17
|
-
"command": "uv add {package} # OR: echo '{package}' >> requirements.txt",
|
|
18
|
-
},
|
|
19
|
-
"ssh_failure": {
|
|
20
|
-
"message": "Cannot connect to droplet via SSH",
|
|
21
|
-
"solution": "Check firewall rules, wait for droplet boot, or verify SSH keys",
|
|
22
|
-
"command": "ssh -v root@{ip} # Verbose SSH for debugging",
|
|
23
|
-
},
|
|
24
|
-
"docker_build_failed": {
|
|
25
|
-
"message": "Docker build failed",
|
|
26
|
-
"solution": "Check Dockerfile syntax and base image availability",
|
|
27
|
-
"command": "docker build . --no-cache # Test locally",
|
|
28
|
-
},
|
|
29
|
-
"health_check_failed": {
|
|
30
|
-
"message": "Application failed health check",
|
|
31
|
-
"solution": "Ensure your app responds on port {port} at /health or /",
|
|
32
|
-
"command": "curl http://{{ip}}:{port}/health",
|
|
33
|
-
},
|
|
34
|
-
"out_of_memory": {
|
|
35
|
-
"message": "Container out of memory",
|
|
36
|
-
"solution": "Upgrade to a larger instance size in xenfra.yaml",
|
|
37
|
-
"command": "docker stats # Check memory usage",
|
|
38
|
-
},
|
|
39
|
-
}
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
def show_error_with_solution(error_type: str, **kwargs) -> None:
|
|
43
|
-
"""
|
|
44
|
-
Display error with actionable solution.
|
|
45
|
-
|
|
46
|
-
Args:
|
|
47
|
-
error_type: Key from ERROR_SOLUTIONS
|
|
48
|
-
**kwargs: Template variables (port, ip, package, file, etc.)
|
|
49
|
-
"""
|
|
50
|
-
error = ERROR_SOLUTIONS.get(error_type)
|
|
51
|
-
|
|
52
|
-
if not error:
|
|
53
|
-
# Fallback for unknown errors
|
|
54
|
-
console.print(f"[red]❌ Error: {error_type}[/red]")
|
|
55
|
-
return
|
|
56
|
-
|
|
57
|
-
# Format message with provided kwargs
|
|
58
|
-
try:
|
|
59
|
-
message = error["message"].format(**kwargs)
|
|
60
|
-
solution = error["solution"].format(**kwargs)
|
|
61
|
-
command = error.get("command", "").format(**kwargs)
|
|
62
|
-
except KeyError as e:
|
|
63
|
-
console.print(f"[red]❌ Error formatting message: missing {e}[/red]")
|
|
64
|
-
return
|
|
65
|
-
|
|
66
|
-
console.print()
|
|
67
|
-
console.print(f"[red]❌ {message}[/red]")
|
|
68
|
-
console.print(f"[yellow]💡 Solution: {solution}[/yellow]")
|
|
69
|
-
|
|
70
|
-
if command:
|
|
71
|
-
console.print(f"[dim]Try: {command}[/dim]")
|
|
72
|
-
console.print()
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
def detect_error_type(error_message: str) -> tuple[str, dict]:
|
|
76
|
-
"""
|
|
77
|
-
Attempt to detect error type from message.
|
|
78
|
-
|
|
79
|
-
Returns:
|
|
80
|
-
(error_type, kwargs) for show_error_with_solution()
|
|
81
|
-
"""
|
|
82
|
-
error_lower = error_message.lower()
|
|
83
|
-
|
|
84
|
-
# Port detection
|
|
85
|
-
if "port" in error_lower and ("in use" in error_lower or "already" in error_lower):
|
|
86
|
-
# Try to extract port number
|
|
87
|
-
import re
|
|
88
|
-
port_match = re.search(r"port\s+(\d+)", error_lower)
|
|
89
|
-
port = port_match.group(1) if port_match else "8000"
|
|
90
|
-
return "port_in_use", {"port": port}
|
|
91
|
-
|
|
92
|
-
# SSH detection
|
|
93
|
-
if "ssh" in error_lower or "connection refused" in error_lower:
|
|
94
|
-
return "ssh_failure", {"ip": "DROPLET_IP"}
|
|
95
|
-
|
|
96
|
-
# Docker detection
|
|
97
|
-
if "docker" in error_lower and "build" in error_lower:
|
|
98
|
-
return "docker_build_failed", {}
|
|
99
|
-
|
|
100
|
-
# Health check detection
|
|
101
|
-
if "health" in error_lower and ("fail" in error_lower or "timeout" in error_lower):
|
|
102
|
-
return "health_check_failed", {"port": "8000"}
|
|
103
|
-
|
|
104
|
-
# Memory detection
|
|
105
|
-
if "memory" in error_lower or "oom" in error_lower:
|
|
106
|
-
return "out_of_memory", {}
|
|
107
|
-
|
|
108
|
-
# Module not found
|
|
109
|
-
if "modulenotfounderror" in error_lower or "no module named" in error_lower:
|
|
110
|
-
import re
|
|
111
|
-
module_match = re.search(r"no module named ['\"]([^'\"]+)['\"]", error_lower)
|
|
112
|
-
package = module_match.group(1) if module_match else "PACKAGE_NAME"
|
|
113
|
-
return "missing_dependency", {"package": package, "file": "pyproject.toml"}
|
|
114
|
-
|
|
115
|
-
# Unknown
|
|
116
|
-
return None, {}
|
|
1
|
+
"""Human-friendly error messages with actionable solutions."""
|
|
2
|
+
|
|
3
|
+
from rich.console import Console
|
|
4
|
+
|
|
5
|
+
console = Console()
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
# Catalogue of known deployment failure modes. Each entry maps an error key to:
#   message  - user-facing description (str.format template)
#   solution - actionable next step for the user
#   command  - optional shell command to try; '{{' / '}}' escape braces so a
#              literal '{ip}' placeholder survives .format() for the user
ERROR_SOLUTIONS = {
    "port_in_use": {
        "message": "Port {port} is already in use on the droplet",
        "solution": "Change the port in xenfra.yaml or stop the conflicting service",
        "command": "ssh root@{{ip}} 'lsof -i :{port}' # Find process using port",
    },
    "missing_dependency": {
        "message": "Missing dependency: {package}",
        "solution": "Add {package} to dependencies in {file}",
        "command": "uv add {package} # OR: echo '{package}' >> requirements.txt",
    },
    "ssh_failure": {
        "message": "Cannot connect to droplet via SSH",
        "solution": "Check firewall rules, wait for droplet boot, or verify SSH keys",
        "command": "ssh -v root@{ip} # Verbose SSH for debugging",
    },
    "docker_build_failed": {
        "message": "Docker build failed",
        "solution": "Check Dockerfile syntax and base image availability",
        "command": "docker build . --no-cache # Test locally",
    },
    "health_check_failed": {
        "message": "Application failed health check",
        "solution": "Ensure your app responds on port {port} at /health or /",
        "command": "curl http://{{ip}}:{port}/health",
    },
    "out_of_memory": {
        "message": "Container out of memory",
        "solution": "Upgrade to a larger instance size in xenfra.yaml",
        "command": "docker stats # Check memory usage",
    },
}
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def show_error_with_solution(error_type: str, **kwargs) -> None:
    """
    Display error with actionable solution.

    Args:
        error_type: Key from ERROR_SOLUTIONS
        **kwargs: Template variables (port, ip, package, file, etc.)
    """
    entry = ERROR_SOLUTIONS.get(error_type)
    if entry is None:
        # Unknown error key: show it raw rather than crash.
        console.print(f"[red]❌ Error: {error_type}[/red]")
        return

    # Render all three templates up front; a missing template variable
    # aborts the whole display with a diagnostic instead of partial output.
    try:
        rendered_message = entry["message"].format(**kwargs)
        rendered_solution = entry["solution"].format(**kwargs)
        rendered_command = entry.get("command", "").format(**kwargs)
    except KeyError as e:
        console.print(f"[red]❌ Error formatting message: missing {e}[/red]")
        return

    console.print()
    console.print(f"[red]❌ {rendered_message}[/red]")
    console.print(f"[yellow]💡 Solution: {rendered_solution}[/yellow]")
    if rendered_command:
        console.print(f"[dim]Try: {rendered_command}[/dim]")
    console.print()
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def detect_error_type(error_message: str) -> "tuple[str | None, dict]":
    """
    Attempt to detect error type from message.

    Args:
        error_message: Raw error text from a failed deployment step.

    Returns:
        (error_type, kwargs) for show_error_with_solution(), or
        (None, {}) when the message matches no known pattern.
    """
    import re

    error_lower = error_message.lower()

    # Port conflicts ("port 8080 is already in use", ...)
    if "port" in error_lower and ("in use" in error_lower or "already" in error_lower):
        port_match = re.search(r"port\s+(\d+)", error_lower)
        port = port_match.group(1) if port_match else "8000"
        return "port_in_use", {"port": port}

    # SSH / connectivity problems
    if "ssh" in error_lower or "connection refused" in error_lower:
        return "ssh_failure", {"ip": "DROPLET_IP"}

    # Docker build failures
    if "docker" in error_lower and "build" in error_lower:
        return "docker_build_failed", {}

    # Health-check failures
    if "health" in error_lower and ("fail" in error_lower or "timeout" in error_lower):
        return "health_check_failed", {"port": "8000"}

    # Out-of-memory conditions
    if "memory" in error_lower or "oom" in error_lower:
        return "out_of_memory", {}

    # Missing Python modules. Search the ORIGINAL message case-insensitively
    # so the suggested package keeps its real casing (e.g. 'PIL', not 'pil').
    if "modulenotfounderror" in error_lower or "no module named" in error_lower:
        module_match = re.search(
            r"no module named ['\"]([^'\"]+)['\"]", error_message, re.IGNORECASE
        )
        package = module_match.group(1) if module_match else "PACKAGE_NAME"
        return "missing_dependency", {"package": package, "file": "pyproject.toml"}

    # Unknown
    return None, {}
|
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
"""
|
|
2
|
+
File synchronization utilities for delta uploads.
|
|
3
|
+
|
|
4
|
+
Provides functions to scan project files, compute SHA256 hashes,
|
|
5
|
+
and manage local file caches for incremental deployments.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import hashlib
|
|
9
|
+
import json
|
|
10
|
+
import os
|
|
11
|
+
from datetime import datetime
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Dict, List, Optional, Set
|
|
14
|
+
|
|
15
|
+
# Patterns to exclude from deployment
|
|
16
|
+
# Directory/file name patterns excluded from upload. Each entry is matched
# against every component of a file's path relative to the project root;
# entries beginning with '*' act as filename-suffix wildcards.
EXCLUDE_PATTERNS: Set[str] = {
    # Version control
    '.git', '.svn', '.hg',
    # Python tooling and build output
    '.venv', 'venv', '__pycache__', '*.pyc', '*.pyo',
    '.pytest_cache', '.mypy_cache', '*.egg-info', 'dist', 'build',
    # Node.js
    'node_modules',
    # IDE/Editor
    '.idea', '.vscode', '*.swp',
    # Xenfra state directory (never ship it back up)
    '.xenfra',
    # Environment files (may contain secrets)
    '.env', '.env.local', '.env.*.local',
    # OS cruft
    '.DS_Store', 'Thumbs.db',
}

# Compiled/binary artifacts excluded by file extension regardless of name.
EXCLUDE_EXTENSIONS: Set[str] = {
    '.pyc', '.pyo', '.so', '.dylib', '.dll',
    '.exe', '.bin', '.obj', '.o',
}
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def should_exclude(path: Path, root: Path) -> bool:
    """Check if a path should be excluded from upload.

    Args:
        path: Absolute path of the candidate file.
        root: Project root that *path* lives under.

    Returns:
        True when any component of the relative path matches
        EXCLUDE_PATTERNS (exact or glob) or the file extension is in
        EXCLUDE_EXTENSIONS.
    """
    import fnmatch

    for part in path.relative_to(root).parts:
        # Exact directory/file-name match
        if part in EXCLUDE_PATTERNS:
            return True
        # Glob match. fnmatchcase handles '*' anywhere in the pattern
        # (e.g. '.env.*.local'), which a plain endswith() check on
        # leading-'*' patterns could never match.
        for pattern in EXCLUDE_PATTERNS:
            if '*' in pattern and fnmatch.fnmatchcase(part, pattern):
                return True

    # Extension-based exclusion (case-insensitive)
    if path.suffix.lower() in EXCLUDE_EXTENSIONS:
        return True

    return False
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def compute_file_sha(filepath: str) -> str:
    """Compute SHA256 hash of a file's content."""
    digest = hashlib.sha256()
    # Stream in 8 KiB chunks so huge files don't load into memory at once.
    with open(filepath, 'rb') as fh:
        while chunk := fh.read(8192):
            digest.update(chunk)
    return digest.hexdigest()
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def scan_project_files(root: str = '.') -> List[Dict]:
    """
    Scan project directory and return list of files with their metadata.

    Args:
        root: Project root directory to walk (default: current directory).

    Returns:
        List of dicts with keys: path, sha, size, abs_path
    """
    root_path = Path(root).resolve()
    results: List[Dict] = []

    for candidate in root_path.rglob('*'):
        # Only regular files that pass the exclusion rules
        if not candidate.is_file() or should_exclude(candidate, root_path):
            continue

        size = candidate.stat().st_size
        # Skip very large files (> 50MB)
        if size > 50 * 1024 * 1024:
            continue

        results.append({
            # Relative path normalized to forward slashes for portability
            'path': str(candidate.relative_to(root_path)).replace('\\', '/'),
            'sha': compute_file_sha(str(candidate)),
            'size': size,
            'abs_path': str(candidate),
        })

    return results
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
def get_xenfra_dir(project_root: str = '.') -> Path:
    """Get or create the .xenfra directory (including its cache/ subdirectory)."""
    base = Path(project_root).resolve() / '.xenfra'
    base.mkdir(exist_ok=True)
    # The cache subdirectory holds file-hash state for delta uploads.
    (base / 'cache').mkdir(exist_ok=True)
    return base
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
def load_file_cache(project_root: str = '.') -> Dict[str, Dict]:
    """
    Load cached file hashes from .xenfra/cache/file_hashes.json.

    Returns:
        Dict mapping file paths to {sha, mtime, size}; an empty dict when
        the cache is missing or unreadable (forces a full rescan).
    """
    cache_file = get_xenfra_dir(project_root) / 'cache' / 'file_hashes.json'
    if not cache_file.exists():
        return {}
    try:
        with open(cache_file, 'r') as fh:
            return json.load(fh)
    except (json.JSONDecodeError, IOError):
        # Corrupt or unreadable cache: treat as empty rather than fail.
        return {}
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def save_file_cache(cache: Dict[str, Dict], project_root: str = '.'):
    """Save file hashes to .xenfra/cache/file_hashes.json."""
    target = get_xenfra_dir(project_root) / 'cache' / 'file_hashes.json'
    with open(target, 'w') as fh:
        json.dump(cache, fh, indent=2)
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def scan_project_files_cached(root: str = '.') -> List[Dict]:
    """
    Scan project files using local cache for unchanged files.

    Only recomputes SHA for files whose mtime or size changed.
    This is much faster for large projects with few changes.

    Args:
        root: Project root directory to walk (default: current directory).

    Returns:
        List of dicts with keys: path, sha, size, abs_path. As a side
        effect, rewrites the on-disk hash cache with current entries
        (stale entries for deleted files drop out).
    """
    files: List[Dict] = []
    root_path = Path(root).resolve()
    cache = load_file_cache(root)
    new_cache: Dict[str, Dict] = {}

    for filepath in root_path.rglob('*'):
        if not filepath.is_file():
            continue

        if should_exclude(filepath, root_path):
            continue

        # Single stat() call per file (the original issued one per field).
        st = filepath.stat()
        file_size = st.st_size
        # Skip very large files (> 50MB)
        if file_size > 50 * 1024 * 1024:
            continue

        rel_path = str(filepath.relative_to(root_path)).replace('\\', '/')
        mtime = st.st_mtime

        # Reuse the cached SHA only when both mtime and size are unchanged.
        cached = cache.get(rel_path)
        if cached and cached.get('mtime') == mtime and cached.get('size') == file_size:
            sha = cached['sha']
        else:
            sha = compute_file_sha(str(filepath))

        new_cache[rel_path] = {
            'sha': sha,
            'mtime': mtime,
            'size': file_size,
        }

        files.append({
            'path': rel_path,
            'sha': sha,
            'size': file_size,
            'abs_path': str(filepath),
        })

    # Save updated cache
    save_file_cache(new_cache, root)

    return files
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
def load_project_config(project_root: str = '.') -> Optional[Dict]:
    """Load .xenfra/config.json if it exists; None when absent or unparsable."""
    config_file = Path(project_root).resolve() / '.xenfra' / 'config.json'
    if not config_file.exists():
        return None
    try:
        with open(config_file, 'r') as fh:
            return json.load(fh)
    except (json.JSONDecodeError, IOError):
        return None
|
|
234
|
+
|
|
235
|
+
|
|
236
|
+
def ensure_gitignore_ignored(project_root: str = '.'):
    """Ensure .xenfra/ is in the .gitignore file.

    Returns True when the file was created or the entry was appended;
    False when the entry was already present or on I/O failure.
    """
    gitignore_path = Path(project_root).resolve() / '.gitignore'
    entry = '.xenfra/\n'

    # No .gitignore yet: create one containing just our entry.
    if not gitignore_path.exists():
        try:
            with open(gitignore_path, 'w') as fh:
                fh.write(entry)
            return True
        except IOError:
            return False

    try:
        with open(gitignore_path, 'r') as fh:
            existing = fh.read()
        # Any mention of '.xenfra' (with or without trailing slash) counts
        # as already ignored.
        if '.xenfra' not in existing:
            with open(gitignore_path, 'a') as fh:
                if not existing.endswith('\n'):
                    fh.write('\n')
                fh.write(entry)
            return True
    except IOError:
        return False

    return False
|
|
265
|
+
|
|
266
|
+
|
|
267
|
+
def save_project_config(config: Dict, project_root: str = '.'):
    """Save project config to .xenfra/config.json."""
    destination = get_xenfra_dir(project_root) / 'config.json'
    with open(destination, 'w') as fh:
        json.dump(config, fh, indent=2)
|
|
274
|
+
|
|
275
|
+
|
|
276
|
+
def update_last_deployment(deployment_id: str, url: str = None, project_root: str = '.'):
    """Update the last deployment info in project config.

    Args:
        deployment_id: Identifier of the deployment that just ran.
        url: Public URL of the deployment, if any.
        project_root: Project directory whose .xenfra/config.json to update.
    """
    from datetime import timezone

    config = load_project_config(project_root) or {}

    # Use a timezone-aware clock (datetime.utcnow() is deprecated since
    # Python 3.12) but strip tzinfo so isoformat() keeps the bare
    # 'YYYY-MM-DDTHH:MM:SS.ffffff' shape the trailing 'Z' expects.
    now_utc = datetime.now(timezone.utc).replace(tzinfo=None)

    config['lastDeployment'] = {
        'id': deployment_id,
        'url': url,
        'createdAt': now_utc.isoformat() + 'Z',
    }

    save_project_config(config, project_root)
|