xenfra 0.4.3__py3-none-any.whl → 0.4.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
xenfra/utils/file_sync.py CHANGED
@@ -1,286 +1,286 @@
1
- """
2
- File synchronization utilities for delta uploads.
3
-
4
- Provides functions to scan project files, compute SHA256 hashes,
5
- and manage local file caches for incremental deployments.
6
- """
7
-
8
- import hashlib
9
- import json
10
- import os
11
- from datetime import datetime
12
- from pathlib import Path
13
- from typing import Dict, List, Optional, Set
14
-
15
- # Patterns to exclude from deployment
16
- EXCLUDE_PATTERNS: Set[str] = {
17
- # Version control
18
- '.git',
19
- '.svn',
20
- '.hg',
21
-
22
- # Python
23
- '.venv',
24
- 'venv',
25
- '__pycache__',
26
- '*.pyc',
27
- '*.pyo',
28
- '.pytest_cache',
29
- '.mypy_cache',
30
- '*.egg-info',
31
- 'dist',
32
- 'build',
33
-
34
- # Node.js
35
- 'node_modules',
36
-
37
- # IDE/Editor
38
- '.idea',
39
- '.vscode',
40
- '*.swp',
41
-
42
- # Xenfra
43
- '.xenfra',
44
-
45
- # Environment
46
- '.env',
47
- '.env.local',
48
- '.env.*.local',
49
-
50
- # OS
51
- '.DS_Store',
52
- 'Thumbs.db',
53
- }
54
-
55
- # File extensions to always exclude
56
- EXCLUDE_EXTENSIONS: Set[str] = {
57
- '.pyc', '.pyo', '.so', '.dylib', '.dll',
58
- '.exe', '.bin', '.obj', '.o',
59
- }
60
-
61
-
62
- def should_exclude(path: Path, root: Path) -> bool:
63
- """Check if a path should be excluded from upload."""
64
- rel_parts = path.relative_to(root).parts
65
-
66
- # Check each part of the path against exclusion patterns
67
- for part in rel_parts:
68
- if part in EXCLUDE_PATTERNS:
69
- return True
70
- # Check wildcard patterns
71
- for pattern in EXCLUDE_PATTERNS:
72
- if pattern.startswith('*') and part.endswith(pattern[1:]):
73
- return True
74
-
75
- # Check file extension
76
- if path.suffix.lower() in EXCLUDE_EXTENSIONS:
77
- return True
78
-
79
- return False
80
-
81
-
82
- def compute_file_sha(filepath: str) -> str:
83
- """Compute SHA256 hash of a file's content."""
84
- sha256 = hashlib.sha256()
85
- with open(filepath, 'rb') as f:
86
- for chunk in iter(lambda: f.read(8192), b''):
87
- sha256.update(chunk)
88
- return sha256.hexdigest()
89
-
90
-
91
- def scan_project_files(root: str = '.') -> List[Dict]:
92
- """
93
- Scan project directory and return list of files with their metadata.
94
-
95
- Returns:
96
- List of dicts with keys: path, sha, size, abs_path
97
- """
98
- files = []
99
- root_path = Path(root).resolve()
100
-
101
- for filepath in root_path.rglob('*'):
102
- # Skip directories
103
- if not filepath.is_file():
104
- continue
105
-
106
- # Check exclusions
107
- if should_exclude(filepath, root_path):
108
- continue
109
-
110
- # Skip very large files (> 50MB)
111
- file_size = filepath.stat().st_size
112
- if file_size > 50 * 1024 * 1024:
113
- continue
114
-
115
- # Normalize path to use forward slashes
116
- rel_path = str(filepath.relative_to(root_path)).replace('\\', '/')
117
-
118
- files.append({
119
- 'path': rel_path,
120
- 'sha': compute_file_sha(str(filepath)),
121
- 'size': file_size,
122
- 'abs_path': str(filepath),
123
- })
124
-
125
- return files
126
-
127
-
128
- def get_xenfra_dir(project_root: str = '.') -> Path:
129
- """Get or create the .xenfra directory."""
130
- xenfra_dir = Path(project_root).resolve() / '.xenfra'
131
- xenfra_dir.mkdir(exist_ok=True)
132
-
133
- # Create cache subdirectory
134
- cache_dir = xenfra_dir / 'cache'
135
- cache_dir.mkdir(exist_ok=True)
136
-
137
- return xenfra_dir
138
-
139
-
140
- def load_file_cache(project_root: str = '.') -> Dict[str, Dict]:
141
- """
142
- Load cached file hashes from .xenfra/cache/file_hashes.json.
143
-
144
- Returns:
145
- Dict mapping file paths to {sha, mtime, size}
146
- """
147
- xenfra_dir = get_xenfra_dir(project_root)
148
- cache_file = xenfra_dir / 'cache' / 'file_hashes.json'
149
-
150
- if cache_file.exists():
151
- try:
152
- with open(cache_file, 'r') as f:
153
- return json.load(f)
154
- except (json.JSONDecodeError, IOError):
155
- return {}
156
- return {}
157
-
158
-
159
- def save_file_cache(cache: Dict[str, Dict], project_root: str = '.'):
160
- """Save file hashes to .xenfra/cache/file_hashes.json."""
161
- xenfra_dir = get_xenfra_dir(project_root)
162
- cache_file = xenfra_dir / 'cache' / 'file_hashes.json'
163
-
164
- with open(cache_file, 'w') as f:
165
- json.dump(cache, f, indent=2)
166
-
167
-
168
- def scan_project_files_cached(root: str = '.') -> List[Dict]:
169
- """
170
- Scan project files using local cache for unchanged files.
171
-
172
- Only recomputes SHA for files whose mtime or size changed.
173
- This is much faster for large projects with few changes.
174
- """
175
- files = []
176
- root_path = Path(root).resolve()
177
- cache = load_file_cache(root)
178
- new_cache = {}
179
-
180
- for filepath in root_path.rglob('*'):
181
- if not filepath.is_file():
182
- continue
183
-
184
- if should_exclude(filepath, root_path):
185
- continue
186
-
187
- file_size = filepath.stat().st_size
188
- if file_size > 50 * 1024 * 1024:
189
- continue
190
-
191
- rel_path = str(filepath.relative_to(root_path)).replace('\\', '/')
192
- mtime = filepath.stat().st_mtime
193
-
194
- # Check if we can use cached value
195
- cached = cache.get(rel_path)
196
- if cached and cached.get('mtime') == mtime and cached.get('size') == file_size:
197
- sha = cached['sha']
198
- else:
199
- # File changed, recompute SHA
200
- sha = compute_file_sha(str(filepath))
201
-
202
- # Update cache
203
- new_cache[rel_path] = {
204
- 'sha': sha,
205
- 'mtime': mtime,
206
- 'size': file_size,
207
- }
208
-
209
- files.append({
210
- 'path': rel_path,
211
- 'sha': sha,
212
- 'size': file_size,
213
- 'abs_path': str(filepath),
214
- })
215
-
216
- # Save updated cache
217
- save_file_cache(new_cache, root)
218
-
219
- return files
220
-
221
-
222
- def load_project_config(project_root: str = '.') -> Optional[Dict]:
223
- """Load .xenfra/config.json if it exists."""
224
- xenfra_dir = Path(project_root).resolve() / '.xenfra'
225
- config_file = xenfra_dir / 'config.json'
226
-
227
- if config_file.exists():
228
- try:
229
- with open(config_file, 'r') as f:
230
- return json.load(f)
231
- except (json.JSONDecodeError, IOError):
232
- return None
233
- return None
234
-
235
-
236
- def ensure_gitignore_ignored(project_root: str = '.'):
237
- """Ensure .xenfra/ is in the .gitignore file."""
238
- root_path = Path(project_root).resolve()
239
- gitignore_path = root_path / '.gitignore'
240
-
241
- entry = '.xenfra/\n'
242
-
243
- if not gitignore_path.exists():
244
- try:
245
- with open(gitignore_path, 'w') as f:
246
- f.write(entry)
247
- return True
248
- except IOError:
249
- return False
250
-
251
- try:
252
- with open(gitignore_path, 'r') as f:
253
- content = f.read()
254
-
255
- if '.xenfra/' not in content and '.xenfra' not in content:
256
- with open(gitignore_path, 'a') as f:
257
- if not content.endswith('\n'):
258
- f.write('\n')
259
- f.write(entry)
260
- return True
261
- except IOError:
262
- return False
263
-
264
- return False
265
-
266
-
267
- def save_project_config(config: Dict, project_root: str = '.'):
268
- """Save project config to .xenfra/config.json."""
269
- xenfra_dir = get_xenfra_dir(project_root)
270
- config_file = xenfra_dir / 'config.json'
271
-
272
- with open(config_file, 'w') as f:
273
- json.dump(config, f, indent=2)
274
-
275
-
276
- def update_last_deployment(deployment_id: str, url: str = None, project_root: str = '.'):
277
- """Update the last deployment info in project config."""
278
- config = load_project_config(project_root) or {}
279
-
280
- config['lastDeployment'] = {
281
- 'id': deployment_id,
282
- 'url': url,
283
- 'createdAt': datetime.utcnow().isoformat() + 'Z',
284
- }
285
-
286
- save_project_config(config, project_root)
1
+ """
2
+ File synchronization utilities for delta uploads.
3
+
4
+ Provides functions to scan project files, compute SHA256 hashes,
5
+ and manage local file caches for incremental deployments.
6
+ """
7
+
8
+ import hashlib
9
+ import json
10
+ import os
11
+ from datetime import datetime
12
+ from pathlib import Path
13
+ from typing import Dict, List, Optional, Set
14
+
15
# Patterns to exclude from deployment. Entries are either literal
# path-component names ('.git') or fnmatch-style globs ('*.pyc').
EXCLUDE_PATTERNS: Set[str] = {
    # Version control
    '.git',
    '.svn',
    '.hg',

    # Python
    '.venv',
    'venv',
    '__pycache__',
    '*.pyc',
    '*.pyo',
    '.pytest_cache',
    '.mypy_cache',
    '*.egg-info',
    'dist',
    'build',

    # Node.js
    'node_modules',

    # IDE/Editor
    '.idea',
    '.vscode',
    '*.swp',

    # Xenfra
    '.xenfra',

    # Environment
    '.env',
    '.env.local',
    '.env.*.local',

    # OS
    '.DS_Store',
    'Thumbs.db',
}

# File extensions to always exclude (compiled/binary artifacts)
EXCLUDE_EXTENSIONS: Set[str] = {
    '.pyc', '.pyo', '.so', '.dylib', '.dll',
    '.exe', '.bin', '.obj', '.o',
}


def should_exclude(path: Path, root: Path) -> bool:
    """Check if a path should be excluded from upload.

    Args:
        path: Path of the candidate file, located under ``root``.
        root: Project root directory.

    Returns:
        True when any component of the path relative to ``root`` matches
        an entry in EXCLUDE_PATTERNS (literally or as a glob), or when
        the file extension is in EXCLUDE_EXTENSIONS.
    """
    # Function-scope import keeps the module's top-level imports untouched.
    from fnmatch import fnmatch

    rel_parts = path.relative_to(root).parts

    # Check each part of the path against exclusion patterns
    for part in rel_parts:
        # Exact component name (fast path for the common literals).
        if part in EXCLUDE_PATTERNS:
            return True
        # Glob patterns: the previous code only honored a leading '*'
        # (e.g. '*.pyc'); fnmatch also matches patterns with interior
        # wildcards such as '.env.*.local', which were silently ignored.
        for pattern in EXCLUDE_PATTERNS:
            if '*' in pattern and fnmatch(part, pattern):
                return True

    # Always drop known binary/compiled artifacts regardless of pattern.
    if path.suffix.lower() in EXCLUDE_EXTENSIONS:
        return True

    return False
80
+
81
+
82
def compute_file_sha(filepath: str) -> str:
    """Return the SHA256 hex digest of the file's content.

    Reads in 8 KiB chunks so arbitrarily large files never need to be
    held in memory at once.
    """
    digest = hashlib.sha256()
    with open(filepath, 'rb') as stream:
        while chunk := stream.read(8192):
            digest.update(chunk)
    return digest.hexdigest()
89
+
90
+
91
def scan_project_files(root: str = '.') -> List[Dict]:
    """
    Scan project directory and return list of files with their metadata.

    Returns:
        List of dicts with keys: path, sha, size, abs_path
    """
    base = Path(root).resolve()
    results: List[Dict] = []

    for candidate in base.rglob('*'):
        # Only regular files are uploaded; directories are skipped.
        if not candidate.is_file():
            continue

        # Honor the exclusion patterns/extensions.
        if should_exclude(candidate, base):
            continue

        # Very large files (> 50MB) are never uploaded.
        size = candidate.stat().st_size
        if size > 50 * 1024 * 1024:
            continue

        # Forward slashes keep paths stable across platforms.
        relative = str(candidate.relative_to(base)).replace('\\', '/')

        results.append({
            'path': relative,
            'sha': compute_file_sha(str(candidate)),
            'size': size,
            'abs_path': str(candidate),
        })

    return results
126
+
127
+
128
def get_xenfra_dir(project_root: str = '.') -> Path:
    """Get or create the .xenfra directory (and its cache subdirectory).

    Returns:
        Absolute path of the project's .xenfra directory.
    """
    base = Path(project_root).resolve() / '.xenfra'
    # Create .xenfra first, then its cache/ child; both are idempotent.
    for directory in (base, base / 'cache'):
        directory.mkdir(exist_ok=True)
    return base
138
+
139
+
140
def load_file_cache(project_root: str = '.') -> Dict[str, Dict]:
    """
    Load cached file hashes from .xenfra/cache/file_hashes.json.

    Returns:
        Dict mapping file paths to {sha, mtime, size}; an empty dict
        when the cache file is missing or unreadable.
    """
    cache_path = get_xenfra_dir(project_root) / 'cache' / 'file_hashes.json'

    if not cache_path.exists():
        return {}

    try:
        with open(cache_path, 'r') as handle:
            return json.load(handle)
    except (json.JSONDecodeError, IOError):
        # A corrupt cache just means a full rescan; never raise here.
        return {}
157
+
158
+
159
def save_file_cache(cache: Dict[str, Dict], project_root: str = '.'):
    """Save file hashes to .xenfra/cache/file_hashes.json."""
    # get_xenfra_dir() also ensures the cache/ subdirectory exists.
    target = get_xenfra_dir(project_root) / 'cache' / 'file_hashes.json'
    with open(target, 'w') as handle:
        json.dump(cache, handle, indent=2)
166
+
167
+
168
def scan_project_files_cached(root: str = '.') -> List[Dict]:
    """
    Scan project files using local cache for unchanged files.

    Only recomputes SHA for files whose mtime or size changed.
    This is much faster for large projects with few changes.

    Returns:
        List of dicts with keys: path, sha, size, abs_path
    """
    files: List[Dict] = []
    root_path = Path(root).resolve()
    cache = load_file_cache(root)
    new_cache: Dict[str, Dict] = {}

    for filepath in root_path.rglob('*'):
        if not filepath.is_file():
            continue

        if should_exclude(filepath, root_path):
            continue

        # Single stat() call: the original called stat() twice (size,
        # then mtime), which is slower and could observe inconsistent
        # size/mtime if the file changed between the two calls.
        stat_result = filepath.stat()
        file_size = stat_result.st_size

        # Skip very large files (> 50MB)
        if file_size > 50 * 1024 * 1024:
            continue

        # Normalize path to use forward slashes
        rel_path = str(filepath.relative_to(root_path)).replace('\\', '/')
        mtime = stat_result.st_mtime

        # Reuse the cached SHA only when both mtime and size match.
        cached = cache.get(rel_path)
        if cached and cached.get('mtime') == mtime and cached.get('size') == file_size:
            sha = cached['sha']
        else:
            # File changed (or is new), recompute SHA
            sha = compute_file_sha(str(filepath))

        new_cache[rel_path] = {
            'sha': sha,
            'mtime': mtime,
            'size': file_size,
        }

        files.append({
            'path': rel_path,
            'sha': sha,
            'size': file_size,
            'abs_path': str(filepath),
        })

    # Persist the rebuilt cache (this also drops entries for files that
    # no longer exist or are now excluded).
    save_file_cache(new_cache, root)

    return files
220
+
221
+
222
def load_project_config(project_root: str = '.') -> Optional[Dict]:
    """Load .xenfra/config.json if it exists.

    Returns:
        The parsed config dict, or None when the file is absent or
        unreadable. Does NOT create the .xenfra directory.
    """
    config_path = Path(project_root).resolve() / '.xenfra' / 'config.json'

    if not config_path.exists():
        return None

    try:
        with open(config_path, 'r') as handle:
            return json.load(handle)
    except (json.JSONDecodeError, IOError):
        # Unreadable config is treated the same as no config.
        return None
234
+
235
+
236
def ensure_gitignore_ignored(project_root: str = '.') -> bool:
    """Ensure .xenfra/ is listed in the project's .gitignore.

    Creates .gitignore when it is missing; appends the entry when it is
    not already present as its own line.

    Returns:
        True when the entry was written; False when it was already
        present or the file could not be read/written.
    """
    root_path = Path(project_root).resolve()
    gitignore_path = root_path / '.gitignore'

    entry = '.xenfra/\n'

    if not gitignore_path.exists():
        try:
            with open(gitignore_path, 'w') as f:
                f.write(entry)
            return True
        except IOError:
            return False

    try:
        with open(gitignore_path, 'r') as f:
            content = f.read()

        # Match whole lines, not substrings: the original used
        # "'.xenfra' not in content", so any unrelated mention of
        # .xenfra (e.g. inside a comment or a longer path) wrongly
        # suppressed adding the entry.
        existing = {line.strip() for line in content.splitlines()}
        if '.xenfra' not in existing and '.xenfra/' not in existing:
            with open(gitignore_path, 'a') as f:
                # Keep the appended entry on its own line; 'content and'
                # avoids a spurious leading blank line in an empty file.
                if content and not content.endswith('\n'):
                    f.write('\n')
                f.write(entry)
            return True
    except IOError:
        return False

    return False
265
+
266
+
267
def save_project_config(config: Dict, project_root: str = '.'):
    """Save project config to .xenfra/config.json."""
    # get_xenfra_dir() creates .xenfra/ if needed before we write.
    destination = get_xenfra_dir(project_root) / 'config.json'
    with open(destination, 'w') as handle:
        json.dump(config, handle, indent=2)
274
+
275
+
276
def update_last_deployment(deployment_id: str, url: str = None, project_root: str = '.'):
    """Update the last deployment info in project config.

    Args:
        deployment_id: Identifier of the deployment just created.
        url: Public URL of the deployment, if known.
        project_root: Project directory containing .xenfra/.
    """
    # datetime.utcnow() is deprecated (Python 3.12): it produces a naive
    # timestamp. Use an aware UTC timestamp and strip tzinfo so the
    # stored 'YYYY-MM-DDTHH:MM:SS.ffffffZ' format is unchanged.
    from datetime import timezone

    config = load_project_config(project_root) or {}

    now_utc = datetime.now(timezone.utc).replace(tzinfo=None)
    config['lastDeployment'] = {
        'id': deployment_id,
        'url': url,
        'createdAt': now_utc.isoformat() + 'Z',
    }

    save_project_config(config, project_root)