fbuild 1.2.8-py3-none-any.whl → 1.2.15-py3-none-any.whl
- fbuild/__init__.py +5 -1
- fbuild/build/configurable_compiler.py +49 -6
- fbuild/build/configurable_linker.py +14 -9
- fbuild/build/orchestrator_esp32.py +6 -3
- fbuild/build/orchestrator_rp2040.py +6 -2
- fbuild/cli.py +300 -5
- fbuild/config/ini_parser.py +13 -1
- fbuild/daemon/__init__.py +11 -0
- fbuild/daemon/async_client.py +5 -4
- fbuild/daemon/async_client_lib.py +1543 -0
- fbuild/daemon/async_protocol.py +825 -0
- fbuild/daemon/async_server.py +2100 -0
- fbuild/daemon/client.py +425 -13
- fbuild/daemon/configuration_lock.py +13 -13
- fbuild/daemon/connection.py +508 -0
- fbuild/daemon/connection_registry.py +579 -0
- fbuild/daemon/daemon.py +517 -164
- fbuild/daemon/daemon_context.py +72 -1
- fbuild/daemon/device_discovery.py +477 -0
- fbuild/daemon/device_manager.py +821 -0
- fbuild/daemon/error_collector.py +263 -263
- fbuild/daemon/file_cache.py +332 -332
- fbuild/daemon/firmware_ledger.py +46 -123
- fbuild/daemon/lock_manager.py +508 -508
- fbuild/daemon/messages.py +431 -0
- fbuild/daemon/operation_registry.py +288 -288
- fbuild/daemon/processors/build_processor.py +34 -1
- fbuild/daemon/processors/deploy_processor.py +1 -3
- fbuild/daemon/processors/locking_processor.py +7 -7
- fbuild/daemon/request_processor.py +457 -457
- fbuild/daemon/shared_serial.py +7 -7
- fbuild/daemon/status_manager.py +238 -238
- fbuild/daemon/subprocess_manager.py +316 -316
- fbuild/deploy/docker_utils.py +182 -2
- fbuild/deploy/monitor.py +1 -1
- fbuild/deploy/qemu_runner.py +71 -13
- fbuild/ledger/board_ledger.py +46 -122
- fbuild/output.py +238 -2
- fbuild/packages/library_compiler.py +15 -5
- fbuild/packages/library_manager.py +12 -6
- fbuild-1.2.15.dist-info/METADATA +569 -0
- {fbuild-1.2.8.dist-info → fbuild-1.2.15.dist-info}/RECORD +46 -39
- fbuild-1.2.8.dist-info/METADATA +0 -468
- {fbuild-1.2.8.dist-info → fbuild-1.2.15.dist-info}/WHEEL +0 -0
- {fbuild-1.2.8.dist-info → fbuild-1.2.15.dist-info}/entry_points.txt +0 -0
- {fbuild-1.2.8.dist-info → fbuild-1.2.15.dist-info}/licenses/LICENSE +0 -0
- {fbuild-1.2.8.dist-info → fbuild-1.2.15.dist-info}/top_level.txt +0 -0
fbuild/daemon/file_cache.py
CHANGED
@@ -1,332 +1,332 @@
The hunk removes and re-adds the entire file; the deleted and added text are identical in this extraction, so the underlying change is not visible here (likely whitespace or line endings only). The file as of 1.2.15:

"""File-level incremental compilation cache.

This module tracks source file changes for incremental compilation, allowing
the build system to skip recompilation of unchanged files.
"""

from __future__ import annotations

import hashlib
import json
import logging
import threading
from pathlib import Path
from typing import Optional

logger = logging.getLogger(__name__)


class FileCache:
    """Tracks source file changes for incremental compilation.

    Uses SHA256 hashing to detect file changes and maintains a persistent cache
    on disk. Thread-safe for concurrent use.
    """

    def __init__(self, cache_file: Path):
        """Initialize file cache.

        Args:
            cache_file: Path to cache file (JSON format)
        """
        self.cache_file = cache_file
        self.cache: dict[str, str] = {}
        self.lock = threading.Lock()
        self._load_cache()
        logger.info(f"FileCache initialized with {len(self.cache)} cached entries")

    def _load_cache(self):
        """Load cache from disk."""
        if not self.cache_file.exists():
            return

        try:
            with open(self.cache_file, "r", encoding="utf-8") as f:
                data = json.load(f)
            logger.debug(f"Parsed {len(data)} cache entries from JSON")
            self.cache = data
            logger.info(f"Loaded cache with {len(self.cache)} entries from {self.cache_file}")
            if len(self.cache) > 0:
                logger.debug(f"Sample cache keys: {list(self.cache.keys())[:3]}")
        except json.JSONDecodeError as e:
            logger.warning(f"Failed to parse cache JSON from {self.cache_file}: {e}")
            self.cache = {}
        except IOError as e:
            logger.warning(f"Failed to read cache file {self.cache_file}: {e}")
            self.cache = {}

    def _save_cache(self):
        """Save cache to disk atomically.

        Uses atomic write pattern (temp file + rename) to prevent corruption.
        """
        logger.debug(f"Cache entries to save: {len(self.cache)}")

        try:
            # Ensure cache directory exists
            self.cache_file.parent.mkdir(parents=True, exist_ok=True)

            # Write to temporary file
            temp_file = self.cache_file.with_suffix(".tmp")
            with open(temp_file, "w", encoding="utf-8") as f:
                json.dump(self.cache, f, indent=2)

            # Atomic rename
            temp_file.replace(self.cache_file)

            logger.debug(f"Saved cache with {len(self.cache)} entries to {self.cache_file}")

        except IOError as e:
            logger.error(f"Failed to save cache to {self.cache_file}: {e}")
            logger.debug(f"Cache save error details: {type(e).__name__}: {e}")

    def get_file_hash(self, file_path: Path) -> str:
        """Calculate SHA256 hash of file contents.

        Args:
            file_path: Path to file

        Returns:
            SHA256 hash as hex string

        Raises:
            FileNotFoundError: If file does not exist
            IOError: If file cannot be read
        """
        sha256 = hashlib.sha256()
        bytes_read = 0

        try:
            with open(file_path, "rb") as f:
                # Read in chunks for memory efficiency
                for chunk in iter(lambda: f.read(8192), b""):
                    sha256.update(chunk)
                    bytes_read += len(chunk)

            hash_value = sha256.hexdigest()
            logger.debug(f"File hash computed: {hash_value[:16]}... (read {bytes_read} bytes)")
            return hash_value

        except FileNotFoundError:
            logger.error(f"File not found for hashing: {file_path}")
            raise
        except IOError as e:
            logger.error(f"Failed to read file for hashing: {file_path}: {e}")
            raise

    def has_changed(self, file_path: Path) -> bool:
        """Check if file has changed since last cache update.

        Args:
            file_path: Path to file

        Returns:
            True if file has changed or not in cache, False otherwise
        """

        if not file_path.exists():
            logger.warning(f"File does not exist: {file_path}")
            return True

        file_key = str(file_path.absolute())

        with self.lock:
            cached_hash = self.cache.get(file_key)

        # File not in cache - consider it changed
        if cached_hash is None:
            return True

        try:
            current_hash = self.get_file_hash(file_path)
            changed = current_hash != cached_hash

            if changed:
                logger.debug(f"File changed: {file_path} (cached: {cached_hash[:16]}..., current: {current_hash[:16]}...)")
            else:
                logger.debug(f"File unchanged: {file_path}")

            return changed

        except (FileNotFoundError, IOError):
            # If we can't hash the file, assume it changed
            return True

    def update(self, file_path: Path):
        """Update cache with current file hash.

        Args:
            file_path: Path to file
        """

        if not file_path.exists():
            logger.warning(f"Cannot update cache for non-existent file: {file_path}")
            return

        try:
            file_key = str(file_path.absolute())
            current_hash = self.get_file_hash(file_path)

            with self.lock:
                was_cached = file_key in self.cache
                self.cache[file_key] = current_hash
                cache_size = len(self.cache)

            logger.debug(f"Cache entry {'updated' if was_cached else 'added'}: {file_path} (total entries: {cache_size})")

            with self.lock:
                self._save_cache()

        except (FileNotFoundError, IOError) as e:
            logger.error(f"Failed to update cache for {file_path}: {e}")

    def update_batch(self, file_paths: list[Path]):
        """Update cache for multiple files efficiently.

        Args:
            file_paths: List of file paths to update
        """
        logger.info(f"Batch cache update starting: {len(file_paths)} files")
        updated_count = 0
        skipped_count = 0
        failed_count = 0

        for file_path in file_paths:
            if not file_path.exists():
                skipped_count += 1
                continue

            try:
                file_key = str(file_path.absolute())
                current_hash = self.get_file_hash(file_path)

                with self.lock:
                    self.cache[file_key] = current_hash
                updated_count += 1

            except (FileNotFoundError, IOError) as e:
                logger.warning(f"Failed to update cache for {file_path}: {e}")
                failed_count += 1

        # Save once after all updates
        logger.debug(f"Saving batch cache update to disk ({updated_count} files updated)")
        with self.lock:
            self._save_cache()

        logger.info(f"Batch cache update complete: {updated_count}/{len(file_paths)} files updated, {skipped_count} skipped, {failed_count} failed")
        logger.debug(f"Total cache entries after batch update: {len(self.cache)}")

    def needs_recompilation(self, source_path: Path, object_path: Path) -> bool:
        """Check if source file needs recompilation.

        A file needs recompilation if:
        1. Object file doesn't exist
        2. Source file has changed (cache check)
        3. Object file is older than source file (mtime check)

        Args:
            source_path: Path to source file (.c, .cpp, etc.)
            object_path: Path to object file (.o)

        Returns:
            True if recompilation needed, False otherwise
        """

        # Object doesn't exist - must compile
        if not object_path.exists():
            logger.debug("Reason: object file does not exist")
            return True

        # Source changed - must recompile
        if self.has_changed(source_path):
            logger.debug("Reason: source file hash differs from cache")
            return True

        # Object older than source - must recompile
        try:
            source_mtime = source_path.stat().st_mtime
            object_mtime = object_path.stat().st_mtime

            if object_mtime < source_mtime:
                logger.debug(f"Reason: object mtime ({object_mtime}) < source mtime ({source_mtime})")
                return True

        except OSError as e:
            logger.warning(f"Failed to check file times: {e} - assuming recompilation needed")
            logger.debug(f"Reason: stat() failed with {type(e).__name__}")
            return True

        # No recompilation needed
        logger.debug(f"Skipping unchanged file: {source_path} (all checks passed)")
        return False

    def invalidate(self, file_path: Path):
        """Remove file from cache, forcing recompilation on next build.

        Args:
            file_path: Path to file
        """
        file_key = str(file_path.absolute())

        with self.lock:
            if file_key in self.cache:
                del self.cache[file_key]
                self._save_cache()
                logger.info(f"Invalidated cache entry: {file_path} (total entries: {len(self.cache)})")
            else:
                logger.debug(f"Cache entry not found: {file_path}")

    def clear(self):
        """Clear entire cache."""
        logger.info(f"Clearing entire cache (current entries: {len(self.cache)})")
        with self.lock:
            old_size = len(self.cache)
            self.cache.clear()
            self._save_cache()
        logger.info(f"Cache cleared: removed {old_size} entries")

    def get_statistics(self) -> dict[str, int]:
        """Get cache statistics.

        Returns:
            Dictionary with cache statistics
        """
        with self.lock:
            stats = {
                "total_entries": len(self.cache),
            }
        return stats


# Global file cache instance (initialized by daemon)
_file_cache: Optional[FileCache] = None


def get_file_cache() -> FileCache:
    """Get global file cache instance.

    Returns:
        Global FileCache instance

    Raises:
        RuntimeError: If file cache not initialized
    """
    global _file_cache
    if _file_cache is None:
        raise RuntimeError("FileCache not initialized. Call init_file_cache() first.")
    return _file_cache


def init_file_cache(cache_file: Path) -> FileCache:
    """Initialize global file cache.

    Args:
        cache_file: Path to cache file

    Returns:
        Initialized FileCache instance
    """
    global _file_cache
    _file_cache = FileCache(cache_file=cache_file)
    logger.info(f"FileCache initialized with cache file: {cache_file}")
    return _file_cache