fbuild-1.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (93)
  1. fbuild/__init__.py +0 -0
  2. fbuild/assets/example.txt +1 -0
  3. fbuild/build/__init__.py +117 -0
  4. fbuild/build/archive_creator.py +186 -0
  5. fbuild/build/binary_generator.py +444 -0
  6. fbuild/build/build_component_factory.py +131 -0
  7. fbuild/build/build_state.py +325 -0
  8. fbuild/build/build_utils.py +98 -0
  9. fbuild/build/compilation_executor.py +422 -0
  10. fbuild/build/compiler.py +165 -0
  11. fbuild/build/compiler_avr.py +574 -0
  12. fbuild/build/configurable_compiler.py +612 -0
  13. fbuild/build/configurable_linker.py +637 -0
  14. fbuild/build/flag_builder.py +186 -0
  15. fbuild/build/library_dependency_processor.py +185 -0
  16. fbuild/build/linker.py +708 -0
  17. fbuild/build/orchestrator.py +67 -0
  18. fbuild/build/orchestrator_avr.py +656 -0
  19. fbuild/build/orchestrator_esp32.py +797 -0
  20. fbuild/build/orchestrator_teensy.py +543 -0
  21. fbuild/build/source_compilation_orchestrator.py +220 -0
  22. fbuild/build/source_scanner.py +516 -0
  23. fbuild/cli.py +566 -0
  24. fbuild/cli_utils.py +312 -0
  25. fbuild/config/__init__.py +16 -0
  26. fbuild/config/board_config.py +457 -0
  27. fbuild/config/board_loader.py +92 -0
  28. fbuild/config/ini_parser.py +209 -0
  29. fbuild/config/mcu_specs.py +88 -0
  30. fbuild/daemon/__init__.py +34 -0
  31. fbuild/daemon/client.py +929 -0
  32. fbuild/daemon/compilation_queue.py +293 -0
  33. fbuild/daemon/daemon.py +474 -0
  34. fbuild/daemon/daemon_context.py +196 -0
  35. fbuild/daemon/error_collector.py +263 -0
  36. fbuild/daemon/file_cache.py +332 -0
  37. fbuild/daemon/lock_manager.py +270 -0
  38. fbuild/daemon/logging_utils.py +149 -0
  39. fbuild/daemon/messages.py +301 -0
  40. fbuild/daemon/operation_registry.py +288 -0
  41. fbuild/daemon/process_tracker.py +366 -0
  42. fbuild/daemon/processors/__init__.py +12 -0
  43. fbuild/daemon/processors/build_processor.py +157 -0
  44. fbuild/daemon/processors/deploy_processor.py +327 -0
  45. fbuild/daemon/processors/monitor_processor.py +146 -0
  46. fbuild/daemon/request_processor.py +401 -0
  47. fbuild/daemon/status_manager.py +216 -0
  48. fbuild/daemon/subprocess_manager.py +316 -0
  49. fbuild/deploy/__init__.py +17 -0
  50. fbuild/deploy/deployer.py +67 -0
  51. fbuild/deploy/deployer_esp32.py +314 -0
  52. fbuild/deploy/monitor.py +495 -0
  53. fbuild/interrupt_utils.py +34 -0
  54. fbuild/packages/__init__.py +53 -0
  55. fbuild/packages/archive_utils.py +1098 -0
  56. fbuild/packages/arduino_core.py +412 -0
  57. fbuild/packages/cache.py +249 -0
  58. fbuild/packages/downloader.py +366 -0
  59. fbuild/packages/framework_esp32.py +538 -0
  60. fbuild/packages/framework_teensy.py +346 -0
  61. fbuild/packages/github_utils.py +96 -0
  62. fbuild/packages/header_trampoline_cache.py +394 -0
  63. fbuild/packages/library_compiler.py +203 -0
  64. fbuild/packages/library_manager.py +549 -0
  65. fbuild/packages/library_manager_esp32.py +413 -0
  66. fbuild/packages/package.py +163 -0
  67. fbuild/packages/platform_esp32.py +383 -0
  68. fbuild/packages/platform_teensy.py +312 -0
  69. fbuild/packages/platform_utils.py +131 -0
  70. fbuild/packages/platformio_registry.py +325 -0
  71. fbuild/packages/sdk_utils.py +231 -0
  72. fbuild/packages/toolchain.py +436 -0
  73. fbuild/packages/toolchain_binaries.py +196 -0
  74. fbuild/packages/toolchain_esp32.py +484 -0
  75. fbuild/packages/toolchain_metadata.py +185 -0
  76. fbuild/packages/toolchain_teensy.py +404 -0
  77. fbuild/platform_configs/esp32.json +150 -0
  78. fbuild/platform_configs/esp32c2.json +144 -0
  79. fbuild/platform_configs/esp32c3.json +143 -0
  80. fbuild/platform_configs/esp32c5.json +151 -0
  81. fbuild/platform_configs/esp32c6.json +151 -0
  82. fbuild/platform_configs/esp32p4.json +149 -0
  83. fbuild/platform_configs/esp32s3.json +151 -0
  84. fbuild/platform_configs/imxrt1062.json +56 -0
  85. fbuild-1.1.0.dist-info/METADATA +447 -0
  86. fbuild-1.1.0.dist-info/RECORD +93 -0
  87. fbuild-1.1.0.dist-info/WHEEL +5 -0
  88. fbuild-1.1.0.dist-info/entry_points.txt +5 -0
  89. fbuild-1.1.0.dist-info/licenses/LICENSE +21 -0
  90. fbuild-1.1.0.dist-info/top_level.txt +2 -0
  91. fbuild_lint/__init__.py +0 -0
  92. fbuild_lint/ruff_plugins/__init__.py +0 -0
  93. fbuild_lint/ruff_plugins/keyboard_interrupt_checker.py +158 -0
fbuild/daemon/error_collector.py
@@ -0,0 +1,263 @@
+"""
+Error Collector - Structured error collection for async operations.
+
+This module provides error collection and aggregation for asynchronous build
+operations, replacing simple exception handling with structured error tracking.
+"""
+
+import logging
+import threading
+import time
+from dataclasses import dataclass, field
+from enum import Enum
+from typing import Optional
+
+
+class ErrorSeverity(Enum):
+    """Severity level of a build error."""
+
+    WARNING = "warning"
+    ERROR = "error"
+    FATAL = "fatal"
+
+
+@dataclass
+class BuildError:
+    """Single build error."""
+
+    severity: ErrorSeverity
+    phase: str  # "download", "compile", "link", "upload"
+    file_path: Optional[str]
+    error_message: str
+    stderr: Optional[str] = None
+    stdout: Optional[str] = None
+    timestamp: float = field(default_factory=time.time)
+
+    def format(self) -> str:
+        """Format error as human-readable string.
+
+        Returns:
+            Formatted error message
+        """
+        lines = [f"[{self.severity.value.upper()}] {self.phase}: {self.error_message}"]
+
+        if self.file_path:
+            lines.append(f"  File: {self.file_path}")
+
+        if self.stderr:
+            # Truncate stderr to reasonable length
+            stderr_preview = self.stderr[:500]
+            if len(self.stderr) > 500:
+                stderr_preview += "... (truncated)"
+            lines.append(f"  stderr: {stderr_preview}")
+
+        return "\n".join(lines)
+
+
+class ErrorCollector:
+    """Collects errors during async build operations."""
+
+    def __init__(self, max_errors: int = 100):
+        """Initialize error collector.
+
+        Args:
+            max_errors: Maximum number of errors to collect
+        """
+        self.errors: list[BuildError] = []
+        self.lock = threading.RLock()  # re-entrant: format_errors() calls get_error_count() while holding it
+        self.max_errors = max_errors
+
+        logging.debug(f"ErrorCollector initialized (max_errors={max_errors})")
+
+    def add_error(self, error: BuildError) -> None:
+        """Add error to collection.
+
+        Args:
+            error: Build error to add
+        """
+        with self.lock:
+            if len(self.errors) >= self.max_errors:
+                logging.warning(f"ErrorCollector full ({self.max_errors} errors), dropping oldest")
+                self.errors.pop(0)
+
+            self.errors.append(error)
+
+    def get_errors(self, severity: Optional[ErrorSeverity] = None) -> list[BuildError]:
+        """Get all errors, optionally filtered by severity.
+
+        Args:
+            severity: Filter by severity (None = all errors)
+
+        Returns:
+            List of build errors
+        """
+        logging.debug(f"Retrieving errors (severity filter: {severity.value if severity else 'None'})")
+        with self.lock:
+            if severity:
+                filtered = [e for e in self.errors if e.severity == severity]
+                logging.debug(f"Filtered {len(filtered)} errors by severity {severity.value} (total: {len(self.errors)})")
+                return filtered
+            logging.debug(f"Returning all {len(self.errors)} errors")
+            return self.errors.copy()
+
+    def get_errors_by_phase(self, phase: str) -> list[BuildError]:
+        """Get errors for a specific phase.
+
+        Args:
+            phase: Phase to filter by
+
+        Returns:
+            List of build errors for the phase
+        """
+        with self.lock:
+            phase_errors = [e for e in self.errors if e.phase == phase]
+            logging.debug(f"Found {len(phase_errors)} errors in phase '{phase}' (total: {len(self.errors)})")
+            return phase_errors
+
+    def has_fatal_errors(self) -> bool:
+        """Check if any fatal errors occurred.
+
+        Returns:
+            True if fatal errors exist
+        """
+        with self.lock:
+            has_fatal = any(e.severity == ErrorSeverity.FATAL for e in self.errors)
+            fatal_count = sum(1 for e in self.errors if e.severity == ErrorSeverity.FATAL)
+            logging.debug(f"Fatal error check result: {has_fatal} ({fatal_count} fatal errors)")
+            return has_fatal
+
+    def has_errors(self) -> bool:
+        """Check if any errors (non-warning) occurred.
+
+        Returns:
+            True if errors exist
+        """
+        logging.debug("Checking for errors (non-warning)")
+        with self.lock:
+            has_errs = any(e.severity in (ErrorSeverity.ERROR, ErrorSeverity.FATAL) for e in self.errors)
+            error_count = sum(1 for e in self.errors if e.severity in (ErrorSeverity.ERROR, ErrorSeverity.FATAL))
+            logging.debug(f"Error check result: {has_errs} ({error_count} errors or fatal)")
+            return has_errs
+
+    def has_warnings(self) -> bool:
+        """Check if any warnings occurred.
+
+        Returns:
+            True if warnings exist
+        """
+        with self.lock:
+            has_warn = any(e.severity == ErrorSeverity.WARNING for e in self.errors)
+            warning_count = sum(1 for e in self.errors if e.severity == ErrorSeverity.WARNING)
+            logging.debug(f"Warning check result: {has_warn} ({warning_count} warnings)")
+            return has_warn
+
+    def get_error_count(self) -> dict[str, int]:
+        """Get count of errors by severity.
+
+        Returns:
+            Dictionary with counts by severity
+        """
+        with self.lock:
+            counts = {
+                "warnings": sum(1 for e in self.errors if e.severity == ErrorSeverity.WARNING),
+                "errors": sum(1 for e in self.errors if e.severity == ErrorSeverity.ERROR),
+                "fatal": sum(1 for e in self.errors if e.severity == ErrorSeverity.FATAL),
+                "total": len(self.errors),
+            }
+            logging.debug(f"Error counts: {counts['total']} total ({counts['fatal']} fatal, {counts['errors']} errors, {counts['warnings']} warnings)")
+            return counts
+
+    def format_errors(self, max_errors: Optional[int] = None) -> str:
+        """Format all errors as human-readable string.
+
+        Args:
+            max_errors: Maximum number of errors to include (None = all)
+
+        Returns:
+            Formatted error report
+        """
+        logging.debug(f"Formatting errors (max_errors: {max_errors})")
+        with self.lock:
+            if not self.errors:
+                return "No errors"
+
+            errors_to_show = self.errors if max_errors is None else self.errors[:max_errors]
+            logging.debug(f"Formatting {len(errors_to_show)} errors (total: {len(self.errors)})")
+            lines = []
+
+            for err in errors_to_show:
+                lines.append(err.format())
+
+            if max_errors and len(self.errors) > max_errors:
+                lines.append(f"\n... and {len(self.errors) - max_errors} more errors")
+
+            # Add summary (safe under the re-entrant lock)
+            counts = self.get_error_count()
+            summary = f"\nSummary: {counts['fatal']} fatal, {counts['errors']} errors, {counts['warnings']} warnings"
+            lines.append(summary)
+
+            formatted = "\n\n".join(lines)
+            logging.debug(f"Error formatting complete: {len(lines)} sections, {len(formatted)} characters")
+            return formatted
+
+    def format_summary(self) -> str:
+        """Format a brief summary of errors.
+
+        Returns:
+            Brief error summary
+        """
+        counts = self.get_error_count()
+        if counts["total"] == 0:
+            return "No errors"
+
+        parts = []
+        if counts["fatal"] > 0:
+            parts.append(f"{counts['fatal']} fatal")
+        if counts["errors"] > 0:
+            parts.append(f"{counts['errors']} errors")
+        if counts["warnings"] > 0:
+            parts.append(f"{counts['warnings']} warnings")
+
+        summary = ", ".join(parts)
+        return summary
+
+    def clear(self) -> None:
+        """Clear all collected errors."""
+        with self.lock:
+            error_count = len(self.errors)
+            self.errors.clear()
+
+        if error_count > 0:
+            logging.info(f"Cleared {error_count} errors")
+
+    def get_first_fatal_error(self) -> Optional[BuildError]:
+        """Get the first fatal error encountered.
+
+        Returns:
+            First fatal error or None
+        """
+        with self.lock:
+            for error in self.errors:
+                if error.severity == ErrorSeverity.FATAL:
+                    return error
+            return None
+
+    def get_compilation_errors(self) -> list[BuildError]:
+        """Get all compilation-phase errors.
+
+        Returns:
+            List of compilation errors
+        """
+        compilation_errors = self.get_errors_by_phase("compile")
+        logging.debug(f"Found {len(compilation_errors)} compilation errors")
+        return compilation_errors
+
+    def get_link_errors(self) -> list[BuildError]:
+        """Get all link-phase errors.
+
+        Returns:
+            List of link errors
+        """
+        link_errors = self.get_errors_by_phase("link")
+        logging.debug(f"Found {len(link_errors)} link errors")
+        return link_errors
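Taken together, ErrorCollector is a collect-then-report API: build phases record BuildError entries as they fail, and the caller renders a report once the run finishes. A minimal usage sketch (the import path follows the file listing above; the sample file name and messages are illustrative, not taken from the package):

    from fbuild.daemon.error_collector import BuildError, ErrorCollector, ErrorSeverity

    collector = ErrorCollector(max_errors=50)

    # Record a compile-phase failure (values are made up for illustration)
    collector.add_error(
        BuildError(
            severity=ErrorSeverity.ERROR,
            phase="compile",
            file_path="src/main.cpp",
            error_message="expected ';' before '}' token",
            stderr="src/main.cpp:42:1: error: expected ';' before '}' token",
        )
    )

    if collector.has_errors():
        print(collector.format_errors(max_errors=10))
    print(collector.format_summary())  # -> "1 errors"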
fbuild/daemon/file_cache.py
@@ -0,0 +1,332 @@
+"""File-level incremental compilation cache.
+
+This module tracks source file changes for incremental compilation, allowing
+the build system to skip recompilation of unchanged files.
+"""
+
+from __future__ import annotations
+
+import hashlib
+import json
+import logging
+import threading
+from pathlib import Path
+from typing import Optional
+
+logger = logging.getLogger(__name__)
+
+
+class FileCache:
+    """Tracks source file changes for incremental compilation.
+
+    Uses SHA256 hashing to detect file changes and maintains a persistent cache
+    on disk. Thread-safe for concurrent use.
+    """
+
+    def __init__(self, cache_file: Path):
+        """Initialize file cache.
+
+        Args:
+            cache_file: Path to cache file (JSON format)
+        """
+        self.cache_file = cache_file
+        self.cache: dict[str, str] = {}
+        self.lock = threading.Lock()
+        self._load_cache()
+        logger.info(f"FileCache initialized with {len(self.cache)} cached entries")
+
+    def _load_cache(self):
+        """Load cache from disk."""
+        if not self.cache_file.exists():
+            return
+
+        try:
+            with open(self.cache_file, "r", encoding="utf-8") as f:
+                data = json.load(f)
+            logger.debug(f"Parsed {len(data)} cache entries from JSON")
+            self.cache = data
+            logger.info(f"Loaded cache with {len(self.cache)} entries from {self.cache_file}")
+            if len(self.cache) > 0:
+                logger.debug(f"Sample cache keys: {list(self.cache.keys())[:3]}")
+        except json.JSONDecodeError as e:
+            logger.warning(f"Failed to parse cache JSON from {self.cache_file}: {e}")
+            self.cache = {}
+        except IOError as e:
+            logger.warning(f"Failed to read cache file {self.cache_file}: {e}")
+            self.cache = {}
+
+    def _save_cache(self):
+        """Save cache to disk atomically.
+
+        Uses atomic write pattern (temp file + rename) to prevent corruption.
+        """
+        logger.debug(f"Cache entries to save: {len(self.cache)}")
+
+        try:
+            # Ensure cache directory exists
+            self.cache_file.parent.mkdir(parents=True, exist_ok=True)
+
+            # Write to temporary file
+            temp_file = self.cache_file.with_suffix(".tmp")
+            with open(temp_file, "w", encoding="utf-8") as f:
+                json.dump(self.cache, f, indent=2)
+
+            # Atomic rename
+            temp_file.replace(self.cache_file)
+
+            logger.debug(f"Saved cache with {len(self.cache)} entries to {self.cache_file}")
+
+        except IOError as e:
+            logger.error(f"Failed to save cache to {self.cache_file}: {e}")
+            logger.debug(f"Cache save error details: {type(e).__name__}: {e}")
+
+    def get_file_hash(self, file_path: Path) -> str:
+        """Calculate SHA256 hash of file contents.
+
+        Args:
+            file_path: Path to file
+
+        Returns:
+            SHA256 hash as hex string
+
+        Raises:
+            FileNotFoundError: If file does not exist
+            IOError: If file cannot be read
+        """
+        sha256 = hashlib.sha256()
+        bytes_read = 0
+
+        try:
+            with open(file_path, "rb") as f:
+                # Read in chunks for memory efficiency
+                for chunk in iter(lambda: f.read(8192), b""):
+                    sha256.update(chunk)
+                    bytes_read += len(chunk)
+
+            hash_value = sha256.hexdigest()
+            logger.debug(f"File hash computed: {hash_value[:16]}... (read {bytes_read} bytes)")
+            return hash_value
+
+        except FileNotFoundError:
+            logger.error(f"File not found for hashing: {file_path}")
+            raise
+        except IOError as e:
+            logger.error(f"Failed to read file for hashing: {file_path}: {e}")
+            raise
+
+    def has_changed(self, file_path: Path) -> bool:
+        """Check if file has changed since last cache update.
+
+        Args:
+            file_path: Path to file
+
+        Returns:
+            True if file has changed or not in cache, False otherwise
+        """
+
+        if not file_path.exists():
+            logger.warning(f"File does not exist: {file_path}")
+            return True
+
+        file_key = str(file_path.absolute())
+
+        with self.lock:
+            cached_hash = self.cache.get(file_key)
+
+        # File not in cache - consider it changed
+        if cached_hash is None:
+            return True
+
+        try:
+            current_hash = self.get_file_hash(file_path)
+            changed = current_hash != cached_hash
+
+            if changed:
+                logger.debug(f"File changed: {file_path} (cached: {cached_hash[:16]}..., current: {current_hash[:16]}...)")
+            else:
+                logger.debug(f"File unchanged: {file_path}")
+
+            return changed
+
+        except (FileNotFoundError, IOError):
+            # If we can't hash the file, assume it changed
+            return True
+
+    def update(self, file_path: Path):
+        """Update cache with current file hash.
+
+        Args:
+            file_path: Path to file
+        """
+
+        if not file_path.exists():
+            logger.warning(f"Cannot update cache for non-existent file: {file_path}")
+            return
+
+        try:
+            file_key = str(file_path.absolute())
+            current_hash = self.get_file_hash(file_path)
+
+            with self.lock:
+                was_cached = file_key in self.cache
+                self.cache[file_key] = current_hash
+                cache_size = len(self.cache)
+
+            logger.debug(f"Cache entry {'updated' if was_cached else 'added'}: {file_path} (total entries: {cache_size})")
+
+            with self.lock:
+                self._save_cache()
+
+        except (FileNotFoundError, IOError) as e:
+            logger.error(f"Failed to update cache for {file_path}: {e}")
+
+    def update_batch(self, file_paths: list[Path]):
+        """Update cache for multiple files efficiently.
+
+        Args:
+            file_paths: List of file paths to update
+        """
+        logger.info(f"Batch cache update starting: {len(file_paths)} files")
+        updated_count = 0
+        skipped_count = 0
+        failed_count = 0
+
+        for file_path in file_paths:
+            if not file_path.exists():
+                skipped_count += 1
+                continue
+
+            try:
+                file_key = str(file_path.absolute())
+                current_hash = self.get_file_hash(file_path)
+
+                with self.lock:
+                    self.cache[file_key] = current_hash
+                updated_count += 1
+
+            except (FileNotFoundError, IOError) as e:
+                logger.warning(f"Failed to update cache for {file_path}: {e}")
+                failed_count += 1
+
+        # Save once after all updates
+        logger.debug(f"Saving batch cache update to disk ({updated_count} files updated)")
+        with self.lock:
+            self._save_cache()
+
+        logger.info(f"Batch cache update complete: {updated_count}/{len(file_paths)} files updated, {skipped_count} skipped, {failed_count} failed")
+        logger.debug(f"Total cache entries after batch update: {len(self.cache)}")
+
+    def needs_recompilation(self, source_path: Path, object_path: Path) -> bool:
+        """Check if source file needs recompilation.
+
+        A file needs recompilation if:
+        1. Object file doesn't exist
+        2. Source file has changed (cache check)
+        3. Object file is older than source file (mtime check)
+
+        Args:
+            source_path: Path to source file (.c, .cpp, etc.)
+            object_path: Path to object file (.o)
+
+        Returns:
+            True if recompilation needed, False otherwise
+        """
+
+        # Object doesn't exist - must compile
+        if not object_path.exists():
+            logger.debug("Reason: object file does not exist")
+            return True
+
+        # Source changed - must recompile
+        if self.has_changed(source_path):
+            logger.debug("Reason: source file hash differs from cache")
+            return True
+
+        # Object older than source - must recompile
+        try:
+            source_mtime = source_path.stat().st_mtime
+            object_mtime = object_path.stat().st_mtime
+
+            if object_mtime < source_mtime:
+                logger.debug(f"Reason: object mtime ({object_mtime}) < source mtime ({source_mtime})")
+                return True
+
+        except OSError as e:
+            logger.warning(f"Failed to check file times: {e} - assuming recompilation needed")
+            logger.debug(f"Reason: stat() failed with {type(e).__name__}")
+            return True
+
+        # No recompilation needed
+        logger.debug(f"Skipping unchanged file: {source_path} (all checks passed)")
+        return False
+
+    def invalidate(self, file_path: Path):
+        """Remove file from cache, forcing recompilation on next build.
+
+        Args:
+            file_path: Path to file
+        """
+        file_key = str(file_path.absolute())
+
+        with self.lock:
+            if file_key in self.cache:
+                del self.cache[file_key]
+                self._save_cache()
+                logger.info(f"Invalidated cache entry: {file_path} (total entries: {len(self.cache)})")
+            else:
+                logger.debug(f"Cache entry not found: {file_path}")
+
+    def clear(self):
+        """Clear entire cache."""
+        logger.info(f"Clearing entire cache (current entries: {len(self.cache)})")
+        with self.lock:
+            old_size = len(self.cache)
+            self.cache.clear()
+            self._save_cache()
+        logger.info(f"Cache cleared: removed {old_size} entries")
+
+    def get_statistics(self) -> dict[str, int]:
+        """Get cache statistics.
+
+        Returns:
+            Dictionary with cache statistics
+        """
+        with self.lock:
+            stats = {
+                "total_entries": len(self.cache),
+            }
+            return stats
+
+
+# Global file cache instance (initialized by daemon)
+_file_cache: Optional[FileCache] = None
+
+
+def get_file_cache() -> FileCache:
+    """Get global file cache instance.
+
+    Returns:
+        Global FileCache instance
+
+    Raises:
+        RuntimeError: If file cache not initialized
+    """
+    global _file_cache
+    if _file_cache is None:
+        raise RuntimeError("FileCache not initialized. Call init_file_cache() first.")
+    return _file_cache
+
+
+def init_file_cache(cache_file: Path) -> FileCache:
+    """Initialize global file cache.
+
+    Args:
+        cache_file: Path to cache file
+
+    Returns:
+        Initialized FileCache instance
+    """
+    global _file_cache
+    _file_cache = FileCache(cache_file=cache_file)
+    logger.info(f"FileCache initialized with cache file: {cache_file}")
+    return _file_cache
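FileCache's intended cycle is check, compile, then batch-update: query needs_recompilation() per translation unit, compile only the files that changed, and record their new hashes with a single update_batch() call so the JSON cache is written once. A minimal sketch (paths are illustrative, not from the package):

    from pathlib import Path
    from fbuild.daemon.file_cache import FileCache

    cache = FileCache(cache_file=Path(".fbuild/file_cache.json"))

    sources = [Path("src/main.cpp"), Path("src/util.cpp")]
    stale = [
        src for src in sources
        if cache.needs_recompilation(src, Path("build") / (src.stem + ".o"))
    ]

    # ... invoke the compiler for each file in `stale` here ...

    # One batch update -> one atomic write of the JSON cache
    cache.update_batch(stale)

Inside the daemon itself, the module-level init_file_cache()/get_file_cache() pair above provides a shared instance instead of constructing FileCache directly.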