fbuild-1.2.8-py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fbuild/__init__.py +390 -0
- fbuild/assets/example.txt +1 -0
- fbuild/build/__init__.py +117 -0
- fbuild/build/archive_creator.py +186 -0
- fbuild/build/binary_generator.py +444 -0
- fbuild/build/build_component_factory.py +131 -0
- fbuild/build/build_info_generator.py +624 -0
- fbuild/build/build_state.py +325 -0
- fbuild/build/build_utils.py +93 -0
- fbuild/build/compilation_executor.py +422 -0
- fbuild/build/compiler.py +165 -0
- fbuild/build/compiler_avr.py +574 -0
- fbuild/build/configurable_compiler.py +664 -0
- fbuild/build/configurable_linker.py +637 -0
- fbuild/build/flag_builder.py +214 -0
- fbuild/build/library_dependency_processor.py +185 -0
- fbuild/build/linker.py +708 -0
- fbuild/build/orchestrator.py +67 -0
- fbuild/build/orchestrator_avr.py +651 -0
- fbuild/build/orchestrator_esp32.py +878 -0
- fbuild/build/orchestrator_rp2040.py +719 -0
- fbuild/build/orchestrator_stm32.py +696 -0
- fbuild/build/orchestrator_teensy.py +580 -0
- fbuild/build/source_compilation_orchestrator.py +218 -0
- fbuild/build/source_scanner.py +516 -0
- fbuild/cli.py +717 -0
- fbuild/cli_utils.py +314 -0
- fbuild/config/__init__.py +16 -0
- fbuild/config/board_config.py +542 -0
- fbuild/config/board_loader.py +92 -0
- fbuild/config/ini_parser.py +369 -0
- fbuild/config/mcu_specs.py +88 -0
- fbuild/daemon/__init__.py +42 -0
- fbuild/daemon/async_client.py +531 -0
- fbuild/daemon/client.py +1505 -0
- fbuild/daemon/compilation_queue.py +293 -0
- fbuild/daemon/configuration_lock.py +865 -0
- fbuild/daemon/daemon.py +585 -0
- fbuild/daemon/daemon_context.py +293 -0
- fbuild/daemon/error_collector.py +263 -0
- fbuild/daemon/file_cache.py +332 -0
- fbuild/daemon/firmware_ledger.py +546 -0
- fbuild/daemon/lock_manager.py +508 -0
- fbuild/daemon/logging_utils.py +149 -0
- fbuild/daemon/messages.py +957 -0
- fbuild/daemon/operation_registry.py +288 -0
- fbuild/daemon/port_state_manager.py +249 -0
- fbuild/daemon/process_tracker.py +366 -0
- fbuild/daemon/processors/__init__.py +18 -0
- fbuild/daemon/processors/build_processor.py +248 -0
- fbuild/daemon/processors/deploy_processor.py +664 -0
- fbuild/daemon/processors/install_deps_processor.py +431 -0
- fbuild/daemon/processors/locking_processor.py +777 -0
- fbuild/daemon/processors/monitor_processor.py +285 -0
- fbuild/daemon/request_processor.py +457 -0
- fbuild/daemon/shared_serial.py +819 -0
- fbuild/daemon/status_manager.py +238 -0
- fbuild/daemon/subprocess_manager.py +316 -0
- fbuild/deploy/__init__.py +21 -0
- fbuild/deploy/deployer.py +67 -0
- fbuild/deploy/deployer_esp32.py +310 -0
- fbuild/deploy/docker_utils.py +315 -0
- fbuild/deploy/monitor.py +519 -0
- fbuild/deploy/qemu_runner.py +603 -0
- fbuild/interrupt_utils.py +34 -0
- fbuild/ledger/__init__.py +52 -0
- fbuild/ledger/board_ledger.py +560 -0
- fbuild/output.py +352 -0
- fbuild/packages/__init__.py +66 -0
- fbuild/packages/archive_utils.py +1098 -0
- fbuild/packages/arduino_core.py +412 -0
- fbuild/packages/cache.py +256 -0
- fbuild/packages/concurrent_manager.py +510 -0
- fbuild/packages/downloader.py +518 -0
- fbuild/packages/fingerprint.py +423 -0
- fbuild/packages/framework_esp32.py +538 -0
- fbuild/packages/framework_rp2040.py +349 -0
- fbuild/packages/framework_stm32.py +459 -0
- fbuild/packages/framework_teensy.py +346 -0
- fbuild/packages/github_utils.py +96 -0
- fbuild/packages/header_trampoline_cache.py +394 -0
- fbuild/packages/library_compiler.py +203 -0
- fbuild/packages/library_manager.py +549 -0
- fbuild/packages/library_manager_esp32.py +725 -0
- fbuild/packages/package.py +163 -0
- fbuild/packages/platform_esp32.py +383 -0
- fbuild/packages/platform_rp2040.py +400 -0
- fbuild/packages/platform_stm32.py +581 -0
- fbuild/packages/platform_teensy.py +312 -0
- fbuild/packages/platform_utils.py +131 -0
- fbuild/packages/platformio_registry.py +369 -0
- fbuild/packages/sdk_utils.py +231 -0
- fbuild/packages/toolchain.py +436 -0
- fbuild/packages/toolchain_binaries.py +196 -0
- fbuild/packages/toolchain_esp32.py +489 -0
- fbuild/packages/toolchain_metadata.py +185 -0
- fbuild/packages/toolchain_rp2040.py +436 -0
- fbuild/packages/toolchain_stm32.py +417 -0
- fbuild/packages/toolchain_teensy.py +404 -0
- fbuild/platform_configs/esp32.json +150 -0
- fbuild/platform_configs/esp32c2.json +144 -0
- fbuild/platform_configs/esp32c3.json +143 -0
- fbuild/platform_configs/esp32c5.json +151 -0
- fbuild/platform_configs/esp32c6.json +151 -0
- fbuild/platform_configs/esp32p4.json +149 -0
- fbuild/platform_configs/esp32s3.json +151 -0
- fbuild/platform_configs/imxrt1062.json +56 -0
- fbuild/platform_configs/rp2040.json +70 -0
- fbuild/platform_configs/rp2350.json +76 -0
- fbuild/platform_configs/stm32f1.json +59 -0
- fbuild/platform_configs/stm32f4.json +63 -0
- fbuild/py.typed +0 -0
- fbuild-1.2.8.dist-info/METADATA +468 -0
- fbuild-1.2.8.dist-info/RECORD +121 -0
- fbuild-1.2.8.dist-info/WHEEL +5 -0
- fbuild-1.2.8.dist-info/entry_points.txt +5 -0
- fbuild-1.2.8.dist-info/licenses/LICENSE +21 -0
- fbuild-1.2.8.dist-info/top_level.txt +2 -0
- fbuild_lint/__init__.py +0 -0
- fbuild_lint/ruff_plugins/__init__.py +0 -0
- fbuild_lint/ruff_plugins/keyboard_interrupt_checker.py +158 -0
fbuild/packages/header_trampoline_cache.py
@@ -0,0 +1,394 @@
"""Header Trampoline Cache System.

This module implements ordered header trampoline directories that resolve Windows
CreateProcess() command-line length failures caused by excessive GCC -I arguments.

The problem:
- GCC invocations contain hundreds of long -I paths
- sccache expands response files into a single CreateProcess() call
- Windows enforces a hard 32,767 character string-length limit
- This causes build failures with ESP32-C6 (ESP-IDF) projects

The solution:
- Create "trampoline" header files that redirect to original headers
- Use ordered, short directory names (e.g., C:/inc/001, C:/inc/002, ...)
- Preserve include ordering semantics (critical for ESP-IDF)
- Maintain full sccache compatibility with deterministic generation

Design:
    Original:  -I D:/toolchains/esp-idf/components/freertos/include
               -I D:/toolchains/esp-idf/components/driver/include
               -I D:/build/project/config

    Rewritten: -I C:/inc/001
               -I C:/inc/002
               -I C:/inc/003

    Where C:/inc/001/freertos/FreeRTOS.h contains:
        #pragma once
        #include "D:/toolchains/esp-idf/components/freertos/include/freertos/FreeRTOS.h"

Properties:
- Include order is identical (preserves ESP-IDF correctness)
- Header collision behavior unchanged
- Generated headers resolve correctly
- Only string length of -I arguments changes
- Deterministic and reproducible
- Fully compatible with GCC and sccache
"""

import _thread
import hashlib
import json
import platform
from pathlib import Path
from typing import Any, Dict, List, Optional


class TrampolineCacheError(Exception):
    """Raised when trampoline cache operations fail."""

    pass


class HeaderTrampolineCache:
    """Manages header trampoline cache for reducing command-line length.

    This class handles:
    - Generating ordered trampoline directories
    - Creating trampoline header files
    - Managing cache invalidation
    - Providing rewritten include paths
    """

    def __init__(self, cache_root: Optional[Path] = None, show_progress: bool = True):
        """Initialize header trampoline cache.

        Args:
            cache_root: Root directory for trampoline cache (default: C:/inc on Windows)
            show_progress: Whether to show cache generation progress
        """
        self.show_progress = show_progress

        # Determine cache root
        if cache_root is None:
            if platform.system() == "Windows":
                # Use short, root-level path on Windows
                self.cache_root = Path("C:/inc")
            else:
                # Use /tmp/inc on Linux/Mac
                self.cache_root = Path("/tmp/inc")
        else:
            self.cache_root = Path(cache_root)

        # Metadata file tracks cache state
        self.metadata_file = self.cache_root / ".metadata.json"

    def needs_regeneration(self, include_paths: List[Path]) -> bool:
        """Check if trampoline cache needs regeneration.

        Cache needs regeneration when:
        - Cache doesn't exist
        - Include path list changed
        - Include path order changed
        - Any original header files changed (not implemented yet)

        Args:
            include_paths: Ordered list of include directory paths

        Returns:
            True if cache needs regeneration
        """
        if not self.cache_root.exists() or not self.metadata_file.exists():
            return True

        # Read existing metadata
        try:
            with open(self.metadata_file, "r") as f:
                metadata = json.load(f)
        except KeyboardInterrupt:
            _thread.interrupt_main()
            raise
        except Exception:
            return True

        # Check if include paths changed
        current_hash = self._compute_include_hash(include_paths)
        cached_hash = metadata.get("include_hash", "")

        return current_hash != cached_hash

    def generate_trampolines(self, include_paths: List[Path], exclude_patterns: Optional[List[str]] = None) -> List[Path]:
        """Generate trampoline cache and return rewritten include paths.

        This is the main entry point for the trampoline system.

        Args:
            include_paths: Ordered list of original include directory paths
            exclude_patterns: Optional list of path patterns to exclude from trampolining.
                Paths matching these patterns will be returned as-is.

        Returns:
            Ordered list of trampoline directory paths (short), with excluded paths
            preserved as original paths in their original positions

        Raises:
            TrampolineCacheError: If trampoline generation fails
        """
        # Filter out excluded paths
        filtered_paths = []
        excluded_indices = set()

        if exclude_patterns:
            for idx, path in enumerate(include_paths):
                path_str = str(path)
                excluded = False

                for pattern in exclude_patterns:
                    if pattern in path_str:
                        excluded = True
                        excluded_indices.add(idx)
                        break

                if not excluded:
                    filtered_paths.append(path)
        else:
            filtered_paths = include_paths

        # Check if regeneration needed (use filtered paths for cache validation)
        if not self.needs_regeneration(filtered_paths):
            if self.show_progress:
                excluded_count = len(include_paths) - len(filtered_paths)
                if excluded_count > 0:
                    print(f"[trampolines] Using existing cache at {self.cache_root} " + f"(excluding {excluded_count} paths)")
                else:
                    print(f"[trampolines] Using existing cache at {self.cache_root}")
            return self._load_and_merge_trampoline_paths(include_paths, filtered_paths, excluded_indices)

        if self.show_progress:
            excluded_count = len(include_paths) - len(filtered_paths)
            if excluded_count > 0:
                print(f"[trampolines] Generating cache for {len(filtered_paths)} include paths " + f"(excluding {excluded_count} paths)...")
            else:
                print(f"[trampolines] Generating cache for {len(include_paths)} include paths...")

        try:
            # Clear existing cache
            self._clear_cache()

            # Create cache root
            self.cache_root.mkdir(parents=True, exist_ok=True)

            # Generate trampoline directories (one per non-excluded include path)
            trampoline_paths = []
            for idx, original_path in enumerate(filtered_paths):
                # Create short numbered directory (001, 002, ...)
                layer_name = f"{idx:03d}"
                trampoline_dir = self.cache_root / layer_name
                trampoline_dir.mkdir(parents=True, exist_ok=True)

                # Generate trampolines for all headers under original_path
                self._generate_layer_trampolines(original_path, trampoline_dir)

                trampoline_paths.append(trampoline_dir)

            # Save metadata
            self._save_metadata(filtered_paths, trampoline_paths)

            if self.show_progress:
                print(f"[trampolines] Generated cache at {self.cache_root}")

            # Merge trampolines with excluded paths in original positions
            return self._merge_paths(include_paths, filtered_paths, trampoline_paths, excluded_indices)

        except KeyboardInterrupt:
            _thread.interrupt_main()
            raise
        except Exception as e:
            raise TrampolineCacheError(f"Failed to generate trampoline cache: {e}") from e

    def _generate_layer_trampolines(self, original_path: Path, trampoline_dir: Path) -> None:
        """Generate trampoline headers for a single include layer.

        Args:
            original_path: Original include directory
            trampoline_dir: Trampoline directory for this layer

        Raises:
            TrampolineCacheError: If trampoline generation fails
        """
        if not original_path.exists():
            # Skip non-existent paths (may be generated later)
            return

        # Find all header files under original_path
        header_extensions = {".h", ".hpp", ".hxx", ".h++", ".hh"}
        header_files = []

        try:
            for ext in header_extensions:
                header_files.extend(original_path.rglob(f"*{ext}"))
        except KeyboardInterrupt:
            _thread.interrupt_main()
            raise
        except Exception as e:
            if self.show_progress:
                print(f"[trampolines] Warning: Failed to scan {original_path}: {e}")
            return

        # Generate trampoline for each header
        for header_file in header_files:
            try:
                # Calculate relative path from original_path
                rel_path = header_file.relative_to(original_path)

                # Create trampoline path
                trampoline_file = trampoline_dir / rel_path
                trampoline_file.parent.mkdir(parents=True, exist_ok=True)

                # Generate trampoline content
                # Use forward slashes for portability (GCC accepts both on Windows)
                original_abs = header_file.resolve()
                original_str = str(original_abs).replace("\\", "/")

                trampoline_content = f'#pragma once\n#include "{original_str}"\n'

                # Write trampoline file
                with open(trampoline_file, "w", encoding="utf-8") as f:
                    f.write(trampoline_content)

            except KeyboardInterrupt:
                _thread.interrupt_main()
                raise
            except Exception as e:
                if self.show_progress:
                    print(f"[trampolines] Warning: Failed to create trampoline for {header_file}: {e}")
                continue

    def _compute_include_hash(self, include_paths: List[Path]) -> str:
        """Compute hash of include path list for cache validation.

        Args:
            include_paths: Ordered list of include paths

        Returns:
            SHA256 hash of the include path list
        """
        # Convert paths to strings and join with newlines
        path_str = "\n".join(str(p.resolve()) for p in include_paths)
        return hashlib.sha256(path_str.encode("utf-8")).hexdigest()

    def _save_metadata(self, include_paths: List[Path], trampoline_paths: List[Path]) -> None:
        """Save cache metadata.

        Args:
            include_paths: Original include paths
            trampoline_paths: Generated trampoline paths
        """
        metadata = {
            "version": "1.0",
            "include_hash": self._compute_include_hash(include_paths),
            "original_paths": [str(p.resolve()) for p in include_paths],
            "trampoline_paths": [str(p) for p in trampoline_paths],
            "platform": platform.system(),
        }

        with open(self.metadata_file, "w") as f:
            json.dump(metadata, f, indent=2)

    def _load_trampoline_paths(self, include_paths: List[Path]) -> List[Path]:
        """Load trampoline paths from metadata.

        Args:
            include_paths: Original include paths (for validation)

        Returns:
            List of trampoline directory paths
        """
        with open(self.metadata_file, "r") as f:
            metadata = json.load(f)

        return [Path(p) for p in metadata["trampoline_paths"]]

    def _load_and_merge_trampoline_paths(
        self,
        include_paths: List[Path],
        filtered_paths: List[Path],
        excluded_indices: set,
    ) -> List[Path]:
        """Load trampoline paths and merge with excluded paths.

        Args:
            include_paths: Original include paths (all)
            filtered_paths: Filtered include paths (non-excluded)
            excluded_indices: Set of indices that were excluded

        Returns:
            List of paths with trampolines and original excluded paths
        """
        trampoline_paths = self._load_trampoline_paths(filtered_paths)
        return self._merge_paths(include_paths, filtered_paths, trampoline_paths, excluded_indices)

    def _merge_paths(
        self,
        include_paths: List[Path],
        filtered_paths: List[Path],
        trampoline_paths: List[Path],
        excluded_indices: set,
    ) -> List[Path]:
        """Merge trampoline paths with excluded paths in original positions.

        Args:
            include_paths: Original include paths (all)
            filtered_paths: Filtered include paths (non-excluded)
            trampoline_paths: Generated trampoline paths
            excluded_indices: Set of indices that were excluded

        Returns:
            List of paths with trampolines for non-excluded and originals for excluded
        """
        result = []
        filtered_idx = 0

        for idx, path in enumerate(include_paths):
            if idx in excluded_indices:
                # Use original path for excluded
                result.append(path)
            else:
                # Use trampoline path
                result.append(trampoline_paths[filtered_idx])
                filtered_idx += 1

        return result

    def _clear_cache(self) -> None:
        """Clear existing trampoline cache."""
        if self.cache_root.exists():
            import shutil

            shutil.rmtree(self.cache_root, ignore_errors=True)

    def get_cache_info(self) -> Dict[str, Any]:
        """Get information about the trampoline cache.

        Returns:
            Dictionary with cache information
        """
        info = {
            "cache_root": str(self.cache_root),
            "exists": self.cache_root.exists(),
            "metadata_exists": self.metadata_file.exists(),
        }

        if self.metadata_file.exists():
            try:
                with open(self.metadata_file, "r") as f:
                    metadata = json.load(f)
                info["metadata"] = metadata
            except KeyboardInterrupt:
                _thread.interrupt_main()
                raise
            except Exception:
                pass

        return info
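
For orientation, here is a minimal usage sketch of the cache above. The HeaderTrampolineCache API is as defined in this file; the include paths and the "config" exclude pattern are hypothetical stand-ins.

from pathlib import Path

from fbuild.packages.header_trampoline_cache import HeaderTrampolineCache

# Hypothetical include list; real callers receive these from the
# platform/toolchain layers.
include_paths = [
    Path("D:/toolchains/esp-idf/components/freertos/include"),
    Path("D:/toolchains/esp-idf/components/driver/include"),
    Path("D:/build/project/config"),
]

cache = HeaderTrampolineCache(show_progress=False)
short_paths = cache.generate_trampolines(include_paths, exclude_patterns=["config"])

# Excluded paths come back unchanged, in their original positions; everything
# else is replaced by a short numbered trampoline directory (e.g. C:/inc/000).
include_flags = [f"-I{p}" for p in short_paths]

Note that only the -I argument strings shrink; include order and header resolution are unchanged, which is what keeps the rewrite safe for ESP-IDF and sccache.
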
fbuild/packages/library_compiler.py
@@ -0,0 +1,203 @@
"""Library compilation utilities for fbuild.

This module handles compiling external libraries into static archives (.a files)
with Link-Time Optimization (LTO) support.
"""

import subprocess
from pathlib import Path
from typing import TYPE_CHECKING, Callable, List, Optional, Tuple

if TYPE_CHECKING:
    from .library_manager import LibraryInfo


class LibraryCompilationError(Exception):
    """Exception raised for library compilation errors."""

    pass


class LibraryCompiler:
    """Handles compilation of external libraries into static archives."""

    @staticmethod
    def needs_rebuild(
        archive_file: Path,
        info_file: Path,
        compiler_flags: List[str],
        get_info_func: "Callable[[], Optional[LibraryInfo]]",
    ) -> Tuple[bool, str]:
        """Check if a library needs to be rebuilt.

        Args:
            archive_file: Path to the .a archive file
            info_file: Path to the info.json file
            compiler_flags: Current compiler flags
            get_info_func: Function to load library info from JSON

        Returns:
            Tuple of (needs_rebuild, reason)
        """
        if not archive_file.exists():
            return True, "Archive not found"

        if not info_file.exists():
            return True, "Info file missing"

        info = get_info_func()
        if info is None:
            return True, "Could not load info"

        # Check if compile commands changed
        current_compile_cmd = " ".join(compiler_flags)
        stored_compile_cmd = " ".join(info.compile_commands)

        if current_compile_cmd != stored_compile_cmd:
            return True, "Compiler flags changed"

        return False, ""

    @staticmethod
    def compile_library(
        library_name: str,
        lib_dir: Path,
        source_files: List[Path],
        include_dirs: List[Path],
        compiler_path: Path,
        mcu: str,
        f_cpu: str,
        defines: List[str],
        extra_flags: List[str],
        show_progress: bool = True,
    ) -> Tuple[Path, List[Path], List[str]]:
        """Compile a library into a static archive (.a file).

        This function compiles all source files in a library and creates a static
        archive. It uses Link-Time Optimization (LTO) with -fno-fat-lto-objects
        to generate only LTO bytecode, avoiding assembly errors with complex code.

        Args:
            library_name: Name of the library
            lib_dir: Root directory for the library
            source_files: List of source files to compile
            include_dirs: Include directories for compilation
            compiler_path: Path to avr-gcc/avr-g++
            mcu: MCU target (e.g., atmega328p)
            f_cpu: CPU frequency (e.g., 16000000L)
            defines: Preprocessor defines
            extra_flags: Additional compiler flags
            show_progress: Whether to show progress

        Returns:
            Tuple of (archive_path, object_files, compile_commands)

        Raises:
            LibraryCompilationError: If compilation fails
        """
        try:
            if show_progress:
                print(f"Compiling library: {library_name}")

            if not source_files:
                raise LibraryCompilationError(f"No source files found in library '{library_name}'")

            # Compile each source file
            object_files = []
            compile_commands = []
            gcc_path = compiler_path.parent / "avr-gcc"
            gxx_path = compiler_path.parent / "avr-g++"

            for source in source_files:
                # Determine compiler based on extension
                if source.suffix in [".cpp", ".cc", ".cxx"]:
                    compiler = gxx_path
                    std_flag = "-std=gnu++11"
                else:
                    compiler = gcc_path
                    std_flag = "-std=gnu11"

                # Output object file
                obj_file = lib_dir / f"{source.stem}.o"

                # Build compile command
                # Use -flto with -fno-fat-lto-objects to generate only LTO bytecode
                # This avoids assembly errors with complex code like FastLED
                # The trade-off is we must link with object files, not archives
                cmd = [
                    str(compiler),
                    "-c",
                    "-g",
                    "-Os",
                    std_flag,
                    "-ffunction-sections",
                    "-fdata-sections",
                    "-flto",
                    "-fno-fat-lto-objects",  # LTO bytecode only, no assembly
                    f"-mmcu={mcu}",
                ]

                # Add defines from list (format: "KEY=value" or "KEY")
                for define in defines:
                    cmd.append(f"-D{define}")

                # Add include paths
                for inc_path in include_dirs:
                    cmd.append(f"-I{inc_path}")

                # Add extra flags
                cmd.extend(extra_flags)

                # Add source and output
                cmd.extend(["-o", str(obj_file), str(source)])

                # Store command for rebuild detection
                compile_commands.append(" ".join(cmd))

                # Compile
                if show_progress:
                    print(f"  Compiling {source.name}...")

                result = subprocess.run(cmd, capture_output=True, text=True, encoding="utf-8")

                if result.returncode != 0:
                    raise LibraryCompilationError(f"Failed to compile {source}:\n{result.stderr}")

                object_files.append(obj_file)

            # Create static archive using avr-ar
            ar_path = compiler_path.parent / "avr-ar"
            archive_file = lib_dir / f"lib{library_name}.a"

            if show_progress:
                print(f"  Creating archive: {archive_file.name}")

            # Remove old archive if exists
            if archive_file.exists():
                archive_file.unlink()

            # Create new archive
            cmd = [str(ar_path), "rcs", str(archive_file)] + [str(obj) for obj in object_files]

            result = subprocess.run(cmd, capture_output=True, text=True, encoding="utf-8")

            if result.returncode != 0:
                raise LibraryCompilationError(f"Failed to create archive for {library_name}:\n{result.stderr}")

            # Keep object files for LTO linking (don't delete them)
            # Object files are needed for proper LTO symbol resolution

            if show_progress:
                print(f"Library '{library_name}' compiled successfully")

            return archive_file, object_files, compile_commands

        except subprocess.CalledProcessError as e:
            raise LibraryCompilationError(f"Compilation failed for library '{library_name}': {e}") from e
        except KeyboardInterrupt as ke:
            from fbuild.interrupt_utils import handle_keyboard_interrupt_properly

            handle_keyboard_interrupt_properly(ke)
            raise  # Never reached, but satisfies type checker
        except Exception as e:
            raise LibraryCompilationError(f"Failed to compile library '{library_name}': {e}") from e
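
And a minimal sketch of driving the compiler above with rebuild detection. The needs_rebuild and compile_library signatures are as defined in this file; the paths, flags, and the load_info helper are hypothetical.

from pathlib import Path

from fbuild.packages.library_compiler import LibraryCompiler

lib_dir = Path(".fbuild/libs/Servo")  # hypothetical per-library build directory
archive = lib_dir / "libServo.a"
info_file = lib_dir / "info.json"
flags = ["-DARDUINO=10819"]  # hypothetical extra flags

def load_info():
    # Hypothetical helper; real callers return a LibraryInfo loaded from
    # info.json via library_manager. Returning None forces a rebuild.
    return None

rebuild, reason = LibraryCompiler.needs_rebuild(archive, info_file, flags, load_info)
if rebuild:
    archive, objs, cmds = LibraryCompiler.compile_library(
        library_name="Servo",
        lib_dir=lib_dir,
        source_files=sorted((lib_dir / "src").glob("*.cpp")),
        include_dirs=[lib_dir / "src"],
        compiler_path=Path("/opt/avr/bin/avr-g++"),
        mcu="atmega328p",
        f_cpu="16000000L",
        defines=["F_CPU=16000000L"],
        extra_flags=flags,
        show_progress=True,
    )

Because the objects are compiled with -fno-fat-lto-objects, the returned object_files list (not just the .a archive) is what a caller passes to the final link for LTO symbol resolution, as the in-code comments note.
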