fbuild-1.2.8-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fbuild/__init__.py +390 -0
- fbuild/assets/example.txt +1 -0
- fbuild/build/__init__.py +117 -0
- fbuild/build/archive_creator.py +186 -0
- fbuild/build/binary_generator.py +444 -0
- fbuild/build/build_component_factory.py +131 -0
- fbuild/build/build_info_generator.py +624 -0
- fbuild/build/build_state.py +325 -0
- fbuild/build/build_utils.py +93 -0
- fbuild/build/compilation_executor.py +422 -0
- fbuild/build/compiler.py +165 -0
- fbuild/build/compiler_avr.py +574 -0
- fbuild/build/configurable_compiler.py +664 -0
- fbuild/build/configurable_linker.py +637 -0
- fbuild/build/flag_builder.py +214 -0
- fbuild/build/library_dependency_processor.py +185 -0
- fbuild/build/linker.py +708 -0
- fbuild/build/orchestrator.py +67 -0
- fbuild/build/orchestrator_avr.py +651 -0
- fbuild/build/orchestrator_esp32.py +878 -0
- fbuild/build/orchestrator_rp2040.py +719 -0
- fbuild/build/orchestrator_stm32.py +696 -0
- fbuild/build/orchestrator_teensy.py +580 -0
- fbuild/build/source_compilation_orchestrator.py +218 -0
- fbuild/build/source_scanner.py +516 -0
- fbuild/cli.py +717 -0
- fbuild/cli_utils.py +314 -0
- fbuild/config/__init__.py +16 -0
- fbuild/config/board_config.py +542 -0
- fbuild/config/board_loader.py +92 -0
- fbuild/config/ini_parser.py +369 -0
- fbuild/config/mcu_specs.py +88 -0
- fbuild/daemon/__init__.py +42 -0
- fbuild/daemon/async_client.py +531 -0
- fbuild/daemon/client.py +1505 -0
- fbuild/daemon/compilation_queue.py +293 -0
- fbuild/daemon/configuration_lock.py +865 -0
- fbuild/daemon/daemon.py +585 -0
- fbuild/daemon/daemon_context.py +293 -0
- fbuild/daemon/error_collector.py +263 -0
- fbuild/daemon/file_cache.py +332 -0
- fbuild/daemon/firmware_ledger.py +546 -0
- fbuild/daemon/lock_manager.py +508 -0
- fbuild/daemon/logging_utils.py +149 -0
- fbuild/daemon/messages.py +957 -0
- fbuild/daemon/operation_registry.py +288 -0
- fbuild/daemon/port_state_manager.py +249 -0
- fbuild/daemon/process_tracker.py +366 -0
- fbuild/daemon/processors/__init__.py +18 -0
- fbuild/daemon/processors/build_processor.py +248 -0
- fbuild/daemon/processors/deploy_processor.py +664 -0
- fbuild/daemon/processors/install_deps_processor.py +431 -0
- fbuild/daemon/processors/locking_processor.py +777 -0
- fbuild/daemon/processors/monitor_processor.py +285 -0
- fbuild/daemon/request_processor.py +457 -0
- fbuild/daemon/shared_serial.py +819 -0
- fbuild/daemon/status_manager.py +238 -0
- fbuild/daemon/subprocess_manager.py +316 -0
- fbuild/deploy/__init__.py +21 -0
- fbuild/deploy/deployer.py +67 -0
- fbuild/deploy/deployer_esp32.py +310 -0
- fbuild/deploy/docker_utils.py +315 -0
- fbuild/deploy/monitor.py +519 -0
- fbuild/deploy/qemu_runner.py +603 -0
- fbuild/interrupt_utils.py +34 -0
- fbuild/ledger/__init__.py +52 -0
- fbuild/ledger/board_ledger.py +560 -0
- fbuild/output.py +352 -0
- fbuild/packages/__init__.py +66 -0
- fbuild/packages/archive_utils.py +1098 -0
- fbuild/packages/arduino_core.py +412 -0
- fbuild/packages/cache.py +256 -0
- fbuild/packages/concurrent_manager.py +510 -0
- fbuild/packages/downloader.py +518 -0
- fbuild/packages/fingerprint.py +423 -0
- fbuild/packages/framework_esp32.py +538 -0
- fbuild/packages/framework_rp2040.py +349 -0
- fbuild/packages/framework_stm32.py +459 -0
- fbuild/packages/framework_teensy.py +346 -0
- fbuild/packages/github_utils.py +96 -0
- fbuild/packages/header_trampoline_cache.py +394 -0
- fbuild/packages/library_compiler.py +203 -0
- fbuild/packages/library_manager.py +549 -0
- fbuild/packages/library_manager_esp32.py +725 -0
- fbuild/packages/package.py +163 -0
- fbuild/packages/platform_esp32.py +383 -0
- fbuild/packages/platform_rp2040.py +400 -0
- fbuild/packages/platform_stm32.py +581 -0
- fbuild/packages/platform_teensy.py +312 -0
- fbuild/packages/platform_utils.py +131 -0
- fbuild/packages/platformio_registry.py +369 -0
- fbuild/packages/sdk_utils.py +231 -0
- fbuild/packages/toolchain.py +436 -0
- fbuild/packages/toolchain_binaries.py +196 -0
- fbuild/packages/toolchain_esp32.py +489 -0
- fbuild/packages/toolchain_metadata.py +185 -0
- fbuild/packages/toolchain_rp2040.py +436 -0
- fbuild/packages/toolchain_stm32.py +417 -0
- fbuild/packages/toolchain_teensy.py +404 -0
- fbuild/platform_configs/esp32.json +150 -0
- fbuild/platform_configs/esp32c2.json +144 -0
- fbuild/platform_configs/esp32c3.json +143 -0
- fbuild/platform_configs/esp32c5.json +151 -0
- fbuild/platform_configs/esp32c6.json +151 -0
- fbuild/platform_configs/esp32p4.json +149 -0
- fbuild/platform_configs/esp32s3.json +151 -0
- fbuild/platform_configs/imxrt1062.json +56 -0
- fbuild/platform_configs/rp2040.json +70 -0
- fbuild/platform_configs/rp2350.json +76 -0
- fbuild/platform_configs/stm32f1.json +59 -0
- fbuild/platform_configs/stm32f4.json +63 -0
- fbuild/py.typed +0 -0
- fbuild-1.2.8.dist-info/METADATA +468 -0
- fbuild-1.2.8.dist-info/RECORD +121 -0
- fbuild-1.2.8.dist-info/WHEEL +5 -0
- fbuild-1.2.8.dist-info/entry_points.txt +5 -0
- fbuild-1.2.8.dist-info/licenses/LICENSE +21 -0
- fbuild-1.2.8.dist-info/top_level.txt +2 -0
- fbuild_lint/__init__.py +0 -0
- fbuild_lint/ruff_plugins/__init__.py +0 -0
- fbuild_lint/ruff_plugins/keyboard_interrupt_checker.py +158 -0
fbuild/daemon/processors/deploy_processor.py

@@ -0,0 +1,664 @@
"""
Deploy Request Processor - Handles build + deploy operations.

This module implements the DeployRequestProcessor which executes build and
deployment operations for Arduino/ESP32 projects. It coordinates building
the firmware and then uploading it to the target device.

Enhanced in Iteration 2 with:
- FirmwareLedger integration to skip re-upload if firmware is unchanged
- Source and build flags hash tracking
- ConfigurationLockManager for centralized locking
"""

import logging
import sys
from pathlib import Path
from typing import TYPE_CHECKING

from fbuild.daemon.firmware_ledger import (
    compute_build_flags_hash,
    compute_firmware_hash,
    compute_source_hash,
)
from fbuild.daemon.messages import DaemonState, MonitorRequest, OperationType
from fbuild.daemon.port_state_manager import PortState
from fbuild.daemon.request_processor import RequestProcessor

if TYPE_CHECKING:
    from fbuild.daemon.daemon_context import DaemonContext
    from fbuild.daemon.messages import DeployRequest


class DeployRequestProcessor(RequestProcessor):
    """Processor for deploy requests.

    This processor handles building and deploying Arduino/ESP32 projects. It:
    1. Reloads build modules to pick up code changes (for development)
    2. Builds the firmware using the appropriate orchestrator
    3. Deploys the firmware to the target device
    4. Optionally starts monitoring after successful deployment

    The processor coordinates two major phases (build + deploy) and handles
    the complexity of transitioning to monitoring if requested.

    Example:
        >>> processor = DeployRequestProcessor()
        >>> success = processor.process_request(deploy_request, daemon_context)
    """

    def get_operation_type(self) -> OperationType:
        """Return DEPLOY operation type."""
        return OperationType.DEPLOY

    def get_required_locks(self, request: "DeployRequest", context: "DaemonContext") -> dict[str, str]:
        """Deploy operations require both project and port locks.

        Args:
            request: The deploy request
            context: The daemon context

        Returns:
            Dictionary with project and port lock requirements
        """
        locks = {"project": request.project_dir}
        if request.port:
            locks["port"] = request.port
        return locks

    def get_starting_state(self) -> DaemonState:
        """Deploy starts in DEPLOYING state."""
        return DaemonState.DEPLOYING

    def get_starting_message(self, request: "DeployRequest") -> str:
        """Get the starting status message."""
        return f"Deploying {request.environment}"

    def get_success_message(self, request: "DeployRequest") -> str:
        """Get the success status message."""
        return "Deploy successful"

    def get_failure_message(self, request: "DeployRequest") -> str:
        """Get the failure status message."""
        return "Deploy failed"

    def execute_operation(self, request: "DeployRequest", context: "DaemonContext") -> bool:
        """Execute the build + deploy operation.

        This is the core deploy logic extracted from the original
        process_deploy_request function. All boilerplate (locks, status
        updates, error handling) is handled by the base RequestProcessor.

        The operation has three phases:
        1. Check: See if firmware is already deployed (skip redeploy if unchanged)
        2. Build: Compile the firmware
        3. Deploy: Upload the firmware to device

        If monitor_after is requested, the processor will coordinate
        transitioning to monitoring after successful deployment.

        Args:
            request: The deploy request containing project_dir, environment, etc.
            context: The daemon context with all subsystems

        Returns:
            True if deploy succeeded, False otherwise
        """
        # Phase 0: Check if we can skip deployment using firmware ledger
        skip_deploy, source_hash, build_flags_hash = self._check_firmware_ledger(request, context)

        if skip_deploy and request.port:
            logging.info(f"Firmware unchanged, skipping build and deploy for {request.port}")
            # Update status to indicate skip
            self._update_status(
                context,
                DaemonState.COMPLETED,
                "Firmware unchanged, skipping deploy",
                request=request,
                operation_in_progress=False,
            )
            # If monitoring requested, still start it (firmware is already there)
            if request.monitor_after:
                self._start_monitoring(request, request.port, context)
            return True

        # Phase 1: Build firmware
        logging.info(f"Building project: {request.project_dir}")
        build_result = self._build_firmware(request, context)
        if not build_result:
            return False

        # Phase 2: Deploy firmware
        logging.info(f"Deploying to {request.port if request.port else 'auto-detected port'}")
        used_port = self._deploy_firmware(request, context)
        if not used_port:
            return False

        # Phase 2.5: Record deployment in firmware ledger
        self._record_deployment(request, used_port, source_hash, build_flags_hash, context)

        # Phase 3: Optional monitoring or release port state
        if request.monitor_after and used_port:
            # _start_monitoring handles port state release when monitoring completes
            self._start_monitoring(request, used_port, context)
        else:
            # No monitoring requested - release port state now
            if used_port:
                context.port_state_manager.release_port(used_port)

        logging.info("Deploy completed successfully")
        return True

    def _check_firmware_ledger(self, request: "DeployRequest", context: "DaemonContext") -> tuple[bool, str, str]:
        """Check if firmware is already deployed and unchanged.

        Uses the firmware ledger to determine if we can skip the build and deploy.
        This is a major optimization when the same firmware is deployed multiple
        times without changes.

        Args:
            request: The deploy request
            context: The daemon context

        Returns:
            Tuple of (can_skip, source_hash, build_flags_hash)
        """
        if not request.port:
            # Can't check without a known port
            return False, "", ""

        try:
            project_path = Path(request.project_dir)

            # Get source files to hash
            source_files = self._get_source_files(project_path)
            if not source_files:
                logging.debug("No source files found for hashing")
                return False, "", ""

            # Compute hashes
            source_hash = compute_source_hash(source_files)
            build_flags_hash = compute_build_flags_hash(self._get_build_flags(project_path, request.environment))

            # Check if redeploy is needed
            needs_redeploy = context.firmware_ledger.needs_redeploy(
                port=request.port,
                source_hash=source_hash,
                build_flags_hash=build_flags_hash,
            )

            if not needs_redeploy:
                logging.info(f"Firmware ledger indicates no changes for {request.port}")
                return True, source_hash, build_flags_hash

            logging.debug("Source or build flags changed, redeploy needed")
            return False, source_hash, build_flags_hash

        except KeyboardInterrupt:  # noqa: KBI002
            raise
        except Exception as e:
            logging.warning(f"Error checking firmware ledger: {e}")
            return False, "", ""

    def _get_source_files(self, project_path: Path) -> list[Path]:
        """Get list of source files in the project.

        Args:
            project_path: Path to the project directory

        Returns:
            List of source file paths
        """
        source_extensions = {".c", ".cpp", ".h", ".hpp", ".ino", ".S"}
        source_files = []

        # Check standard source directories
        src_dirs = [
            project_path / "src",
            project_path / "include",
            project_path / "lib",
        ]

        # Also check for .ino files in project root
        for f in project_path.glob("*.ino"):
            source_files.append(f)

        for src_dir in src_dirs:
            if src_dir.exists():
                for ext in source_extensions:
                    source_files.extend(src_dir.rglob(f"*{ext}"))

        return source_files

    def _get_build_flags(self, project_path: Path, environment: str) -> list[str]:
        """Get build flags from platformio.ini.

        Args:
            project_path: Path to the project directory
            environment: Build environment name

        Returns:
            List of build flags
        """
        try:
            from fbuild.config.ini_parser import PlatformIOConfig

            ini_path = project_path / "platformio.ini"
            if not ini_path.exists():
                return []

            config = PlatformIOConfig(ini_path)
            env_config = config.get_env_config(environment)
            build_flags = env_config.get("build_flags", "")

            if isinstance(build_flags, str):
                return build_flags.split() if build_flags else []
            return list(build_flags) if build_flags else []
        except KeyboardInterrupt:  # noqa: KBI002
            raise
        except Exception as e:
            logging.warning(f"Error reading build flags: {e}")
            return []

    def _record_deployment(
        self,
        request: "DeployRequest",
        port: str,
        source_hash: str,
        build_flags_hash: str,
        context: "DaemonContext",
    ) -> None:
        """Record successful deployment in firmware ledger.

        Args:
            request: The deploy request
            port: Port the firmware was deployed to
            source_hash: Hash of source files
            build_flags_hash: Hash of build flags
            context: The daemon context
        """
        try:
            project_path = Path(request.project_dir)

            # Find the firmware file
            firmware_path = self._find_firmware_path(project_path, request.environment)
            if not firmware_path:
                logging.warning("Could not find firmware file for ledger recording")
                return

            # Compute firmware hash
            firmware_hash = compute_firmware_hash(firmware_path)

            # Record in ledger
            context.firmware_ledger.record_deployment(
                port=port,
                firmware_hash=firmware_hash,
                source_hash=source_hash,
                project_dir=str(project_path),
                environment=request.environment,
                build_flags_hash=build_flags_hash,
            )
            logging.info(f"Recorded deployment in firmware ledger for {port}")

        except KeyboardInterrupt:  # noqa: KBI002
            raise
        except Exception as e:
            logging.warning(f"Error recording deployment in ledger: {e}")

    def _find_firmware_path(self, project_path: Path, environment: str) -> Path | None:
        """Find the firmware file for the given environment.

        Args:
            project_path: Path to the project directory
            environment: Build environment name

        Returns:
            Path to firmware file, or None if not found
        """
        # Check common firmware locations
        build_dir = project_path / ".pio" / "build" / environment
        if not build_dir.exists():
            build_dir = project_path / ".fbuild" / "build" / environment

        if not build_dir.exists():
            return None

        # Look for firmware files (prefer .bin, then .hex, then .elf)
        for ext in [".bin", ".hex", ".elf"]:
            for firmware_file in build_dir.glob(f"*{ext}"):
                return firmware_file

        return None

    def _build_firmware(self, request: "DeployRequest", context: "DaemonContext") -> bool:
        """Build the firmware.

        Args:
            request: The deploy request
            context: The daemon context

        Returns:
            True if build succeeded, False otherwise
        """
        # Update status to building
        self._update_status(
            context,
            DaemonState.BUILDING,
            f"Building {request.environment}",
            request=request,
            operation_type=OperationType.BUILD_AND_DEPLOY,
        )

        # Reload build modules to pick up code changes
        self._reload_build_modules()

        # Detect platform type from platformio.ini to select appropriate orchestrator
        try:
            from fbuild.config.ini_parser import PlatformIOConfig

            project_path = Path(request.project_dir)
            ini_path = project_path / "platformio.ini"

            if not ini_path.exists():
                logging.error(f"platformio.ini not found at {ini_path}")
                return False

            config = PlatformIOConfig(ini_path)
            env_config = config.get_env_config(request.environment)
            platform = env_config.get("platform", "").lower()

            logging.info(f"Detected platform: {platform}")

        except KeyboardInterrupt as ke:
            from fbuild.interrupt_utils import handle_keyboard_interrupt_properly

            handle_keyboard_interrupt_properly(ke)
            raise  # Never reached, but satisfies type checker
        except Exception as e:
            logging.error(f"Failed to parse platformio.ini: {e}")
            return False

        # Normalize platform name (handle both direct names and URLs)
        # URLs like "https://.../platform-espressif32.zip" -> "espressif32"
        # URLs like "https://.../platform-atmelavr.zip" -> "atmelavr"
        # "raspberrypi" or "platform-raspberrypi" -> "raspberrypi"
        platform_name = platform
        if "platform-espressif32" in platform:
            platform_name = "espressif32"
        elif "platform-atmelavr" in platform or platform == "atmelavr":
            platform_name = "atmelavr"
        elif "platform-raspberrypi" in platform or platform == "raspberrypi":
            platform_name = "raspberrypi"

        logging.info(f"Normalized platform: {platform_name}")

        # Select orchestrator based on platform
        if platform_name == "atmelavr":
            module_name = "fbuild.build.orchestrator_avr"
            class_name = "BuildOrchestratorAVR"
        elif platform_name == "espressif32":
            module_name = "fbuild.build.orchestrator_esp32"
            class_name = "OrchestratorESP32"
        elif platform_name == "raspberrypi":
            module_name = "fbuild.build.orchestrator_rp2040"
            class_name = "OrchestratorRP2040"
        else:
            logging.error(f"Unsupported platform: {platform_name}")
            return False

        # Get fresh orchestrator class after module reload
        try:
            orchestrator_class = getattr(sys.modules[module_name], class_name)
        except (KeyError, AttributeError) as e:
            logging.error(f"Failed to get {class_name} from {module_name}: {e}")
            return False

        # Create orchestrator and execute build
        # Create a Cache instance for package management
        from fbuild.packages.cache import Cache

        cache = Cache(project_dir=Path(request.project_dir))

        # Initialize orchestrator with cache (ESP32 requires it, AVR accepts it)
        orchestrator = orchestrator_class(cache=cache, verbose=False)
        build_result = orchestrator.build(
            project_dir=Path(request.project_dir),
            env_name=request.environment,
            clean=request.clean_build,
            verbose=False,
        )

        if not build_result.success:
            logging.error(f"Build failed: {build_result.message}")
            self._update_status(
                context,
                DaemonState.FAILED,
                f"Build failed: {build_result.message}",
                request=request,
                exit_code=1,
                operation_in_progress=False,
            )
            return False

        logging.info("Build completed successfully")
        return True

    def _deploy_firmware(self, request: "DeployRequest", context: "DaemonContext") -> str | None:
        """Deploy the firmware to the device.

        Args:
            request: The deploy request
            context: The daemon context

        Returns:
            The port that was used for deployment, or None if deployment failed
        """
        # Update status to deploying
        self._update_status(
            context,
            DaemonState.DEPLOYING,
            f"Deploying {request.environment}",
            request=request,
            operation_type=OperationType.DEPLOY,
        )

        # Get fresh deployer class after module reload
        try:
            deployer_class = getattr(sys.modules["fbuild.deploy.deployer_esp32"], "ESP32Deployer")
        except (KeyError, AttributeError) as e:
            logging.error(f"Failed to get ESP32Deployer class: {e}")
            return None

        # Track port state as UPLOADING before deployment starts
        used_port = request.port
        if used_port:
            context.port_state_manager.acquire_port(
                port=used_port,
                state=PortState.UPLOADING,
                client_pid=request.caller_pid,
                project_dir=request.project_dir,
                environment=request.environment,
                operation_id=request.request_id,
            )

        try:
            # Execute deploy
            deployer = deployer_class(verbose=False)
            deploy_result = deployer.deploy(
                project_dir=Path(request.project_dir),
                env_name=request.environment,
                port=request.port,
            )

            if not deploy_result.success:
                logging.error(f"Deploy failed: {deploy_result.message}")
                self._update_status(
                    context,
                    DaemonState.FAILED,
                    f"Deploy failed: {deploy_result.message}",
                    request=request,
                    exit_code=1,
                    operation_in_progress=False,
                )
                # Release port state on failure
                if used_port:
                    context.port_state_manager.release_port(used_port)
                return None

            # Update used_port with actual port if auto-detected
            actual_port = deploy_result.port if deploy_result.port else request.port

            # If port changed (auto-detected), update port state tracking
            if actual_port and actual_port != used_port:
                # Release old port state if we tracked one
                if used_port:
                    context.port_state_manager.release_port(used_port)
                # Track the actual port used
                context.port_state_manager.acquire_port(
                    port=actual_port,
                    state=PortState.UPLOADING,
                    client_pid=request.caller_pid,
                    project_dir=request.project_dir,
                    environment=request.environment,
                    operation_id=request.request_id,
                )

            # Return the port that was actually used
            return actual_port

        except KeyboardInterrupt:  # noqa: KBI002
            logging.warning("Deploy interrupted by user")
            # Release port state on interruption
            if used_port:
                context.port_state_manager.release_port(used_port)
            raise
        except Exception as e:
            logging.error(f"Deploy exception: {e}")
            # Release port state on exception
            if used_port:
                context.port_state_manager.release_port(used_port)
            raise

    def _start_monitoring(self, request: "DeployRequest", port: str, context: "DaemonContext") -> None:
        """Start monitoring after successful deployment.

        This creates a MonitorRequest and processes it immediately.
        Note: This is called while still holding locks, so we need to
        release them first by returning from execute_operation.

        For now, we'll just log that monitoring should start. The actual
        implementation of post-deploy monitoring will be handled in the
        daemon.py integration (Task 1.8).

        Args:
            request: The deploy request
            port: The port to monitor
            context: The daemon context
        """
        logging.info(f"Monitor after deploy requested for port {port}")

        # Transition port state to MONITORING
        context.port_state_manager.update_state(port, PortState.MONITORING)

        # Update status to indicate transition to monitoring
        self._update_status(
            context,
            DaemonState.MONITORING,
            "Transitioning to monitor after deploy",
            request=request,
        )

        # Create monitor request
        monitor_request = MonitorRequest(
            project_dir=request.project_dir,
            environment=request.environment,
            port=port,
            baud_rate=None,  # Use config default
            halt_on_error=request.monitor_halt_on_error,
            halt_on_success=request.monitor_halt_on_success,
            expect=request.monitor_expect,
            timeout=request.monitor_timeout,
            caller_pid=request.caller_pid,
            caller_cwd=request.caller_cwd,
            show_timestamp=request.monitor_show_timestamp,
            request_id=request.request_id,
        )

        try:
            # Import and use MonitorRequestProcessor to handle monitoring
            # This will be imported at runtime to avoid circular dependencies
            from fbuild.daemon.processors.monitor_processor import (
                MonitorRequestProcessor,
            )

            monitor_processor = MonitorRequestProcessor()
            # Note: This will block until monitoring completes
            # The locks will be released by the base class after execute_operation returns
            monitor_processor.process_request(monitor_request, context)
        finally:
            # Release port state when monitoring completes
            context.port_state_manager.release_port(port)

    def _reload_build_modules(self) -> None:
        """Reload build-related modules to pick up code changes.

        This is critical for development on Windows where daemon caching prevents
        testing code changes. Reloads key modules that are frequently modified.

        Order matters: reload dependencies first, then modules that import them.
        """
        import importlib

        modules_to_reload = [
            # Core utilities and packages (reload first - no dependencies)
            "fbuild.packages.downloader",
            "fbuild.packages.archive_utils",
            "fbuild.packages.platformio_registry",
            "fbuild.packages.toolchain",
            "fbuild.packages.toolchain_esp32",
            "fbuild.packages.arduino_core",
            "fbuild.packages.framework_esp32",
            "fbuild.packages.platform_esp32",
            "fbuild.packages.library_manager",
            "fbuild.packages.library_manager_esp32",
            # Build system (reload second - depends on packages)
            "fbuild.build.archive_creator",
            "fbuild.build.compiler",
            "fbuild.build.configurable_compiler",
            "fbuild.build.linker",
            "fbuild.build.configurable_linker",
            "fbuild.build.source_scanner",
            "fbuild.build.compilation_executor",
            # Orchestrators (reload third - depends on build system)
            "fbuild.build.orchestrator",
            "fbuild.build.orchestrator_avr",
            "fbuild.build.orchestrator_esp32",
            # Deploy and monitor (reload with build system)
            "fbuild.deploy.deployer",
            "fbuild.deploy.deployer_esp32",
            "fbuild.deploy.monitor",
            # Top-level module packages (reload last to update __init__.py imports)
            "fbuild.build",
            "fbuild.deploy",
        ]

        reloaded_count = 0
        for module_name in modules_to_reload:
            try:
                if module_name in sys.modules:
                    # Module already loaded - reload it to pick up changes
                    importlib.reload(sys.modules[module_name])
                    reloaded_count += 1
                else:
                    # Module not loaded yet - import it for the first time
                    __import__(module_name)
                    reloaded_count += 1
            except KeyboardInterrupt as ke:
                from fbuild.interrupt_utils import handle_keyboard_interrupt_properly

                handle_keyboard_interrupt_properly(ke)
            except Exception as e:
                logging.warning(f"Failed to reload/import module {module_name}: {e}")

        if reloaded_count > 0:
            logging.info(f"Loaded/reloaded {reloaded_count} build modules")