fbuild 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fbuild may be problematic; see the details below for more information.
- fbuild/__init__.py +0 -0
- fbuild/assets/example.txt +1 -0
- fbuild/build/__init__.py +117 -0
- fbuild/build/archive_creator.py +186 -0
- fbuild/build/binary_generator.py +444 -0
- fbuild/build/build_component_factory.py +131 -0
- fbuild/build/build_state.py +325 -0
- fbuild/build/build_utils.py +98 -0
- fbuild/build/compilation_executor.py +422 -0
- fbuild/build/compiler.py +165 -0
- fbuild/build/compiler_avr.py +574 -0
- fbuild/build/configurable_compiler.py +612 -0
- fbuild/build/configurable_linker.py +637 -0
- fbuild/build/flag_builder.py +186 -0
- fbuild/build/library_dependency_processor.py +185 -0
- fbuild/build/linker.py +708 -0
- fbuild/build/orchestrator.py +67 -0
- fbuild/build/orchestrator_avr.py +656 -0
- fbuild/build/orchestrator_esp32.py +797 -0
- fbuild/build/orchestrator_teensy.py +543 -0
- fbuild/build/source_compilation_orchestrator.py +220 -0
- fbuild/build/source_scanner.py +516 -0
- fbuild/cli.py +566 -0
- fbuild/cli_utils.py +312 -0
- fbuild/config/__init__.py +16 -0
- fbuild/config/board_config.py +457 -0
- fbuild/config/board_loader.py +92 -0
- fbuild/config/ini_parser.py +209 -0
- fbuild/config/mcu_specs.py +88 -0
- fbuild/daemon/__init__.py +34 -0
- fbuild/daemon/client.py +929 -0
- fbuild/daemon/compilation_queue.py +293 -0
- fbuild/daemon/daemon.py +474 -0
- fbuild/daemon/daemon_context.py +196 -0
- fbuild/daemon/error_collector.py +263 -0
- fbuild/daemon/file_cache.py +332 -0
- fbuild/daemon/lock_manager.py +270 -0
- fbuild/daemon/logging_utils.py +149 -0
- fbuild/daemon/messages.py +301 -0
- fbuild/daemon/operation_registry.py +288 -0
- fbuild/daemon/process_tracker.py +366 -0
- fbuild/daemon/processors/__init__.py +12 -0
- fbuild/daemon/processors/build_processor.py +157 -0
- fbuild/daemon/processors/deploy_processor.py +327 -0
- fbuild/daemon/processors/monitor_processor.py +146 -0
- fbuild/daemon/request_processor.py +401 -0
- fbuild/daemon/status_manager.py +216 -0
- fbuild/daemon/subprocess_manager.py +316 -0
- fbuild/deploy/__init__.py +17 -0
- fbuild/deploy/deployer.py +67 -0
- fbuild/deploy/deployer_esp32.py +314 -0
- fbuild/deploy/monitor.py +495 -0
- fbuild/interrupt_utils.py +34 -0
- fbuild/packages/__init__.py +53 -0
- fbuild/packages/archive_utils.py +1098 -0
- fbuild/packages/arduino_core.py +412 -0
- fbuild/packages/cache.py +249 -0
- fbuild/packages/downloader.py +366 -0
- fbuild/packages/framework_esp32.py +538 -0
- fbuild/packages/framework_teensy.py +346 -0
- fbuild/packages/github_utils.py +96 -0
- fbuild/packages/header_trampoline_cache.py +394 -0
- fbuild/packages/library_compiler.py +203 -0
- fbuild/packages/library_manager.py +549 -0
- fbuild/packages/library_manager_esp32.py +413 -0
- fbuild/packages/package.py +163 -0
- fbuild/packages/platform_esp32.py +383 -0
- fbuild/packages/platform_teensy.py +312 -0
- fbuild/packages/platform_utils.py +131 -0
- fbuild/packages/platformio_registry.py +325 -0
- fbuild/packages/sdk_utils.py +231 -0
- fbuild/packages/toolchain.py +436 -0
- fbuild/packages/toolchain_binaries.py +196 -0
- fbuild/packages/toolchain_esp32.py +484 -0
- fbuild/packages/toolchain_metadata.py +185 -0
- fbuild/packages/toolchain_teensy.py +404 -0
- fbuild/platform_configs/esp32.json +150 -0
- fbuild/platform_configs/esp32c2.json +144 -0
- fbuild/platform_configs/esp32c3.json +143 -0
- fbuild/platform_configs/esp32c5.json +151 -0
- fbuild/platform_configs/esp32c6.json +151 -0
- fbuild/platform_configs/esp32p4.json +149 -0
- fbuild/platform_configs/esp32s3.json +151 -0
- fbuild/platform_configs/imxrt1062.json +56 -0
- fbuild-1.1.0.dist-info/METADATA +447 -0
- fbuild-1.1.0.dist-info/RECORD +93 -0
- fbuild-1.1.0.dist-info/WHEEL +5 -0
- fbuild-1.1.0.dist-info/entry_points.txt +5 -0
- fbuild-1.1.0.dist-info/licenses/LICENSE +21 -0
- fbuild-1.1.0.dist-info/top_level.txt +2 -0
- fbuild_lint/__init__.py +0 -0
- fbuild_lint/ruff_plugins/__init__.py +0 -0
- fbuild_lint/ruff_plugins/keyboard_interrupt_checker.py +158 -0
|
@@ -0,0 +1,327 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Deploy Request Processor - Handles build + deploy operations.
|
|
3
|
+
|
|
4
|
+
This module implements the DeployRequestProcessor which executes build and
|
|
5
|
+
deployment operations for Arduino/ESP32 projects. It coordinates building
|
|
6
|
+
the firmware and then uploading it to the target device.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import logging
|
|
10
|
+
import sys
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import TYPE_CHECKING
|
|
13
|
+
|
|
14
|
+
from fbuild.daemon.messages import DaemonState, MonitorRequest, OperationType
|
|
15
|
+
from fbuild.daemon.request_processor import RequestProcessor
|
|
16
|
+
|
|
17
|
+
if TYPE_CHECKING:
|
|
18
|
+
from fbuild.daemon.daemon_context import DaemonContext
|
|
19
|
+
from fbuild.daemon.messages import DeployRequest
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class DeployRequestProcessor(RequestProcessor):
    """Processor for deploy requests.

    This processor handles building and deploying Arduino/ESP32 projects. It:
    1. Reloads build modules to pick up code changes (for development)
    2. Builds the firmware using the appropriate orchestrator
    3. Deploys the firmware to the target device
    4. Optionally starts monitoring after successful deployment

    The processor coordinates two major phases (build + deploy) and handles
    the complexity of transitioning to monitoring if requested.

    Example:
        >>> processor = DeployRequestProcessor()
        >>> success = processor.process_request(deploy_request, daemon_context)
    """

    def get_operation_type(self) -> OperationType:
        """Return DEPLOY operation type."""
        return OperationType.DEPLOY

    def get_required_locks(self, request: "DeployRequest", context: "DaemonContext") -> dict[str, str]:
        """Deploy operations require both project and port locks.

        Args:
            request: The deploy request
            context: The daemon context

        Returns:
            Dictionary with project and port lock requirements (port lock only
            when an explicit port was requested)
        """
        locks = {"project": request.project_dir}
        if request.port:
            locks["port"] = request.port
        return locks

    def get_starting_state(self) -> DaemonState:
        """Deploy starts in DEPLOYING state."""
        return DaemonState.DEPLOYING

    def get_starting_message(self, request: "DeployRequest") -> str:
        """Get the starting status message."""
        return f"Deploying {request.environment}"

    def get_success_message(self, request: "DeployRequest") -> str:
        """Get the success status message."""
        return "Deploy successful"

    def get_failure_message(self, request: "DeployRequest") -> str:
        """Get the failure status message."""
        return "Deploy failed"

    def execute_operation(self, request: "DeployRequest", context: "DaemonContext") -> bool:
        """Execute the build + deploy operation.

        This is the core deploy logic extracted from the original
        process_deploy_request function. All boilerplate (locks, status
        updates, error handling) is handled by the base RequestProcessor.

        The operation has two phases:
        1. Build: Compile the firmware
        2. Deploy: Upload the firmware to device

        If monitor_after is requested, the processor will coordinate
        transitioning to monitoring after successful deployment.

        Args:
            request: The deploy request containing project_dir, environment, etc.
            context: The daemon context with all subsystems

        Returns:
            True if deploy succeeded, False otherwise
        """
        # Phase 1: Build firmware
        logging.info(f"Building project: {request.project_dir}")
        if not self._build_firmware(request, context):
            return False

        # Phase 2: Deploy firmware
        logging.info(f"Deploying to {request.port if request.port else 'auto-detected port'}")
        used_port = self._deploy_firmware(request, context)
        if not used_port:
            return False

        # Phase 3: Optional monitoring. used_port is guaranteed truthy here
        # (we returned above otherwise), so only the user's flag is checked.
        if request.monitor_after:
            self._start_monitoring(request, used_port, context)

        logging.info("Deploy completed successfully")
        return True

    def _build_firmware(self, request: "DeployRequest", context: "DaemonContext") -> bool:
        """Build the firmware.

        Args:
            request: The deploy request
            context: The daemon context

        Returns:
            True if build succeeded, False otherwise
        """
        # Update status to building
        self._update_status(
            context,
            DaemonState.BUILDING,
            f"Building {request.environment}",
            request=request,
            operation_type=OperationType.BUILD_AND_DEPLOY,
        )

        # Reload build modules to pick up code changes
        self._reload_build_modules()

        # Get fresh orchestrator class after module reload (a direct import
        # would hand back the stale, cached class object).
        try:
            orchestrator_class = getattr(sys.modules["fbuild.build.orchestrator_avr"], "BuildOrchestratorAVR")
        except (KeyError, AttributeError) as e:
            logging.error(f"Failed to get BuildOrchestratorAVR class: {e}")
            return False

        # Execute build
        orchestrator = orchestrator_class(verbose=False)
        build_result = orchestrator.build(
            project_dir=Path(request.project_dir),
            env_name=request.environment,
            clean=request.clean_build,
            verbose=False,
        )

        if not build_result.success:
            logging.error(f"Build failed: {build_result.message}")
            self._update_status(
                context,
                DaemonState.FAILED,
                f"Build failed: {build_result.message}",
                request=request,
                exit_code=1,
                operation_in_progress=False,
            )
            return False

        logging.info("Build completed successfully")
        return True

    def _deploy_firmware(self, request: "DeployRequest", context: "DaemonContext") -> str | None:
        """Deploy the firmware to the device.

        Args:
            request: The deploy request
            context: The daemon context

        Returns:
            The port that was used for deployment, or None if deployment failed
        """
        # Update status to deploying
        self._update_status(
            context,
            DaemonState.DEPLOYING,
            f"Deploying {request.environment}",
            request=request,
            operation_type=OperationType.DEPLOY,
        )

        # Get fresh deployer class after module reload.
        # NOTE(review): build uses BuildOrchestratorAVR while deploy uses
        # ESP32Deployer - confirm this pairing is intentional.
        try:
            deployer_class = getattr(sys.modules["fbuild.deploy.deployer_esp32"], "ESP32Deployer")
        except (KeyError, AttributeError) as e:
            logging.error(f"Failed to get ESP32Deployer class: {e}")
            return None

        # Execute deploy
        deployer = deployer_class(verbose=False)
        deploy_result = deployer.deploy(
            project_dir=Path(request.project_dir),
            env_name=request.environment,
            port=request.port,
        )

        if not deploy_result.success:
            logging.error(f"Deploy failed: {deploy_result.message}")
            self._update_status(
                context,
                DaemonState.FAILED,
                f"Deploy failed: {deploy_result.message}",
                request=request,
                exit_code=1,
                operation_in_progress=False,
            )
            return None

        # Return the port that was actually used (deployer may have
        # auto-detected one when the request left it unset)
        return deploy_result.port if deploy_result.port else request.port

    def _start_monitoring(self, request: "DeployRequest", port: str, context: "DaemonContext") -> None:
        """Start monitoring after successful deployment.

        This creates a MonitorRequest and processes it immediately.
        Note: This is called while still holding locks, so we need to
        release them first by returning from execute_operation.

        For now, we'll just log that monitoring should start. The actual
        implementation of post-deploy monitoring will be handled in the
        daemon.py integration (Task 1.8).

        Args:
            request: The deploy request
            port: The port to monitor
            context: The daemon context
        """
        logging.info(f"Monitor after deploy requested for port {port}")

        # Update status to indicate transition to monitoring
        self._update_status(
            context,
            DaemonState.MONITORING,
            "Transitioning to monitor after deploy",
            request=request,
        )

        # Create monitor request, carrying over the deploy request's
        # monitor-related options and caller identity
        monitor_request = MonitorRequest(
            project_dir=request.project_dir,
            environment=request.environment,
            port=port,
            baud_rate=None,  # Use config default
            halt_on_error=request.monitor_halt_on_error,
            halt_on_success=request.monitor_halt_on_success,
            expect=request.monitor_expect,
            timeout=request.monitor_timeout,
            caller_pid=request.caller_pid,
            caller_cwd=request.caller_cwd,
            request_id=request.request_id,
        )

        # Import and use MonitorRequestProcessor to handle monitoring
        # This will be imported at runtime to avoid circular dependencies
        from fbuild.daemon.processors.monitor_processor import MonitorRequestProcessor

        monitor_processor = MonitorRequestProcessor()
        # Note: This will block until monitoring completes
        # The locks will be released by the base class after execute_operation returns
        monitor_processor.process_request(monitor_request, context)

    def _reload_build_modules(self) -> None:
        """Reload build-related modules to pick up code changes.

        This is critical for development on Windows where daemon caching prevents
        testing code changes. Reloads key modules that are frequently modified.

        Order matters: reload dependencies first, then modules that import them.
        """
        import importlib

        modules_to_reload = [
            # Core utilities and packages (reload first - no dependencies)
            "fbuild.packages.downloader",
            "fbuild.packages.archive_utils",
            "fbuild.packages.platformio_registry",
            "fbuild.packages.toolchain",
            "fbuild.packages.toolchain_esp32",
            "fbuild.packages.arduino_core",
            "fbuild.packages.framework_esp32",
            "fbuild.packages.platform_esp32",
            "fbuild.packages.library_manager",
            "fbuild.packages.library_manager_esp32",
            # Build system (reload second - depends on packages)
            "fbuild.build.archive_creator",
            "fbuild.build.compiler",
            "fbuild.build.configurable_compiler",
            "fbuild.build.linker",
            "fbuild.build.configurable_linker",
            "fbuild.build.source_scanner",
            "fbuild.build.compilation_executor",
            # Orchestrators (reload third - depends on build system)
            "fbuild.build.orchestrator",
            "fbuild.build.orchestrator_avr",
            "fbuild.build.orchestrator_esp32",
            # Deploy and monitor (reload with build system)
            "fbuild.deploy.deployer",
            "fbuild.deploy.deployer_esp32",
            "fbuild.deploy.monitor",
            # Top-level module packages (reload last to update __init__.py imports)
            "fbuild.build",
            "fbuild.deploy",
        ]

        reloaded_count = 0
        for module_name in modules_to_reload:
            try:
                if module_name in sys.modules:
                    # Module already loaded - reload it to pick up changes
                    importlib.reload(sys.modules[module_name])
                else:
                    # Module not loaded yet - import it for the first time.
                    # import_module (rather than __import__) is the documented
                    # programmatic import API and handles dotted names cleanly.
                    importlib.import_module(module_name)
                reloaded_count += 1
            except KeyboardInterrupt as ke:
                from fbuild.interrupt_utils import handle_keyboard_interrupt_properly

                handle_keyboard_interrupt_properly(ke)
            except Exception as e:
                # Best-effort: a single failed reload should not abort the build
                logging.warning(f"Failed to reload/import module {module_name}: {e}")

        if reloaded_count > 0:
            logging.info(f"Loaded/reloaded {reloaded_count} build modules")
@@ -0,0 +1,146 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Monitor Request Processor - Handles serial monitoring operations.
|
|
3
|
+
|
|
4
|
+
This module implements the MonitorRequestProcessor which executes serial
|
|
5
|
+
monitoring operations for Arduino/ESP32 devices. It captures serial output,
|
|
6
|
+
performs pattern matching, and handles halt conditions.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import logging
|
|
10
|
+
import sys
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import TYPE_CHECKING
|
|
13
|
+
|
|
14
|
+
from fbuild.daemon.messages import DaemonState, OperationType
|
|
15
|
+
from fbuild.daemon.request_processor import RequestProcessor
|
|
16
|
+
|
|
17
|
+
if TYPE_CHECKING:
|
|
18
|
+
from fbuild.daemon.daemon_context import DaemonContext
|
|
19
|
+
from fbuild.daemon.messages import MonitorRequest
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class MonitorRequestProcessor(RequestProcessor):
    """Processor for monitor requests.

    Handles serial monitoring of Arduino/ESP32 devices by connecting to the
    requested serial port, streaming captured output to a file, matching
    patterns against the stream, and honoring halt conditions
    (error/success patterns) and an optional timeout.

    Monitoring ends when a halt pattern matches, the timeout elapses, the
    user interrupts with Ctrl+C, or an error occurs.

    Example:
        >>> processor = MonitorRequestProcessor()
        >>> success = processor.process_request(monitor_request, daemon_context)
    """

    def get_operation_type(self) -> OperationType:
        """Identify this processor as handling MONITOR operations."""
        return OperationType.MONITOR

    def get_required_locks(self, request: "MonitorRequest", context: "DaemonContext") -> dict[str, str]:
        """Monitor operations need only the serial-port lock.

        Args:
            request: The monitor request
            context: The daemon context

        Returns:
            Mapping with the port lock requirement (empty when no port is set)
        """
        if not request.port:
            return {}
        return {"port": request.port}

    def validate_request(self, request: "MonitorRequest", context: "DaemonContext") -> bool:
        """Reject monitor requests that do not name a serial port.

        Args:
            request: The monitor request
            context: The daemon context

        Returns:
            True when a port is specified, False otherwise
        """
        if request.port:
            return True
        logging.error("Monitor requires port to be specified")
        return False

    def get_starting_state(self) -> DaemonState:
        """Monitoring begins in the MONITORING daemon state."""
        return DaemonState.MONITORING

    def get_starting_message(self, request: "MonitorRequest") -> str:
        """Status message shown when monitoring starts."""
        return f"Monitoring {request.environment} on {request.port}"

    def get_success_message(self, request: "MonitorRequest") -> str:
        """Status message shown on successful completion."""
        return "Monitor completed"

    def get_failure_message(self, request: "MonitorRequest") -> str:
        """Status message shown on failure."""
        return "Monitor failed"

    def execute_operation(self, request: "MonitorRequest", context: "DaemonContext") -> bool:
        """Run the serial monitoring session.

        Boilerplate (locks, status updates, error handling) is handled by the
        base RequestProcessor; this method only drives the SerialMonitor.

        Args:
            request: The monitor request containing port, baud_rate, etc.
            context: The daemon context with all subsystems

        Returns:
            True if monitoring completed successfully, False otherwise
        """
        logging.info(f"Starting monitor on {request.port}")

        project_path = Path(request.project_dir)
        state_dir = project_path / ".fbuild"

        # Streaming capture file: created empty before the session begins
        capture_path = state_dir / "monitor_output.txt"
        capture_path.parent.mkdir(parents=True, exist_ok=True)
        capture_path.write_text("", encoding="utf-8")

        # Summary file: any stale summary from a previous run is removed
        summary_path = state_dir / "monitor_summary.json"
        if summary_path.exists():
            summary_path.unlink()

        # Look the monitor class up through sys.modules so a freshly reloaded
        # module is used; a plain import would return the cached version.
        try:
            serial_monitor_cls = getattr(sys.modules["fbuild.deploy.monitor"], "SerialMonitor")
        except (KeyError, AttributeError) as e:
            logging.error(f"Failed to get SerialMonitor class: {e}")
            return False

        baud = request.baud_rate or 115200
        timeout = int(request.timeout) if request.timeout is not None else None

        status = serial_monitor_cls(verbose=False).monitor(
            project_dir=project_path,
            env_name=request.environment,
            port=request.port,
            baud=baud,
            timeout=timeout,
            halt_on_error=request.halt_on_error,
            halt_on_success=request.halt_on_success,
            expect=request.expect,
            output_file=capture_path,
            summary_file=summary_path,
        )

        if status != 0:
            logging.error(f"Monitor failed with exit code {status}")
            return False
        logging.info("Monitor completed successfully")
        return True