fbuild-1.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of fbuild might be problematic.

Files changed (93)
  1. fbuild/__init__.py +0 -0
  2. fbuild/assets/example.txt +1 -0
  3. fbuild/build/__init__.py +117 -0
  4. fbuild/build/archive_creator.py +186 -0
  5. fbuild/build/binary_generator.py +444 -0
  6. fbuild/build/build_component_factory.py +131 -0
  7. fbuild/build/build_state.py +325 -0
  8. fbuild/build/build_utils.py +98 -0
  9. fbuild/build/compilation_executor.py +422 -0
  10. fbuild/build/compiler.py +165 -0
  11. fbuild/build/compiler_avr.py +574 -0
  12. fbuild/build/configurable_compiler.py +612 -0
  13. fbuild/build/configurable_linker.py +637 -0
  14. fbuild/build/flag_builder.py +186 -0
  15. fbuild/build/library_dependency_processor.py +185 -0
  16. fbuild/build/linker.py +708 -0
  17. fbuild/build/orchestrator.py +67 -0
  18. fbuild/build/orchestrator_avr.py +656 -0
  19. fbuild/build/orchestrator_esp32.py +797 -0
  20. fbuild/build/orchestrator_teensy.py +543 -0
  21. fbuild/build/source_compilation_orchestrator.py +220 -0
  22. fbuild/build/source_scanner.py +516 -0
  23. fbuild/cli.py +566 -0
  24. fbuild/cli_utils.py +312 -0
  25. fbuild/config/__init__.py +16 -0
  26. fbuild/config/board_config.py +457 -0
  27. fbuild/config/board_loader.py +92 -0
  28. fbuild/config/ini_parser.py +209 -0
  29. fbuild/config/mcu_specs.py +88 -0
  30. fbuild/daemon/__init__.py +34 -0
  31. fbuild/daemon/client.py +929 -0
  32. fbuild/daemon/compilation_queue.py +293 -0
  33. fbuild/daemon/daemon.py +474 -0
  34. fbuild/daemon/daemon_context.py +196 -0
  35. fbuild/daemon/error_collector.py +263 -0
  36. fbuild/daemon/file_cache.py +332 -0
  37. fbuild/daemon/lock_manager.py +270 -0
  38. fbuild/daemon/logging_utils.py +149 -0
  39. fbuild/daemon/messages.py +301 -0
  40. fbuild/daemon/operation_registry.py +288 -0
  41. fbuild/daemon/process_tracker.py +366 -0
  42. fbuild/daemon/processors/__init__.py +12 -0
  43. fbuild/daemon/processors/build_processor.py +157 -0
  44. fbuild/daemon/processors/deploy_processor.py +327 -0
  45. fbuild/daemon/processors/monitor_processor.py +146 -0
  46. fbuild/daemon/request_processor.py +401 -0
  47. fbuild/daemon/status_manager.py +216 -0
  48. fbuild/daemon/subprocess_manager.py +316 -0
  49. fbuild/deploy/__init__.py +17 -0
  50. fbuild/deploy/deployer.py +67 -0
  51. fbuild/deploy/deployer_esp32.py +314 -0
  52. fbuild/deploy/monitor.py +495 -0
  53. fbuild/interrupt_utils.py +34 -0
  54. fbuild/packages/__init__.py +53 -0
  55. fbuild/packages/archive_utils.py +1098 -0
  56. fbuild/packages/arduino_core.py +412 -0
  57. fbuild/packages/cache.py +249 -0
  58. fbuild/packages/downloader.py +366 -0
  59. fbuild/packages/framework_esp32.py +538 -0
  60. fbuild/packages/framework_teensy.py +346 -0
  61. fbuild/packages/github_utils.py +96 -0
  62. fbuild/packages/header_trampoline_cache.py +394 -0
  63. fbuild/packages/library_compiler.py +203 -0
  64. fbuild/packages/library_manager.py +549 -0
  65. fbuild/packages/library_manager_esp32.py +413 -0
  66. fbuild/packages/package.py +163 -0
  67. fbuild/packages/platform_esp32.py +383 -0
  68. fbuild/packages/platform_teensy.py +312 -0
  69. fbuild/packages/platform_utils.py +131 -0
  70. fbuild/packages/platformio_registry.py +325 -0
  71. fbuild/packages/sdk_utils.py +231 -0
  72. fbuild/packages/toolchain.py +436 -0
  73. fbuild/packages/toolchain_binaries.py +196 -0
  74. fbuild/packages/toolchain_esp32.py +484 -0
  75. fbuild/packages/toolchain_metadata.py +185 -0
  76. fbuild/packages/toolchain_teensy.py +404 -0
  77. fbuild/platform_configs/esp32.json +150 -0
  78. fbuild/platform_configs/esp32c2.json +144 -0
  79. fbuild/platform_configs/esp32c3.json +143 -0
  80. fbuild/platform_configs/esp32c5.json +151 -0
  81. fbuild/platform_configs/esp32c6.json +151 -0
  82. fbuild/platform_configs/esp32p4.json +149 -0
  83. fbuild/platform_configs/esp32s3.json +151 -0
  84. fbuild/platform_configs/imxrt1062.json +56 -0
  85. fbuild-1.1.0.dist-info/METADATA +447 -0
  86. fbuild-1.1.0.dist-info/RECORD +93 -0
  87. fbuild-1.1.0.dist-info/WHEEL +5 -0
  88. fbuild-1.1.0.dist-info/entry_points.txt +5 -0
  89. fbuild-1.1.0.dist-info/licenses/LICENSE +21 -0
  90. fbuild-1.1.0.dist-info/top_level.txt +2 -0
  91. fbuild_lint/__init__.py +0 -0
  92. fbuild_lint/ruff_plugins/__init__.py +0 -0
  93. fbuild_lint/ruff_plugins/keyboard_interrupt_checker.py +158 -0

fbuild/daemon/process_tracker.py
@@ -0,0 +1,366 @@
+"""
+Process Tracking and Cleanup Module
+
+This module manages tracking of build/deploy/monitor processes and their entire
+process trees. When client processes die, orphaned process trees are automatically
+cleaned up to prevent resource leaks and file locking issues.
+
+Key features:
+- Track root process + all children (recursive)
+- Detect dead client processes
+- Kill entire process trees recursively
+- Thread-safe operations for daemon use
+"""
+
+import _thread
+import json
+import logging
+import threading
+import time
+from dataclasses import asdict, dataclass, field
+from pathlib import Path
+from typing import Any
+
+import psutil
+
+
+@dataclass
+class ProcessTreeInfo:
+    """Information about a tracked process tree.
+
+    Attributes:
+        client_pid: PID of the client that initiated the operation
+        root_pid: PID of the root process
+        child_pids: List of all child PIDs (updated periodically)
+        request_id: Request ID
+        project_dir: Project directory
+        operation_type: Type of operation (deploy/monitor)
+        port: Serial port (if applicable)
+        started_at: Unix timestamp when tracking started
+        last_updated: Unix timestamp of last child PID refresh
+    """
+
+    client_pid: int
+    root_pid: int
+    child_pids: list[int] = field(default_factory=list)
+    request_id: str = ""
+    project_dir: str = ""
+    operation_type: str = ""
+    port: str | None = None
+    started_at: float = field(default_factory=time.time)
+    last_updated: float = field(default_factory=time.time)
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert to dictionary for JSON serialization."""
+        return asdict(self)
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> "ProcessTreeInfo":
+        """Create ProcessTreeInfo from dictionary."""
+        return cls(
+            client_pid=data["client_pid"],
+            root_pid=data["root_pid"],
+            child_pids=data.get("child_pids", []),
+            request_id=data.get("request_id", ""),
+            project_dir=data.get("project_dir", ""),
+            operation_type=data.get("operation_type", ""),
+            port=data.get("port"),
+            started_at=data.get("started_at", time.time()),
+            last_updated=data.get("last_updated", time.time()),
+        )
+
+
+class ProcessTracker:
+    """Thread-safe tracker for process trees.
+
+    This class maintains a registry of active processes and provides
+    methods to detect and cleanup orphaned process trees.
+    """
+
+    def __init__(self, registry_file: Path):
+        """Initialize the tracker.
+
+        Args:
+            registry_file: Path to JSON file for persisting process trees
+        """
+        self.registry_file = registry_file
+        self.lock = threading.Lock()
+        self._registry: dict[int, ProcessTreeInfo] = {}
+        self._load_registry()
+        logging.info(f"ProcessTracker initialized with {len(self._registry)} tracked processes")
+
+    def _load_registry(self) -> None:
+        """Load registry from disk (if it exists)."""
+        if not self.registry_file.exists():
+            return
+
+        try:
+            with open(self.registry_file) as f:
+                data = json.load(f)
+
+            with self.lock:
+                self._registry = {int(client_pid): ProcessTreeInfo.from_dict(info) for client_pid, info in data.items()}
+
+            logging.info(f"Loaded {len(self._registry)} process trees from registry")
+        except KeyboardInterrupt:
+            _thread.interrupt_main()
+            raise
+        except Exception as e:
+            logging.warning(f"Failed to load process registry: {e}")
+            self._registry = {}
+
+    def _save_registry(self) -> None:
+        """Save registry to disk atomically."""
+        try:
+            # Prepare data for serialization
+            data = {str(client_pid): info.to_dict() for client_pid, info in self._registry.items()}
+
+            # Atomic write
+            temp_file = self.registry_file.with_suffix(".tmp")
+            with open(temp_file, "w") as f:
+                json.dump(data, f, indent=2)
+
+            temp_file.replace(self.registry_file)
+
+        except KeyboardInterrupt:
+            _thread.interrupt_main()
+            raise
+        except Exception as e:
+            logging.error(f"Failed to save process registry: {e}")
+
+    def register_process(
+        self,
+        client_pid: int,
+        root_pid: int,
+        request_id: str = "",
+        project_dir: str = "",
+        operation_type: str = "",
+        port: str | None = None,
+    ) -> None:
+        """Register a new process tree.
+
+        Args:
+            client_pid: PID of client that initiated operation
+            root_pid: PID of root process
+            request_id: Request ID (optional)
+            project_dir: Project directory (optional)
+            operation_type: Type of operation (optional)
+            port: Serial port (optional)
+        """
+        with self.lock:
+            self._registry[client_pid] = ProcessTreeInfo(
+                client_pid=client_pid,
+                root_pid=root_pid,
+                request_id=request_id,
+                project_dir=project_dir,
+                operation_type=operation_type,
+                port=port,
+            )
+
+            # Immediately refresh child PIDs
+            self._update_child_pids(client_pid)
+
+            self._save_registry()
+            logging.info(f"Registered process tree: client={client_pid}, root={root_pid}, children={len(self._registry[client_pid].child_pids)}, operation={operation_type}")
+
+    def unregister_process(self, client_pid: int) -> None:
+        """Remove a process tree from tracking.
+
+        Args:
+            client_pid: Client PID to remove
+        """
+        with self.lock:
+            if client_pid in self._registry:
+                info = self._registry.pop(client_pid)
+                logging.info(f"Unregistered process tree: client={client_pid}, root={info.root_pid}")
+            else:
+                logging.warning(f"Attempted to unregister unknown client PID: {client_pid}")
+
+            self._save_registry()
+
+    def _update_child_pids(self, client_pid: int) -> None:
+        """Update child PID list for a tracked process.
+
+        This method MUST be called with self.lock held.
+
+        Args:
+            client_pid: Client PID to update
+        """
+        if client_pid not in self._registry:
+            return
+
+        info = self._registry[client_pid]
+
+        try:
+            # Get root process
+            root_proc = psutil.Process(info.root_pid)
+
+            # Get ALL descendants recursively
+            children = root_proc.children(recursive=True)
+            info.child_pids = [child.pid for child in children]
+            info.last_updated = time.time()
+
+        except psutil.NoSuchProcess:
+            # Root process died - mark as empty
+            info.child_pids = []
+            info.last_updated = time.time()
+        except KeyboardInterrupt:
+            _thread.interrupt_main()
+            raise
+        except Exception as e:
+            logging.warning(f"Failed to update child PIDs for client={client_pid}: {e}")
+
+    def refresh_all_child_pids(self) -> None:
+        """Refresh child PID lists for all tracked processes."""
+        with self.lock:
+            for client_pid in list(self._registry.keys()):
+                self._update_child_pids(client_pid)
+
+            self._save_registry()
+
+    def cleanup_orphaned_processes(self) -> list[int]:
+        """Detect and kill process trees for dead clients.
+
+        Returns:
+            List of client PIDs that were cleaned up
+        """
+        orphaned_clients = []
+
+        with self.lock:
+            for client_pid, info in list(self._registry.items()):
+                # Check if client is still alive
+                if psutil.pid_exists(client_pid):
+                    continue
+
+                # Client is dead - kill the entire process tree
+                logging.info(f"Client {client_pid} is dead, cleaning up process tree (root={info.root_pid}, children={len(info.child_pids)}, operation={info.operation_type})")
+
+                killed_count = self._kill_process_tree(info)
+                orphaned_clients.append(client_pid)
+
+                logging.info(f"Cleaned up {killed_count} processes for dead client {client_pid}")
+
+                # Remove from registry
+                del self._registry[client_pid]
+
+            if orphaned_clients:
+                logging.info(f"Orphaned clients cleaned up: {orphaned_clients}")
+                self._save_registry()
+
+        return orphaned_clients
+
+    def _kill_process_tree(self, info: ProcessTreeInfo) -> int:
+        """Kill an entire process tree (root + all children).
+
+        This method MUST be called with self.lock held.
+
+        Args:
+            info: ProcessTreeInfo containing root and child PIDs
+
+        Returns:
+            Number of processes killed
+        """
+        killed_count = 0
+        all_pids = info.child_pids + [info.root_pid]
+
+        # Refresh child list one last time before killing
+        try:
+            root_proc = psutil.Process(info.root_pid)
+            children = root_proc.children(recursive=True)
+            all_pids = [child.pid for child in children] + [info.root_pid]
+        except KeyboardInterrupt:
+            _thread.interrupt_main()
+            raise
+        except Exception:
+            pass  # Use cached PID list
+
+        # Kill children first (bottom-up to avoid orphans)
+        processes_to_kill: list[psutil.Process] = []
+        for pid in reversed(all_pids):  # Reverse to kill children before parents
+            try:
+                proc = psutil.Process(pid)
+                processes_to_kill.append(proc)
+            except psutil.NoSuchProcess:
+                pass  # Already dead
+            except KeyboardInterrupt:
+                _thread.interrupt_main()
+                raise
+            except Exception as e:
+                logging.warning(f"Failed to get process {pid}: {e}")
+
+        logging.info(f"Terminating {len(processes_to_kill)} processes")
+        # Terminate all processes
+        for proc in processes_to_kill:
+            try:
+                proc.terminate()
+                killed_count += 1
+            except psutil.NoSuchProcess:
+                pass  # Already dead
+            except KeyboardInterrupt:
+                _thread.interrupt_main()
+                raise
+            except Exception as e:
+                logging.warning(f"Failed to terminate process {proc.pid}: {e}")
+
+        # Wait for graceful termination
+        _gone, alive = psutil.wait_procs(processes_to_kill, timeout=3)
+
+        # Force kill any stragglers
+        if alive:
+            logging.warning(f"Force killing {len(alive)} stubborn processes")
+            for proc in alive:
+                try:
+                    proc.kill()
+                    logging.warning(f"Force killed stubborn process {proc.pid}")
+                except KeyboardInterrupt:
+                    _thread.interrupt_main()
+                    raise
+                except Exception as e:
+                    logging.warning(f"Failed to force kill process {proc.pid}: {e}")
+
+        return killed_count
+
+    def get_tracked_clients(self) -> list[int]:
+        """Get list of all tracked client PIDs.
+
+        Returns:
+            List of client PIDs currently being tracked
+        """
+        with self.lock:
+            return list(self._registry.keys())
+
+    def get_process_info(self, client_pid: int) -> ProcessTreeInfo | None:
+        """Get process tree info for a client.
+
+        Args:
+            client_pid: Client PID to query
+
+        Returns:
+            ProcessTreeInfo if found, None otherwise
+        """
+        with self.lock:
+            return self._registry.get(client_pid)
+
+    def get_processes_by_port(self, port: str) -> list[ProcessTreeInfo]:
+        """Get all processes using a specific serial port.
+
+        Args:
+            port: Serial port to search for
+
+        Returns:
+            List of ProcessTreeInfo for processes using this port
+        """
+        with self.lock:
+            return [info for info in self._registry.values() if info.port == port]
+
+    def get_processes_by_project(self, project_dir: str) -> list[ProcessTreeInfo]:
+        """Get all processes for a specific project.
+
+        Args:
+            project_dir: Project directory to search for
+
+        Returns:
+            List of ProcessTreeInfo for processes in this project
+        """
+        with self.lock:
+            return [info for info in self._registry.values() if info.project_dir == project_dir]
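
As a rough illustration of how this tracker is meant to be driven, the sketch below registers a deploy operation and then runs the orphan sweep in a loop. The registry path, PIDs, port, and the 10-second polling interval are placeholders for the example and are not taken from the package; only the ProcessTracker API shown in the diff above is assumed.

# Hypothetical wiring of ProcessTracker inside a daemon loop (illustrative values only).
import time
from pathlib import Path

from fbuild.daemon.process_tracker import ProcessTracker

tracker = ProcessTracker(registry_file=Path("/tmp/fbuild_process_registry.json"))

# Track an upload spawned on behalf of client PID 4242 (placeholder PIDs).
tracker.register_process(
    client_pid=4242,
    root_pid=4300,
    request_id="req-001",
    project_dir="/home/user/my_project",
    operation_type="deploy",
    port="/dev/ttyUSB0",
)

# Periodic sweep: refresh child PIDs, then kill trees whose clients have exited.
while True:
    tracker.refresh_all_child_pids()
    cleaned = tracker.cleanup_orphaned_processes()
    if cleaned:
        print(f"Cleaned up orphaned clients: {cleaned}")
    time.sleep(10)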

fbuild/daemon/processors/__init__.py
@@ -0,0 +1,12 @@
+"""
+Daemon Request Processors - Concrete implementations of request handling.
+
+This package contains concrete processor implementations for different
+operation types (build, deploy, monitor).
+"""
+
+from fbuild.daemon.processors.build_processor import BuildRequestProcessor
+from fbuild.daemon.processors.deploy_processor import DeployRequestProcessor
+from fbuild.daemon.processors.monitor_processor import MonitorRequestProcessor
+
+__all__ = ["BuildRequestProcessor", "DeployRequestProcessor", "MonitorRequestProcessor"]
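
To show how these three exports are typically consumed, here is a minimal sketch of a dispatch table keyed by operation type. The daemon's actual routing code is not part of this diff, and the DEPLOY/MONITOR enum members are assumed from the package's docstrings rather than confirmed by it.

# Hypothetical dispatch table over the processors exported above (not from the package).
from fbuild.daemon.messages import OperationType
from fbuild.daemon.processors import (
    BuildRequestProcessor,
    DeployRequestProcessor,
    MonitorRequestProcessor,
)

# OperationType.DEPLOY and OperationType.MONITOR are assumed member names.
PROCESSORS = {
    OperationType.BUILD: BuildRequestProcessor(),
    OperationType.DEPLOY: DeployRequestProcessor(),
    OperationType.MONITOR: MonitorRequestProcessor(),
}

def processor_for(operation):
    """Look up the processor instance responsible for an operation type."""
    return PROCESSORS[operation]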

fbuild/daemon/processors/build_processor.py
@@ -0,0 +1,157 @@
+"""
+Build Request Processor - Handles build operations.
+
+This module implements the BuildRequestProcessor which executes build
+operations for Arduino/ESP32 projects using the appropriate orchestrator.
+"""
+
+import importlib
+import logging
+import sys
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from fbuild.daemon.messages import OperationType
+from fbuild.daemon.request_processor import RequestProcessor
+
+if TYPE_CHECKING:
+    from fbuild.daemon.daemon_context import DaemonContext
+    from fbuild.daemon.messages import BuildRequest
+
+
+class BuildRequestProcessor(RequestProcessor):
+    """Processor for build requests.
+
+    This processor handles compilation of Arduino/ESP32 projects. It:
+    1. Reloads build modules to pick up code changes (for development)
+    2. Creates the appropriate orchestrator (AVR or ESP32)
+    3. Executes the build with the specified settings
+    4. Returns success/failure based on build result
+
+    Example:
+        >>> processor = BuildRequestProcessor()
+        >>> success = processor.process_request(build_request, daemon_context)
+    """
+
+    def get_operation_type(self) -> OperationType:
+        """Return BUILD operation type."""
+        return OperationType.BUILD
+
+    def get_required_locks(self, request: "BuildRequest", context: "DaemonContext") -> dict[str, str]:
+        """Build operations require only a project lock.
+
+        Args:
+            request: The build request
+            context: The daemon context
+
+        Returns:
+            Dictionary with project lock requirement
+        """
+        return {"project": request.project_dir}
+
+    def execute_operation(self, request: "BuildRequest", context: "DaemonContext") -> bool:
+        """Execute the build operation.
+
+        This is the core build logic extracted from the original
+        process_build_request function. All boilerplate (locks, status
+        updates, error handling) is handled by the base RequestProcessor.
+
+        Args:
+            request: The build request containing project_dir, environment, etc.
+            context: The daemon context with all subsystems
+
+        Returns:
+            True if build succeeded, False otherwise
+        """
+        logging.info(f"Building project: {request.project_dir}")
+
+        # Reload build modules to pick up code changes
+        # This is critical for development on Windows where daemon caching
+        # prevents testing code changes
+        self._reload_build_modules()
+
+        # Get fresh orchestrator class after module reload
+        # Using direct import would use cached version
+        try:
+            orchestrator_class = getattr(sys.modules["fbuild.build.orchestrator_avr"], "BuildOrchestratorAVR")
+        except (KeyError, AttributeError) as e:
+            logging.error(f"Failed to get BuildOrchestratorAVR class: {e}")
+            return False
+
+        # Create orchestrator and execute build
+        orchestrator = orchestrator_class(verbose=request.verbose)
+        build_result = orchestrator.build(
+            project_dir=Path(request.project_dir),
+            env_name=request.environment,
+            clean=request.clean_build,
+            verbose=request.verbose,
+        )
+
+        if not build_result.success:
+            logging.error(f"Build failed: {build_result.message}")
+            return False
+
+        logging.info("Build completed successfully")
+        return True
+
+    def _reload_build_modules(self) -> None:
+        """Reload build-related modules to pick up code changes.
+
+        This is critical for development on Windows where daemon caching prevents
+        testing code changes. Reloads key modules that are frequently modified.
+
+        Order matters: reload dependencies first, then modules that import them.
+        """
+        modules_to_reload = [
+            # Core utilities and packages (reload first - no dependencies)
+            "fbuild.packages.downloader",
+            "fbuild.packages.archive_utils",
+            "fbuild.packages.platformio_registry",
+            "fbuild.packages.toolchain",
+            "fbuild.packages.toolchain_esp32",
+            "fbuild.packages.arduino_core",
+            "fbuild.packages.framework_esp32",
+            "fbuild.packages.platform_esp32",
+            "fbuild.packages.library_manager",
+            "fbuild.packages.library_manager_esp32",
+            # Build system (reload second - depends on packages)
+            "fbuild.build.archive_creator",
+            "fbuild.build.compiler",
+            "fbuild.build.configurable_compiler",
+            "fbuild.build.linker",
+            "fbuild.build.configurable_linker",
+            "fbuild.build.source_scanner",
+            "fbuild.build.compilation_executor",
+            # Orchestrators (reload third - depends on build system)
+            "fbuild.build.orchestrator",
+            "fbuild.build.orchestrator_avr",
+            "fbuild.build.orchestrator_esp32",
+            # Deploy and monitor (reload with build system)
+            "fbuild.deploy.deployer",
+            "fbuild.deploy.deployer_esp32",
+            "fbuild.deploy.monitor",
+            # Top-level module packages (reload last to update __init__.py imports)
+            "fbuild.build",
+            "fbuild.deploy",
+        ]
+
+        reloaded_count = 0
+        for module_name in modules_to_reload:
+            try:
+                if module_name in sys.modules:
+                    # Module already loaded - reload it to pick up changes
+                    importlib.reload(sys.modules[module_name])
+                    reloaded_count += 1
+                else:
+                    # Module not loaded yet - import it for the first time
+                    __import__(module_name)
+                    reloaded_count += 1
+            except KeyboardInterrupt as ke:
+                from fbuild.interrupt_utils import handle_keyboard_interrupt_properly
+
+                handle_keyboard_interrupt_properly(ke)
+            except Exception as e:
+                logging.warning(f"Failed to reload/import module {module_name}: {e}")
+
+        if reloaded_count > 0:
+            logging.info(f"Loaded/reloaded {reloaded_count} build modules")
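
For context, a minimal sketch of what invoking this processor from daemon code might look like follows. Only processor.process_request(build_request, daemon_context), shown in the class docstring above, is taken from the package; the handle_build wrapper and its arguments are placeholders.

# Illustrative call site for BuildRequestProcessor (wrapper and names are assumptions).
from fbuild.daemon.processors import BuildRequestProcessor

def handle_build(build_request, daemon_context) -> bool:
    """Run a build request through the processor and report the outcome."""
    processor = BuildRequestProcessor()
    # Per the class docstring, process_request() wraps execute_operation()
    # with lock acquisition, status updates, and error handling.
    success = processor.process_request(build_request, daemon_context)
    if not success:
        print(f"Build failed for {build_request.project_dir}")
    return success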