fbuild-1.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of fbuild might be problematic.

Files changed (93)
  1. fbuild/__init__.py +0 -0
  2. fbuild/assets/example.txt +1 -0
  3. fbuild/build/__init__.py +117 -0
  4. fbuild/build/archive_creator.py +186 -0
  5. fbuild/build/binary_generator.py +444 -0
  6. fbuild/build/build_component_factory.py +131 -0
  7. fbuild/build/build_state.py +325 -0
  8. fbuild/build/build_utils.py +98 -0
  9. fbuild/build/compilation_executor.py +422 -0
  10. fbuild/build/compiler.py +165 -0
  11. fbuild/build/compiler_avr.py +574 -0
  12. fbuild/build/configurable_compiler.py +612 -0
  13. fbuild/build/configurable_linker.py +637 -0
  14. fbuild/build/flag_builder.py +186 -0
  15. fbuild/build/library_dependency_processor.py +185 -0
  16. fbuild/build/linker.py +708 -0
  17. fbuild/build/orchestrator.py +67 -0
  18. fbuild/build/orchestrator_avr.py +656 -0
  19. fbuild/build/orchestrator_esp32.py +797 -0
  20. fbuild/build/orchestrator_teensy.py +543 -0
  21. fbuild/build/source_compilation_orchestrator.py +220 -0
  22. fbuild/build/source_scanner.py +516 -0
  23. fbuild/cli.py +566 -0
  24. fbuild/cli_utils.py +312 -0
  25. fbuild/config/__init__.py +16 -0
  26. fbuild/config/board_config.py +457 -0
  27. fbuild/config/board_loader.py +92 -0
  28. fbuild/config/ini_parser.py +209 -0
  29. fbuild/config/mcu_specs.py +88 -0
  30. fbuild/daemon/__init__.py +34 -0
  31. fbuild/daemon/client.py +929 -0
  32. fbuild/daemon/compilation_queue.py +293 -0
  33. fbuild/daemon/daemon.py +474 -0
  34. fbuild/daemon/daemon_context.py +196 -0
  35. fbuild/daemon/error_collector.py +263 -0
  36. fbuild/daemon/file_cache.py +332 -0
  37. fbuild/daemon/lock_manager.py +270 -0
  38. fbuild/daemon/logging_utils.py +149 -0
  39. fbuild/daemon/messages.py +301 -0
  40. fbuild/daemon/operation_registry.py +288 -0
  41. fbuild/daemon/process_tracker.py +366 -0
  42. fbuild/daemon/processors/__init__.py +12 -0
  43. fbuild/daemon/processors/build_processor.py +157 -0
  44. fbuild/daemon/processors/deploy_processor.py +327 -0
  45. fbuild/daemon/processors/monitor_processor.py +146 -0
  46. fbuild/daemon/request_processor.py +401 -0
  47. fbuild/daemon/status_manager.py +216 -0
  48. fbuild/daemon/subprocess_manager.py +316 -0
  49. fbuild/deploy/__init__.py +17 -0
  50. fbuild/deploy/deployer.py +67 -0
  51. fbuild/deploy/deployer_esp32.py +314 -0
  52. fbuild/deploy/monitor.py +495 -0
  53. fbuild/interrupt_utils.py +34 -0
  54. fbuild/packages/__init__.py +53 -0
  55. fbuild/packages/archive_utils.py +1098 -0
  56. fbuild/packages/arduino_core.py +412 -0
  57. fbuild/packages/cache.py +249 -0
  58. fbuild/packages/downloader.py +366 -0
  59. fbuild/packages/framework_esp32.py +538 -0
  60. fbuild/packages/framework_teensy.py +346 -0
  61. fbuild/packages/github_utils.py +96 -0
  62. fbuild/packages/header_trampoline_cache.py +394 -0
  63. fbuild/packages/library_compiler.py +203 -0
  64. fbuild/packages/library_manager.py +549 -0
  65. fbuild/packages/library_manager_esp32.py +413 -0
  66. fbuild/packages/package.py +163 -0
  67. fbuild/packages/platform_esp32.py +383 -0
  68. fbuild/packages/platform_teensy.py +312 -0
  69. fbuild/packages/platform_utils.py +131 -0
  70. fbuild/packages/platformio_registry.py +325 -0
  71. fbuild/packages/sdk_utils.py +231 -0
  72. fbuild/packages/toolchain.py +436 -0
  73. fbuild/packages/toolchain_binaries.py +196 -0
  74. fbuild/packages/toolchain_esp32.py +484 -0
  75. fbuild/packages/toolchain_metadata.py +185 -0
  76. fbuild/packages/toolchain_teensy.py +404 -0
  77. fbuild/platform_configs/esp32.json +150 -0
  78. fbuild/platform_configs/esp32c2.json +144 -0
  79. fbuild/platform_configs/esp32c3.json +143 -0
  80. fbuild/platform_configs/esp32c5.json +151 -0
  81. fbuild/platform_configs/esp32c6.json +151 -0
  82. fbuild/platform_configs/esp32p4.json +149 -0
  83. fbuild/platform_configs/esp32s3.json +151 -0
  84. fbuild/platform_configs/imxrt1062.json +56 -0
  85. fbuild-1.1.0.dist-info/METADATA +447 -0
  86. fbuild-1.1.0.dist-info/RECORD +93 -0
  87. fbuild-1.1.0.dist-info/WHEEL +5 -0
  88. fbuild-1.1.0.dist-info/entry_points.txt +5 -0
  89. fbuild-1.1.0.dist-info/licenses/LICENSE +21 -0
  90. fbuild-1.1.0.dist-info/top_level.txt +2 -0
  91. fbuild_lint/__init__.py +0 -0
  92. fbuild_lint/ruff_plugins/__init__.py +0 -0
  93. fbuild_lint/ruff_plugins/keyboard_interrupt_checker.py +158 -0
fbuild/daemon/daemon.py
@@ -0,0 +1,474 @@
+"""
+fbuild Daemon - Concurrent Deploy and Monitor Management
+
+This daemon manages deploy and monitor operations to prevent resource conflicts
+when multiple operations are running. The daemon:
+
+1. Runs as a singleton process (enforced via PID file)
+2. Survives client termination
+3. Processes requests with appropriate locking (per-port, per-project)
+4. Provides status updates via status file
+5. Auto-shuts down after idle timeout
+6. Cleans up orphaned processes
+
+Architecture:
+    Clients -> Request File -> Daemon -> Deploy/Monitor Process
+                                 |                |
+                                 v                v
+                            Status File    Progress Updates
+"""
+
+import _thread
+import logging
+import multiprocessing
+import os
+import signal
+import subprocess
+import sys
+import time
+from logging.handlers import TimedRotatingFileHandler
+from pathlib import Path
+
+import psutil
+
+from fbuild.daemon.daemon_context import (
+    DaemonContext,
+    cleanup_daemon_context,
+    create_daemon_context,
+)
+from fbuild.daemon.messages import (
+    BuildRequest,
+    DaemonState,
+    DeployRequest,
+    MonitorRequest,
+)
+from fbuild.daemon.process_tracker import ProcessTracker
+from fbuild.daemon.processors.build_processor import BuildRequestProcessor
+from fbuild.daemon.processors.deploy_processor import DeployRequestProcessor
+from fbuild.daemon.processors.monitor_processor import MonitorRequestProcessor
+
+# Daemon configuration
+DAEMON_NAME = "fbuild_daemon"
+DAEMON_DIR = Path.home() / ".fbuild" / "daemon"
+PID_FILE = DAEMON_DIR / f"{DAEMON_NAME}.pid"
+STATUS_FILE = DAEMON_DIR / "daemon_status.json"
+BUILD_REQUEST_FILE = DAEMON_DIR / "build_request.json"
+DEPLOY_REQUEST_FILE = DAEMON_DIR / "deploy_request.json"
+MONITOR_REQUEST_FILE = DAEMON_DIR / "monitor_request.json"
+LOG_FILE = DAEMON_DIR / "daemon.log"
+PROCESS_REGISTRY_FILE = DAEMON_DIR / "process_registry.json"
+FILE_CACHE_FILE = DAEMON_DIR / "file_cache.json"
+ORPHAN_CHECK_INTERVAL = 5  # Check for orphaned processes every 5 seconds
+IDLE_TIMEOUT = 43200  # 12 hours
+
+
+def setup_logging(foreground: bool = False) -> None:
+    """Setup logging for daemon."""
+    DAEMON_DIR.mkdir(parents=True, exist_ok=True)
+
+    # Enhanced log format with function name and line number
+    LOG_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - [%(funcName)s:%(lineno)d] - %(message)s"
+    LOG_DATEFMT = "%Y-%m-%d %H:%M:%S"
+
+    # Configure root logger
+    logger = logging.getLogger()
+    logger.setLevel(logging.INFO)
+
+    # Console handler (for foreground mode)
+    if foreground:
+        console_handler = logging.StreamHandler(sys.stdout)
+        console_handler.setLevel(logging.INFO)
+        console_formatter = logging.Formatter(fmt=LOG_FORMAT, datefmt=LOG_DATEFMT)
+        console_handler.setFormatter(console_formatter)
+        logger.addHandler(console_handler)
+
+    # Timed rotating file handler (always) - rotates daily at midnight
+    file_handler = TimedRotatingFileHandler(
+        str(LOG_FILE),
+        when="midnight",  # Rotate at midnight
+        interval=1,  # Daily rotation
+        backupCount=2,  # Keep 2 days of backups (total 3 files)
+        utc=False,  # Use local time
+        atTime=None,  # Rotate exactly at midnight
+    )
+    file_handler.setLevel(logging.INFO)
+    file_formatter = logging.Formatter(fmt=LOG_FORMAT, datefmt=LOG_DATEFMT)
+    file_handler.setFormatter(file_formatter)
+    logger.addHandler(file_handler)
+
+
+def read_request_file(request_file: Path, request_class: type) -> BuildRequest | DeployRequest | MonitorRequest | None:
+    """Read and parse request file.
+
+    Args:
+        request_file: Path to request file
+        request_class: Class to parse into (BuildRequest, DeployRequest, or MonitorRequest)
+
+    Returns:
+        Request object if valid, None otherwise
+    """
+    import json
+
+    if not request_file.exists():
+        return None
+
+    try:
+        with open(request_file) as f:
+            data = json.load(f)
+
+        # Parse into typed request
+        request = request_class.from_dict(data)
+        return request
+
+    except (json.JSONDecodeError, ValueError, TypeError) as e:
+        logging.error(f"Failed to parse request file {request_file}: {e}")
+        return None
+    except KeyboardInterrupt:
+        _thread.interrupt_main()
+        raise
+    except Exception as e:
+        logging.error(f"Unexpected error reading request file {request_file}: {e}")
+        return None
+
+
+def clear_request_file(request_file: Path) -> None:
+    """Remove request file after processing."""
+    try:
+        request_file.unlink(missing_ok=True)
+    except KeyboardInterrupt:
+        logging.warning(f"KeyboardInterrupt while clearing request file: {request_file}")
+        _thread.interrupt_main()
+        raise
+    except Exception as e:
+        logging.error(f"Failed to clear request file {request_file}: {e}")
+
+
+def should_shutdown() -> bool:
+    """Check if daemon should shutdown.
+
+    Returns:
+        True if shutdown signal detected, False otherwise
+    """
+    # Check for shutdown signal file
+    shutdown_file = DAEMON_DIR / "shutdown.signal"
+    if shutdown_file.exists():
+        logging.info("Shutdown signal detected")
+        try:
+            shutdown_file.unlink()
+        except KeyboardInterrupt:
+            _thread.interrupt_main()
+            raise
+        except Exception as e:
+            logging.warning(f"Failed to remove shutdown signal file: {e}")
+        return True
+    return False
+
+
+def cleanup_stale_cancel_signals() -> None:
+    """Clean up stale cancel signal files (older than 5 minutes)."""
+    try:
+        signal_files = list(DAEMON_DIR.glob("cancel_*.signal"))
+        logging.debug(f"Found {len(signal_files)} cancel signal files")
+
+        cleaned_count = 0
+        for signal_file in signal_files:
+            try:
+                # Check file age
+                file_age = time.time() - signal_file.stat().st_mtime
+                if file_age > 300:  # 5 minutes
+                    logging.info(f"Cleaning up stale cancel signal: {signal_file.name} (age: {file_age:.1f}s)")
+                    signal_file.unlink()
+                    cleaned_count += 1
+            except KeyboardInterrupt:
+                _thread.interrupt_main()
+                raise
+            except Exception as e:
+                logging.warning(f"Failed to clean up {signal_file.name}: {e}")
+
+        if cleaned_count > 0:
+            logging.info(f"Cleaned up {cleaned_count} cancel signal files")
+    except KeyboardInterrupt:
+        _thread.interrupt_main()
+        raise
+    except Exception as e:
+        logging.error(f"Error during cancel signal cleanup: {e}")
+
+
+def signal_handler(signum: int, frame: object, context: DaemonContext) -> None:
+    """Handle SIGTERM/SIGINT - refuse shutdown during operation."""
+    signal_name = "SIGTERM" if signum == signal.SIGTERM else "SIGINT"
+    logging.info(f"Signal handler invoked: received {signal_name} (signal number {signum})")
+
+    if context.status_manager.get_operation_in_progress():
+        logging.warning(f"Received {signal_name} during active operation. Refusing graceful shutdown.")
+        print(
+            f"\n⚠️ {signal_name} received during operation\n"
+            f"⚠️ Cannot shutdown gracefully while operation is active\n"
+            f"⚠️ Use 'kill -9 {os.getpid()}' to force termination\n",
+            flush=True,
+        )
+        return  # Refuse shutdown
+    else:
+        logging.info(f"Received {signal_name}, shutting down gracefully (no operation in progress)")
+        cleanup_and_exit(context)
+
+
+def cleanup_and_exit(context: DaemonContext) -> None:
+    """Clean up daemon state and exit."""
+    logging.info("Daemon shutting down")
+
+    # Shutdown subsystems
+    cleanup_daemon_context(context)
+
+    # Remove PID file
+    try:
+        PID_FILE.unlink(missing_ok=True)
+    except KeyboardInterrupt:
+        _thread.interrupt_main()
+        raise
+    except Exception as e:
+        logging.error(f"Failed to remove PID file: {e}")
+
+    # Set final status
+    context.status_manager.update_status(DaemonState.IDLE, "Daemon shut down")
+
+    logging.info("Cleanup complete, exiting with status 0")
+    sys.exit(0)
+
+
+def run_daemon_loop() -> None:
+    """Main daemon loop: process build, deploy and monitor requests."""
+    daemon_pid = os.getpid()
+    daemon_started_at = time.time()
+
+    logging.info("Starting daemon loop...")
+
+    # Determine optimal worker pool size
+    try:
+        num_workers = multiprocessing.cpu_count()
+    except (ImportError, NotImplementedError) as e:
+        num_workers = 4  # Fallback for systems without multiprocessing
+        logging.warning(f"Could not detect CPU count ({e}), using fallback: {num_workers} workers")
+
+    # Create daemon context (includes status manager)
+    context = create_daemon_context(
+        daemon_pid=daemon_pid,
+        daemon_started_at=daemon_started_at,
+        num_workers=num_workers,
+        file_cache_path=FILE_CACHE_FILE,
+        status_file_path=STATUS_FILE,
+    )
+
+    # Write initial IDLE status IMMEDIATELY to prevent clients from reading stale status
+    context.status_manager.update_status(DaemonState.IDLE, "Daemon starting...")
+
+    # Initialize process tracker
+    process_tracker = ProcessTracker(PROCESS_REGISTRY_FILE)
+
+    # Register signal handlers
+    def signal_handler_wrapper(signum: int, frame: object) -> None:
+        signal_handler(signum, frame, context)
+
+    signal.signal(signal.SIGTERM, signal_handler_wrapper)
+    signal.signal(signal.SIGINT, signal_handler_wrapper)
+
+    # Create request processors
+    build_processor = BuildRequestProcessor()
+    deploy_processor = DeployRequestProcessor()
+    monitor_processor = MonitorRequestProcessor()
+
+    logging.info(f"Daemon started with PID {daemon_pid}")
+    context.status_manager.update_status(DaemonState.IDLE, "Daemon ready")
+
+    last_activity = time.time()
+    last_orphan_check = time.time()
+    last_cancel_cleanup = time.time()
+
+    logging.info("Entering main daemon loop...")
+    iteration_count = 0
+
+    while True:
+        try:
+            iteration_count += 1
+            if iteration_count % 100 == 0:  # Log every 100 iterations to avoid spam
+                logging.debug(f"Daemon main loop iteration {iteration_count}")
+
+            # Check for shutdown signal
+            if should_shutdown():
+                logging.info("Shutdown requested via signal")
+                cleanup_and_exit(context)
+
+            # Check idle timeout
+            idle_time = time.time() - last_activity
+            if idle_time > IDLE_TIMEOUT:
+                logging.info(f"Idle timeout reached ({idle_time:.1f}s / {IDLE_TIMEOUT}s), shutting down")
+                cleanup_and_exit(context)
+
+            # Periodically check for and clean up orphaned processes
+            if time.time() - last_orphan_check >= ORPHAN_CHECK_INTERVAL:
+                try:
+                    orphaned_clients = process_tracker.cleanup_orphaned_processes()
+                    if orphaned_clients:
+                        logging.info(f"Cleaned up orphaned processes for {len(orphaned_clients)} dead clients: {orphaned_clients}")
+                    last_orphan_check = time.time()
+                except KeyboardInterrupt:
+                    _thread.interrupt_main()
+                    raise
+                except Exception as e:
+                    logging.error(f"Error during orphan cleanup: {e}", exc_info=True)
+
+            # Periodically clean up stale cancel signals (every 60 seconds)
+            if time.time() - last_cancel_cleanup >= 60:
+                try:
+                    cleanup_stale_cancel_signals()
+                    last_cancel_cleanup = time.time()
+                except KeyboardInterrupt:
+                    _thread.interrupt_main()
+                    raise
+                except Exception as e:
+                    logging.error(f"Error during cancel signal cleanup: {e}", exc_info=True)
+
+            # Check for build requests
+            build_request = read_request_file(BUILD_REQUEST_FILE, BuildRequest)
+            if build_request:
+                last_activity = time.time()
+                logging.info(f"Received build request: {build_request}")
+
+                # Mark operation in progress
+                context.status_manager.set_operation_in_progress(True)
+
+                # Process request
+                build_processor.process_request(build_request, context)
+
+                # Mark operation complete
+                context.status_manager.set_operation_in_progress(False)
+
+                # Clear request file
+                clear_request_file(BUILD_REQUEST_FILE)
+
+            # Check for deploy requests
+            deploy_request = read_request_file(DEPLOY_REQUEST_FILE, DeployRequest)
+            if deploy_request:
+                last_activity = time.time()
+                logging.info(f"Received deploy request: {deploy_request}")
+
+                # Mark operation in progress
+                context.status_manager.set_operation_in_progress(True)
+
+                # Process request
+                deploy_processor.process_request(deploy_request, context)
+
+                # Mark operation complete
+                context.status_manager.set_operation_in_progress(False)
+
+                # Clear request file
+                clear_request_file(DEPLOY_REQUEST_FILE)
+
+            # Check for monitor requests
+            monitor_request = read_request_file(MONITOR_REQUEST_FILE, MonitorRequest)
+            if monitor_request:
+                last_activity = time.time()
+                logging.info(f"Received monitor request: {monitor_request}")
+
+                # Mark operation in progress
+                context.status_manager.set_operation_in_progress(True)
+
+                # Process request
+                monitor_processor.process_request(monitor_request, context)
+
+                # Mark operation complete
+                context.status_manager.set_operation_in_progress(False)
+
+                # Clear request file
+                clear_request_file(MONITOR_REQUEST_FILE)
+
+            # Sleep briefly to avoid busy-wait
+            time.sleep(0.5)
+
+        except KeyboardInterrupt:
+            logging.warning("Daemon interrupted by user")
+            _thread.interrupt_main()
+            cleanup_and_exit(context)
+        except Exception as e:
+            logging.error(f"Daemon error: {e}", exc_info=True)
+            # Continue running despite errors
+            time.sleep(1)
+
+
+def main() -> int:
+    """Main entry point for daemon."""
+    # Parse command-line arguments
+    foreground = "--foreground" in sys.argv
+
+    # Setup logging
+    setup_logging(foreground=foreground)
+
+    # Ensure daemon directory exists
+    DAEMON_DIR.mkdir(parents=True, exist_ok=True)
+
+    if foreground:
+        # Run in foreground (for debugging)
+        logging.info("Running in foreground mode")
+        # Write PID file
+        with open(PID_FILE, "w") as f:
+            f.write(str(os.getpid()))
+        try:
+            run_daemon_loop()
+        finally:
+            PID_FILE.unlink(missing_ok=True)
+        return 0
+
+    # Check if daemon already running
+    if PID_FILE.exists():
+        try:
+            with open(PID_FILE) as f:
+                existing_pid = int(f.read().strip())
+            if psutil.pid_exists(existing_pid):
+                logging.info(f"Daemon already running with PID {existing_pid}")
+                print(f"Daemon already running with PID {existing_pid}")
+                return 0
+            else:
+                # Stale PID file
+                logging.info(f"Removing stale PID file for PID {existing_pid}")
+                PID_FILE.unlink()
+        except KeyboardInterrupt:
+            _thread.interrupt_main()
+            raise
+        except Exception as e:
+            logging.warning(f"Error checking existing PID: {e}")
+            PID_FILE.unlink(missing_ok=True)
+
+    # Simple daemonization for cross-platform compatibility
+    try:
+        # Fork to background. os.fork() is called directly so that the
+        # AttributeError fallback below is actually reachable on platforms
+        # without fork (a hasattr() guard would silently skip it on Windows).
+        if os.fork() > 0:  # type: ignore[attr-defined]
+            # Parent process exits
+            return 0
+    except (OSError, AttributeError):
+        # Fork not supported (Windows) - run in background as subprocess
+        logging.info("Fork not supported, using subprocess")
+        subprocess.Popen(
+            [sys.executable, __file__, "--foreground"],
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+            stdin=subprocess.DEVNULL,
+        )
+        return 0
+
+    # Child process continues
+    # Write PID file
+    with open(PID_FILE, "w") as f:
+        f.write(str(os.getpid()))
+
+    try:
+        run_daemon_loop()
+    finally:
+        PID_FILE.unlink(missing_ok=True)
+
+    return 0
+
+
+if __name__ == "__main__":
+    try:
+        sys.exit(main())
+    except KeyboardInterrupt:
+        print("\nDaemon interrupted by user")
+        sys.exit(130)
fbuild/daemon/daemon_context.py
@@ -0,0 +1,196 @@
+"""
+Daemon Context - Centralized state management for fbuild daemon.
+
+This module provides the DaemonContext class which encapsulates all daemon state
+that was previously stored in global variables. This improves testability,
+makes dependencies explicit, and eliminates global mutable state.
+"""
+
+import threading
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING
+
+from fbuild.daemon.compilation_queue import CompilationJobQueue
+from fbuild.daemon.error_collector import ErrorCollector
+from fbuild.daemon.file_cache import FileCache
+from fbuild.daemon.lock_manager import ResourceLockManager
+from fbuild.daemon.operation_registry import OperationRegistry
+from fbuild.daemon.status_manager import StatusManager
+from fbuild.daemon.subprocess_manager import SubprocessManager
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+
+@dataclass
+class DaemonContext:
+    """Centralized context for all daemon state and subsystems.
+
+    This class replaces the 12 global variables in daemon.py with a single
+    context object that can be passed to functions explicitly. This improves:
+    - Testability: Mock the entire context in tests
+    - Clarity: Dependencies are explicit in function signatures
+    - Thread-safety: Locks are properly encapsulated
+    - Lifecycle: Cleanup is centralized in one place
+
+    Attributes:
+        daemon_pid: Process ID of the daemon
+        daemon_started_at: Unix timestamp when daemon was started
+        compilation_queue: Queue for managing parallel compilation jobs
+        operation_registry: Registry for tracking active/completed operations
+        subprocess_manager: Manager for daemon-spawned subprocesses
+        file_cache: Cache for file modification times
+        error_collector: Global error collector for operations
+        lock_manager: Unified resource lock manager for ports and projects
+        status_manager: Manager for daemon status file operations
+        operation_in_progress: Flag indicating if any operation is running
+        operation_lock: Lock protecting the operation_in_progress flag
+    """
+
+    # Daemon identity
+    daemon_pid: int
+    daemon_started_at: float
+
+    # Subsystems
+    compilation_queue: CompilationJobQueue
+    operation_registry: OperationRegistry
+    subprocess_manager: SubprocessManager
+    file_cache: FileCache
+    error_collector: ErrorCollector
+    lock_manager: ResourceLockManager
+    status_manager: StatusManager
+
+    # Operation state
+    operation_in_progress: bool = False
+    operation_lock: threading.Lock = field(default_factory=threading.Lock)
+
+
+def create_daemon_context(
+    daemon_pid: int,
+    daemon_started_at: float,
+    num_workers: int,
+    file_cache_path: "Path",
+    status_file_path: "Path",
+) -> DaemonContext:
+    """Factory function to create and initialize a DaemonContext.
+
+    This function initializes all daemon subsystems and returns a fully
+    configured DaemonContext ready for use.
+
+    Args:
+        daemon_pid: Process ID of the daemon
+        daemon_started_at: Unix timestamp when daemon started
+        num_workers: Number of compilation worker threads
+        file_cache_path: Path to the file cache JSON file
+        status_file_path: Path to the status file
+
+    Returns:
+        Fully initialized DaemonContext
+
+    Example:
+        >>> import os
+        >>> import time
+        >>> from pathlib import Path
+        >>>
+        >>> context = create_daemon_context(
+        ...     daemon_pid=os.getpid(),
+        ...     daemon_started_at=time.time(),
+        ...     num_workers=4,
+        ...     file_cache_path=Path.home() / ".fbuild" / "daemon" / "file_cache.json",
+        ...     status_file_path=Path.home() / ".fbuild" / "daemon" / "daemon_status.json",
+        ... )
+        >>> # Use context in request handlers
+        >>> process_build_request(request, context)
+    """
+    import logging
+
+    logging.info("Initializing daemon context...")
+
+    # Initialize compilation queue with worker pool
+    compilation_queue = CompilationJobQueue(num_workers=num_workers)
+    compilation_queue.start()
+    logging.info(f"Compilation queue started with {num_workers} workers")
+
+    # Initialize operation registry
+    logging.debug("Creating operation registry (max_history=100)...")
+    operation_registry = OperationRegistry(max_history=100)
+    logging.info("Operation registry initialized")
+
+    # Initialize subprocess manager
+    subprocess_manager = SubprocessManager()
+    logging.info("Subprocess manager initialized")
+
+    # Initialize file cache
+    logging.debug(f"Creating file cache (cache_file={file_cache_path})...")
+    file_cache = FileCache(cache_file=file_cache_path)
+    logging.info("File cache initialized")
+
+    # Initialize error collector
+    error_collector = ErrorCollector()
+    logging.info("Error collector initialized")
+
+    # Initialize lock manager
+    lock_manager = ResourceLockManager()
+    logging.info("Resource lock manager initialized")
+
+    # Initialize status manager
+    logging.debug(f"Creating status manager (status_file={status_file_path})...")
+    status_manager = StatusManager(
+        status_file=status_file_path,
+        daemon_pid=daemon_pid,
+        daemon_started_at=daemon_started_at,
+    )
+    logging.info("Status manager initialized")
+
+    # Create context
+    context = DaemonContext(
+        daemon_pid=daemon_pid,
+        daemon_started_at=daemon_started_at,
+        compilation_queue=compilation_queue,
+        operation_registry=operation_registry,
+        subprocess_manager=subprocess_manager,
+        file_cache=file_cache,
+        error_collector=error_collector,
+        lock_manager=lock_manager,
+        status_manager=status_manager,
+    )
+
+    logging.info("✅ Daemon context initialized successfully")
+    return context
+
+
+def cleanup_daemon_context(context: DaemonContext) -> None:
+    """Cleanup and shutdown all daemon subsystems in the context.
+
+    This function should be called during daemon shutdown to ensure all
+    resources are properly released.
+
+    Args:
+        context: The DaemonContext to clean up
+
+    Example:
+        >>> try:
+        ...     run_daemon(context)
+        ... finally:
+        ...     cleanup_daemon_context(context)
+    """
+    import logging
+
+    logging.info("Shutting down daemon context...")
+
+    # Shutdown compilation queue
+    if context.compilation_queue:
+        try:
+            context.compilation_queue.shutdown()
+            logging.info("Compilation queue shut down")
+        except KeyboardInterrupt:
+            logging.warning("KeyboardInterrupt during compilation queue shutdown")
+            raise
+        except Exception as e:
+            logging.error(f"Error shutting down compilation queue: {e}")
+
+    # Log cleanup of other subsystems (they don't have explicit shutdown methods)
+    logging.debug("Cleaning up subprocess manager...")
+    logging.debug("Cleaning up error collector...")
+
+    logging.info("✅ Daemon context cleaned up")
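
The DaemonContext docstring highlights testability: because every subsystem is a constructor argument, a test can assemble a context from mocks without creating files under ~/.fbuild or starting compilation workers. A minimal sketch of that pattern follows, assuming only the dataclass definition above; the update_status arguments shown are placeholders, not the real StatusManager signature.

# Hedged sketch of the mock-the-context testing pattern described in the
# DaemonContext docstring. handler-side details are placeholders.
import time
from unittest.mock import MagicMock

from fbuild.daemon.daemon_context import DaemonContext


def make_mock_context() -> DaemonContext:
    """Build a DaemonContext whose subsystems are all mocks."""
    return DaemonContext(
        daemon_pid=12345,
        daemon_started_at=time.time(),
        compilation_queue=MagicMock(),  # no worker threads started
        operation_registry=MagicMock(),
        subprocess_manager=MagicMock(),
        file_cache=MagicMock(),
        error_collector=MagicMock(),
        lock_manager=MagicMock(),
        status_manager=MagicMock(),
    )


def test_status_updates_are_recorded() -> None:
    context = make_mock_context()
    # Stand-in call: real code would invoke a request processor with `context`.
    context.status_manager.update_status("IDLE", "ready")
    context.status_manager.update_status.assert_called_once()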