fbuild 1.2.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (121)
  1. fbuild/__init__.py +390 -0
  2. fbuild/assets/example.txt +1 -0
  3. fbuild/build/__init__.py +117 -0
  4. fbuild/build/archive_creator.py +186 -0
  5. fbuild/build/binary_generator.py +444 -0
  6. fbuild/build/build_component_factory.py +131 -0
  7. fbuild/build/build_info_generator.py +624 -0
  8. fbuild/build/build_state.py +325 -0
  9. fbuild/build/build_utils.py +93 -0
  10. fbuild/build/compilation_executor.py +422 -0
  11. fbuild/build/compiler.py +165 -0
  12. fbuild/build/compiler_avr.py +574 -0
  13. fbuild/build/configurable_compiler.py +664 -0
  14. fbuild/build/configurable_linker.py +637 -0
  15. fbuild/build/flag_builder.py +214 -0
  16. fbuild/build/library_dependency_processor.py +185 -0
  17. fbuild/build/linker.py +708 -0
  18. fbuild/build/orchestrator.py +67 -0
  19. fbuild/build/orchestrator_avr.py +651 -0
  20. fbuild/build/orchestrator_esp32.py +878 -0
  21. fbuild/build/orchestrator_rp2040.py +719 -0
  22. fbuild/build/orchestrator_stm32.py +696 -0
  23. fbuild/build/orchestrator_teensy.py +580 -0
  24. fbuild/build/source_compilation_orchestrator.py +218 -0
  25. fbuild/build/source_scanner.py +516 -0
  26. fbuild/cli.py +717 -0
  27. fbuild/cli_utils.py +314 -0
  28. fbuild/config/__init__.py +16 -0
  29. fbuild/config/board_config.py +542 -0
  30. fbuild/config/board_loader.py +92 -0
  31. fbuild/config/ini_parser.py +369 -0
  32. fbuild/config/mcu_specs.py +88 -0
  33. fbuild/daemon/__init__.py +42 -0
  34. fbuild/daemon/async_client.py +531 -0
  35. fbuild/daemon/client.py +1505 -0
  36. fbuild/daemon/compilation_queue.py +293 -0
  37. fbuild/daemon/configuration_lock.py +865 -0
  38. fbuild/daemon/daemon.py +585 -0
  39. fbuild/daemon/daemon_context.py +293 -0
  40. fbuild/daemon/error_collector.py +263 -0
  41. fbuild/daemon/file_cache.py +332 -0
  42. fbuild/daemon/firmware_ledger.py +546 -0
  43. fbuild/daemon/lock_manager.py +508 -0
  44. fbuild/daemon/logging_utils.py +149 -0
  45. fbuild/daemon/messages.py +957 -0
  46. fbuild/daemon/operation_registry.py +288 -0
  47. fbuild/daemon/port_state_manager.py +249 -0
  48. fbuild/daemon/process_tracker.py +366 -0
  49. fbuild/daemon/processors/__init__.py +18 -0
  50. fbuild/daemon/processors/build_processor.py +248 -0
  51. fbuild/daemon/processors/deploy_processor.py +664 -0
  52. fbuild/daemon/processors/install_deps_processor.py +431 -0
  53. fbuild/daemon/processors/locking_processor.py +777 -0
  54. fbuild/daemon/processors/monitor_processor.py +285 -0
  55. fbuild/daemon/request_processor.py +457 -0
  56. fbuild/daemon/shared_serial.py +819 -0
  57. fbuild/daemon/status_manager.py +238 -0
  58. fbuild/daemon/subprocess_manager.py +316 -0
  59. fbuild/deploy/__init__.py +21 -0
  60. fbuild/deploy/deployer.py +67 -0
  61. fbuild/deploy/deployer_esp32.py +310 -0
  62. fbuild/deploy/docker_utils.py +315 -0
  63. fbuild/deploy/monitor.py +519 -0
  64. fbuild/deploy/qemu_runner.py +603 -0
  65. fbuild/interrupt_utils.py +34 -0
  66. fbuild/ledger/__init__.py +52 -0
  67. fbuild/ledger/board_ledger.py +560 -0
  68. fbuild/output.py +352 -0
  69. fbuild/packages/__init__.py +66 -0
  70. fbuild/packages/archive_utils.py +1098 -0
  71. fbuild/packages/arduino_core.py +412 -0
  72. fbuild/packages/cache.py +256 -0
  73. fbuild/packages/concurrent_manager.py +510 -0
  74. fbuild/packages/downloader.py +518 -0
  75. fbuild/packages/fingerprint.py +423 -0
  76. fbuild/packages/framework_esp32.py +538 -0
  77. fbuild/packages/framework_rp2040.py +349 -0
  78. fbuild/packages/framework_stm32.py +459 -0
  79. fbuild/packages/framework_teensy.py +346 -0
  80. fbuild/packages/github_utils.py +96 -0
  81. fbuild/packages/header_trampoline_cache.py +394 -0
  82. fbuild/packages/library_compiler.py +203 -0
  83. fbuild/packages/library_manager.py +549 -0
  84. fbuild/packages/library_manager_esp32.py +725 -0
  85. fbuild/packages/package.py +163 -0
  86. fbuild/packages/platform_esp32.py +383 -0
  87. fbuild/packages/platform_rp2040.py +400 -0
  88. fbuild/packages/platform_stm32.py +581 -0
  89. fbuild/packages/platform_teensy.py +312 -0
  90. fbuild/packages/platform_utils.py +131 -0
  91. fbuild/packages/platformio_registry.py +369 -0
  92. fbuild/packages/sdk_utils.py +231 -0
  93. fbuild/packages/toolchain.py +436 -0
  94. fbuild/packages/toolchain_binaries.py +196 -0
  95. fbuild/packages/toolchain_esp32.py +489 -0
  96. fbuild/packages/toolchain_metadata.py +185 -0
  97. fbuild/packages/toolchain_rp2040.py +436 -0
  98. fbuild/packages/toolchain_stm32.py +417 -0
  99. fbuild/packages/toolchain_teensy.py +404 -0
  100. fbuild/platform_configs/esp32.json +150 -0
  101. fbuild/platform_configs/esp32c2.json +144 -0
  102. fbuild/platform_configs/esp32c3.json +143 -0
  103. fbuild/platform_configs/esp32c5.json +151 -0
  104. fbuild/platform_configs/esp32c6.json +151 -0
  105. fbuild/platform_configs/esp32p4.json +149 -0
  106. fbuild/platform_configs/esp32s3.json +151 -0
  107. fbuild/platform_configs/imxrt1062.json +56 -0
  108. fbuild/platform_configs/rp2040.json +70 -0
  109. fbuild/platform_configs/rp2350.json +76 -0
  110. fbuild/platform_configs/stm32f1.json +59 -0
  111. fbuild/platform_configs/stm32f4.json +63 -0
  112. fbuild/py.typed +0 -0
  113. fbuild-1.2.8.dist-info/METADATA +468 -0
  114. fbuild-1.2.8.dist-info/RECORD +121 -0
  115. fbuild-1.2.8.dist-info/WHEEL +5 -0
  116. fbuild-1.2.8.dist-info/entry_points.txt +5 -0
  117. fbuild-1.2.8.dist-info/licenses/LICENSE +21 -0
  118. fbuild-1.2.8.dist-info/top_level.txt +2 -0
  119. fbuild_lint/__init__.py +0 -0
  120. fbuild_lint/ruff_plugins/__init__.py +0 -0
  121. fbuild_lint/ruff_plugins/keyboard_interrupt_checker.py +158 -0
@@ -0,0 +1,508 @@
1
+ """
2
+ Resource Lock Manager - Unified lock management for daemon operations.
3
+
4
+ This module provides the ResourceLockManager class which centralizes all
5
+ lock management logic. Key features:
6
+ - Per-port and per-project locks with context managers
7
+ - Lock timeout/expiry for automatic stale lock detection
8
+ - Lock holder tracking for better error messages
9
+ - Force-release capability for stuck locks
10
+ - Automatic cleanup of stale locks
11
+ """
12
+
13
+ import logging
14
+ import threading
15
+ import time
16
+ from contextlib import contextmanager
17
+ from dataclasses import dataclass, field
18
+ from typing import Any, Iterator
19
+
20
+ # Default lock timeout: 30 minutes (for long builds)
21
+ DEFAULT_LOCK_TIMEOUT = 1800.0
22
+
23
+ # Stale lock threshold: locks older than this with no activity are candidates for cleanup
24
+ STALE_LOCK_THRESHOLD = 3600.0 # 1 hour
25
+
26
+
27
+ @dataclass
28
+ class LockInfo:
29
+ """Information about a lock for debugging, timeout detection, and cleanup.
30
+
31
+ Attributes:
32
+ lock: The actual threading.Lock object
33
+ created_at: Unix timestamp when lock was created
34
+ acquired_at: Unix timestamp when lock was last acquired (None if not held)
35
+ last_released_at: Unix timestamp when lock was last released
36
+ acquisition_count: Number of times lock has been acquired
37
+ holder_thread_id: Thread ID currently holding the lock (None if not held)
38
+ holder_operation_id: Operation ID currently holding the lock
39
+ holder_description: Human-readable description of what's holding the lock
40
+ timeout: Maximum time in seconds the lock can be held before considered stale
41
+ """
42
+
43
+ lock: threading.Lock
44
+ created_at: float = field(default_factory=time.time)
45
+ acquired_at: float | None = None
46
+ last_released_at: float | None = None
47
+ acquisition_count: int = 0
48
+ holder_thread_id: int | None = None
49
+ holder_operation_id: str | None = None
50
+ holder_description: str | None = None
51
+ timeout: float = DEFAULT_LOCK_TIMEOUT
52
+
53
+ def is_held(self) -> bool:
54
+ """Check if lock is currently held."""
55
+ return self.acquired_at is not None and self.last_released_at is None or (self.acquired_at is not None and self.last_released_at is not None and self.acquired_at > self.last_released_at)
56
+
57
+ def is_stale(self) -> bool:
58
+ """Check if lock is stale (held beyond timeout)."""
59
+ if not self.is_held():
60
+ return False
61
+ if self.acquired_at is None:
62
+ return False
63
+ hold_time = time.time() - self.acquired_at
64
+ return hold_time > self.timeout
65
+
66
+ def hold_duration(self) -> float | None:
67
+ """Get how long the lock has been held."""
68
+ if not self.is_held() or self.acquired_at is None:
69
+ return None
70
+ return time.time() - self.acquired_at
71
+
72
+ def to_dict(self) -> dict[str, Any]:
73
+ """Convert to dictionary for JSON serialization."""
74
+ return {
75
+ "created_at": self.created_at,
76
+ "acquired_at": self.acquired_at,
77
+ "last_released_at": self.last_released_at,
78
+ "acquisition_count": self.acquisition_count,
79
+ "holder_thread_id": self.holder_thread_id,
80
+ "holder_operation_id": self.holder_operation_id,
81
+ "holder_description": self.holder_description,
82
+ "timeout": self.timeout,
83
+ "is_held": self.is_held(),
84
+ "is_stale": self.is_stale(),
85
+ "hold_duration": self.hold_duration(),
86
+ }
87
+
88
+
89
class LockAcquisitionError(RuntimeError):
    """Error raised when a lock cannot be acquired.

    Provides detailed information about what's holding the lock.
    """

    def __init__(
        self,
        resource_type: str,
        resource_id: str,
        lock_info: LockInfo | None = None,
    ):
        """Build an error describing why the lock could not be acquired.

        Args:
            resource_type: Kind of resource ("port" or "project").
            resource_id: Identifier of the locked resource.
            lock_info: Lock bookkeeping record, if available, used to report
                the current holder and hold duration.
        """
        self.resource_type = resource_type
        self.resource_id = resource_id
        self.lock_info = lock_info

        # Build detailed error message naming the current holder when known.
        if lock_info is not None and lock_info.is_held():
            holder_desc = lock_info.holder_description or "unknown operation"
            hold_duration = lock_info.hold_duration()
            # Compare against None explicitly: a 0.0s hold duration is falsy
            # but should still be reported.
            duration_str = f" (held for {hold_duration:.1f}s)" if hold_duration is not None else ""
            if lock_info.is_stale():
                # Point at the method that actually exists on ResourceLockManager
                # (the class has force_release_stale_locks, not clear_stale_locks).
                message = (
                    f"{resource_type.capitalize()} lock unavailable for: {resource_id}. "
                    f"STALE lock held by: {holder_desc}{duration_str}. "
                    "Consider force-releasing with force_release_stale_locks()."
                )
            else:
                message = (
                    f"{resource_type.capitalize()} lock unavailable for: {resource_id}. "
                    f"Currently held by: {holder_desc}{duration_str}."
                )
        else:
            message = f"{resource_type.capitalize()} lock unavailable for: {resource_id}"

        super().__init__(message)
120
+
121
+
122
class ResourceLockManager:
    """Manages per-port and per-project locks with timeout detection and cleanup.

    This class provides a unified interface for managing locks that protect
    shared resources (serial ports and project directories). Features:
    - Context managers for automatic lock acquisition/release
    - Lock timeout detection for stale lock cleanup
    - Lock holder tracking for informative error messages
    - Force-release capability for stuck locks
    - Thread-safe operations

    Example:
        >>> manager = ResourceLockManager()
        >>>
        >>> # Acquire port lock for serial operations
        >>> with manager.acquire_port_lock("COM3", operation_id="deploy_123",
        ...                                description="Deploy to ESP32"):
        ...     upload_firmware_to_port("COM3")
        >>>
        >>> # Check for stale locks
        >>> stale = manager.get_stale_locks()
        >>> if stale:
        ...     print(f"Found {len(stale)} stale locks")
        ...     manager.force_release_stale_locks()
    """

    def __init__(self) -> None:
        """Initialize the ResourceLockManager."""
        self._master_lock = threading.Lock()  # Protects the two dictionaries below
        self._port_locks: dict[str, LockInfo] = {}  # Per-port locks
        self._project_locks: dict[str, LockInfo] = {}  # Per-project locks

    @contextmanager
    def acquire_port_lock(
        self,
        port: str,
        blocking: bool = True,
        timeout: float = DEFAULT_LOCK_TIMEOUT,
        operation_id: str | None = None,
        description: str | None = None,
    ) -> Iterator[None]:
        """Acquire a lock for a specific serial port.

        This ensures that only one operation can use a serial port at a time,
        preventing conflicts between deploy and monitor operations.

        Args:
            port: Serial port identifier (e.g., "COM3", "/dev/ttyUSB0")
            blocking: If True, wait for lock. If False, raise LockAcquisitionError if unavailable.
            timeout: Maximum time the lock can be held before considered stale.
            operation_id: Identifier for the operation holding the lock.
            description: Human-readable description of what's holding the lock.

        Yields:
            None (the lock is held for the duration of the context)

        Raises:
            LockAcquisitionError: If blocking=False and lock is not available
        """
        lock_info = self._get_or_create_port_lock(port, timeout)
        with self._acquire(
            "port",
            port,
            lock_info,
            blocking=blocking,
            timeout=timeout,
            operation_id=operation_id,
            description=description or f"Operation on port {port}",
        ):
            yield

    @contextmanager
    def acquire_project_lock(
        self,
        project_dir: str,
        blocking: bool = True,
        timeout: float = DEFAULT_LOCK_TIMEOUT,
        operation_id: str | None = None,
        description: str | None = None,
    ) -> Iterator[None]:
        """Acquire a lock for a specific project directory.

        This ensures that only one build operation can run for a project at a time,
        preventing file conflicts and race conditions during compilation.

        Args:
            project_dir: Absolute path to project directory
            blocking: If True, wait for lock. If False, raise LockAcquisitionError if unavailable.
            timeout: Maximum time the lock can be held before considered stale.
            operation_id: Identifier for the operation holding the lock.
            description: Human-readable description of what's holding the lock.

        Yields:
            None (the lock is held for the duration of the context)

        Raises:
            LockAcquisitionError: If blocking=False and lock is not available
        """
        lock_info = self._get_or_create_project_lock(project_dir, timeout)
        with self._acquire(
            "project",
            project_dir,
            lock_info,
            blocking=blocking,
            timeout=timeout,
            operation_id=operation_id,
            description=description or f"Build for {project_dir}",
        ):
            yield

    @contextmanager
    def _acquire(
        self,
        resource_type: str,
        resource_id: str,
        lock_info: LockInfo,
        *,
        blocking: bool,
        timeout: float,
        operation_id: str | None,
        description: str,
    ) -> Iterator[None]:
        """Shared acquire/release protocol behind both public lock context managers.

        Acquires the underlying threading.Lock, records holder bookkeeping under
        the master lock, yields, then clears the bookkeeping and releases.

        Raises:
            LockAcquisitionError: If blocking=False and the lock is unavailable.
        """
        logging.debug(f"Acquiring {resource_type} lock for: {resource_id} (blocking={blocking})")

        if not lock_info.lock.acquire(blocking=blocking):
            raise LockAcquisitionError(resource_type, resource_id, lock_info)

        try:
            # Record holder details under the master lock so status queries
            # always observe a consistent snapshot.
            with self._master_lock:
                lock_info.acquired_at = time.time()
                lock_info.acquisition_count += 1
                lock_info.holder_thread_id = threading.get_ident()
                lock_info.holder_operation_id = operation_id
                lock_info.holder_description = description
                lock_info.timeout = timeout

            logging.debug(f"{resource_type.capitalize()} lock acquired for: {resource_id} (count={lock_info.acquisition_count}, operation={operation_id})")
            yield
        finally:
            # Clear holder info before releasing.
            with self._master_lock:
                lock_info.last_released_at = time.time()
                lock_info.holder_thread_id = None
                lock_info.holder_operation_id = None
                lock_info.holder_description = None
            try:
                lock_info.lock.release()
            except RuntimeError:
                # The lock was released out from under us (e.g. force_release_lock
                # judged it stale while the operation was still running). Don't
                # let the double-release exception mask the operation's outcome.
                logging.warning(f"{resource_type.capitalize()} lock for {resource_id} was already released (force-released?)")
            logging.debug(f"{resource_type.capitalize()} lock released for: {resource_id}")

    def _get_or_create_port_lock(self, port: str, timeout: float = DEFAULT_LOCK_TIMEOUT) -> LockInfo:
        """Get or create a lock for the given port."""
        return self._get_or_create(self._port_locks, port, timeout)

    def _get_or_create_project_lock(self, project_dir: str, timeout: float = DEFAULT_LOCK_TIMEOUT) -> LockInfo:
        """Get or create a lock for the given project directory."""
        return self._get_or_create(self._project_locks, project_dir, timeout)

    def _get_or_create(self, locks: dict[str, LockInfo], key: str, timeout: float) -> LockInfo:
        """Return the LockInfo for *key* in *locks*, creating it on first use.

        The *timeout* applies only on creation; an existing lock keeps its
        current timeout (it is refreshed on every acquisition in _acquire).
        """
        with self._master_lock:
            info = locks.get(key)
            if info is None:
                info = LockInfo(lock=threading.Lock(), timeout=timeout)
                locks[key] = info
            return info

    def get_stale_locks(self) -> dict[str, list[tuple[str, LockInfo]]]:
        """Get all locks that are stale (held beyond their timeout).

        Returns:
            Dictionary with 'port_locks' and 'project_locks' keys, each containing
            a list of (resource_id, lock_info) tuples for stale locks.
        """
        with self._master_lock:
            return {
                "port_locks": [(port, info) for port, info in self._port_locks.items() if info.is_stale()],
                "project_locks": [(project, info) for project, info in self._project_locks.items() if info.is_stale()],
            }

    def get_held_locks(self) -> dict[str, list[tuple[str, LockInfo]]]:
        """Get all locks that are currently held.

        Returns:
            Dictionary with 'port_locks' and 'project_locks' keys, each containing
            a list of (resource_id, lock_info) tuples for held locks.
        """
        with self._master_lock:
            return {
                "port_locks": [(port, info) for port, info in self._port_locks.items() if info.is_held()],
                "project_locks": [(project, info) for project, info in self._project_locks.items() if info.is_held()],
            }

    def force_release_lock(self, resource_type: str, resource_id: str) -> bool:
        """Force-release a lock (use with caution - may cause race conditions).

        This should only be used to clear stale locks from stuck operations.
        Force-releasing an active lock may cause data corruption.

        Args:
            resource_type: "port" or "project"
            resource_id: The port or project directory identifier

        Returns:
            True if lock was force-released, False if lock not found or not held
        """
        with self._master_lock:
            if resource_type == "port":
                locks_dict = self._port_locks
            elif resource_type == "project":
                locks_dict = self._project_locks
            else:
                logging.error(f"Unknown resource type: {resource_type}")
                return False

            lock_info = locks_dict.get(resource_id)
            if lock_info is None:
                logging.warning(f"Lock not found for {resource_type}: {resource_id}")
                return False

            if not lock_info.is_held():
                logging.info(f"Lock for {resource_type} {resource_id} is not held")
                return False

            # Clear holder info and mark as released.
            logging.warning(
                f"Force-releasing {resource_type} lock for: {resource_id} "
                f"(was held by: {lock_info.holder_description})"
            )
            lock_info.last_released_at = time.time()
            lock_info.holder_thread_id = None
            lock_info.holder_operation_id = None
            lock_info.holder_description = None

            # Release the underlying lock; a plain threading.Lock may be released
            # from any thread. RuntimeError means it wasn't actually locked,
            # which is acceptable for a force-release.
            try:
                lock_info.lock.release()
            except RuntimeError:
                pass

            return True

    def force_release_stale_locks(self) -> int:
        """Force-release all stale locks.

        Returns:
            Number of locks force-released
        """
        stale = self.get_stale_locks()
        released = 0

        for resource_type, entries in (("port", stale["port_locks"]), ("project", stale["project_locks"])):
            for resource_id, _ in entries:
                if self.force_release_lock(resource_type, resource_id):
                    released += 1

        if released > 0:
            logging.info(f"Force-released {released} stale locks")

        return released

    def cleanup_unused_locks(self, older_than: float = STALE_LOCK_THRESHOLD) -> int:
        """Clean up locks that haven't been acquired recently.

        This prevents memory leaks from locks that were created for operations
        that are no longer running. A lock is considered unused if it:
        - Is not currently held AND
        - Hasn't been acquired in the specified time period

        Args:
            older_than: Time in seconds. Locks not acquired in this period are removed.

        Returns:
            Number of locks removed
        """
        current_time = time.time()

        def _prune(locks: dict[str, LockInfo], kind: str) -> int:
            """Drop idle entries from one lock dict; caller holds the master lock."""
            expired = [
                key
                for key, info in locks.items()
                if not info.is_held()  # never remove held locks
                # Last activity is the last release, falling back to creation time.
                and current_time - (info.last_released_at or info.created_at) > older_than
            ]
            for key in expired:
                del locks[key]
                logging.debug(f"Cleaned up unused {kind} lock: {key}")
            return len(expired)

        with self._master_lock:
            removed_count = _prune(self._port_locks, "port") + _prune(self._project_locks, "project")

        if removed_count > 0:
            logging.info(f"Cleaned up {removed_count} unused locks")

        return removed_count

    def get_lock_status(self) -> dict[str, dict[str, int]]:
        """Get current lock status for debugging.

        Returns:
            Dictionary with 'port_locks' and 'project_locks' keys, each containing
            a mapping of resource identifier to acquisition count.
        """
        with self._master_lock:
            return {
                "port_locks": {port: info.acquisition_count for port, info in self._port_locks.items()},
                "project_locks": {project: info.acquisition_count for project, info in self._project_locks.items()},
            }

    def get_lock_details(self) -> dict[str, dict[str, dict[str, Any]]]:
        """Get detailed lock information for debugging and status reporting.

        Returns:
            Dictionary with 'port_locks' and 'project_locks' keys, each containing
            a mapping of resource identifier to detailed lock info dict.
        """
        with self._master_lock:
            return {
                "port_locks": {port: info.to_dict() for port, info in self._port_locks.items()},
                "project_locks": {project: info.to_dict() for project, info in self._project_locks.items()},
            }

    def get_lock_count(self) -> dict[str, int]:
        """Get the total number of locks currently tracked.

        Returns:
            Dictionary with 'port_locks' and 'project_locks' counts.
        """
        with self._master_lock:
            return {
                "port_locks": len(self._port_locks),
                "project_locks": len(self._project_locks),
            }

    def clear_all_locks(self) -> int:
        """Clear all locks (use with extreme caution - only for daemon restart).

        This force-releases all locks and clears the lock dictionaries.
        Should only be used during daemon shutdown/restart.

        Returns:
            Number of locks cleared
        """
        with self._master_lock:
            count = len(self._port_locks) + len(self._project_locks)

            # Force release any held locks before dropping the bookkeeping.
            for kind, locks in (("port", self._port_locks), ("project", self._project_locks)):
                for key, lock_info in locks.items():
                    if lock_info.is_held():
                        logging.warning(f"Clearing held {kind} lock: {key}")
                        try:
                            lock_info.lock.release()
                        except RuntimeError:
                            pass

            self._port_locks.clear()
            self._project_locks.clear()

        if count > 0:
            logging.info(f"Cleared all {count} locks")

        return count
@@ -0,0 +1,149 @@
1
+ """
2
+ Logging utilities for fbuild daemon.
3
+
4
+ This module provides decorators and utilities to reduce logging verbosity
5
+ while maintaining debuggability through automatic function entry/exit logging.
6
+ """
7
+
8
+ import functools
9
+ import logging
10
+ from typing import Any, Callable, TypeVar
11
+
12
+ from fbuild.interrupt_utils import handle_keyboard_interrupt_properly
13
+
14
+ # Type variable for generic decorator
15
+ F = TypeVar("F", bound=Callable[..., Any])
16
+
17
+
18
+ def log_function_calls(logger: logging.Logger | None = None, level: int = logging.DEBUG) -> Callable[[F], F]:
19
+ """Decorator to log function entry and exit.
20
+
21
+ This decorator automatically logs when a function is called and when it returns,
22
+ including the function name and arguments. This replaces the need for manual
23
+ logging statements at the beginning and end of functions.
24
+
25
+ Args:
26
+ logger: Logger instance to use (defaults to function's module logger)
27
+ level: Logging level to use (default: DEBUG)
28
+
29
+ Returns:
30
+ Decorated function
31
+
32
+ Example:
33
+ >>> @log_function_calls()
34
+ ... def my_function(arg1: str, arg2: int) -> bool:
35
+ ... # Function logic here
36
+ ... return True
37
+ """
38
+
39
+ def decorator(func: F) -> F:
40
+ # Get function's module logger if none provided
41
+ func_logger = logger or logging.getLogger(func.__module__)
42
+
43
+ @functools.wraps(func)
44
+ def wrapper(*args: Any, **kwargs: Any) -> Any:
45
+ # Format arguments for logging (truncate long strings)
46
+ args_repr = []
47
+ for arg in args:
48
+ arg_str = repr(arg)
49
+ if len(arg_str) > 100:
50
+ arg_str = arg_str[:97] + "..."
51
+ args_repr.append(arg_str)
52
+
53
+ kwargs_repr = []
54
+ for key, value in kwargs.items():
55
+ value_str = repr(value)
56
+ if len(value_str) > 100:
57
+ value_str = value_str[:97] + "..."
58
+ kwargs_repr.append(f"{key}={value_str}")
59
+
60
+ signature = ", ".join(args_repr + kwargs_repr)
61
+
62
+ # Log function entry
63
+ func_logger.log(level, f"→ {func.__name__}({signature})")
64
+
65
+ try:
66
+ result = func(*args, **kwargs)
67
+ # Log function exit (without result to avoid noise)
68
+ func_logger.log(level, f"← {func.__name__}() completed")
69
+ return result
70
+ except KeyboardInterrupt as ke:
71
+ handle_keyboard_interrupt_properly(ke)
72
+ except Exception as e:
73
+ # Log exception exit
74
+ func_logger.log(level, f"← {func.__name__}() raised {type(e).__name__}: {e}")
75
+ raise
76
+
77
+ return wrapper # type: ignore
78
+
79
+ return decorator
80
+
81
+
82
+ def log_method_calls(logger: logging.Logger | None = None, level: int = logging.DEBUG) -> Callable[[F], F]:
83
+ """Decorator to log method entry and exit (for class methods).
84
+
85
+ Similar to log_function_calls but designed for class methods. Skips logging
86
+ the 'self' parameter to reduce noise.
87
+
88
+ Args:
89
+ logger: Logger instance to use (defaults to method's module logger)
90
+ level: Logging level to use (default: DEBUG)
91
+
92
+ Returns:
93
+ Decorated method
94
+
95
+ Example:
96
+ >>> class MyClass:
97
+ ... @log_method_calls()
98
+ ... def my_method(self, arg1: str) -> bool:
99
+ ... return True
100
+ """
101
+
102
+ def decorator(func: F) -> F:
103
+ # Get method's module logger if none provided
104
+ func_logger = logger or logging.getLogger(func.__module__)
105
+
106
+ @functools.wraps(func)
107
+ def wrapper(*args: Any, **kwargs: Any) -> Any:
108
+ # Skip 'self' parameter (args[0])
109
+ args_repr = []
110
+ for i, arg in enumerate(args):
111
+ if i == 0: # Skip 'self'
112
+ continue
113
+ arg_str = repr(arg)
114
+ if len(arg_str) > 100:
115
+ arg_str = arg_str[:97] + "..."
116
+ args_repr.append(arg_str)
117
+
118
+ kwargs_repr = []
119
+ for key, value in kwargs.items():
120
+ value_str = repr(value)
121
+ if len(value_str) > 100:
122
+ value_str = value_str[:97] + "..."
123
+ kwargs_repr.append(f"{key}={value_str}")
124
+
125
+ signature = ", ".join(args_repr + kwargs_repr)
126
+
127
+ # Get class name if available
128
+ class_name = ""
129
+ if args and hasattr(args[0], "__class__"):
130
+ class_name = args[0].__class__.__name__ + "."
131
+
132
+ # Log method entry
133
+ func_logger.log(level, f"→ {class_name}{func.__name__}({signature})")
134
+
135
+ try:
136
+ result = func(*args, **kwargs)
137
+ # Log method exit (without result to avoid noise)
138
+ func_logger.log(level, f"← {class_name}{func.__name__}() completed")
139
+ return result
140
+ except KeyboardInterrupt as ke:
141
+ handle_keyboard_interrupt_properly(ke)
142
+ except Exception as e:
143
+ # Log exception exit
144
+ func_logger.log(level, f"← {class_name}{func.__name__}() raised {type(e).__name__}: {e}")
145
+ raise
146
+
147
+ return wrapper # type: ignore
148
+
149
+ return decorator