fbuild-1.1.0-py3-none-any.whl

This diff shows the content of a publicly available package version as released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their public registries.

Potentially problematic release: this version of fbuild has been flagged as possibly problematic.

Files changed (93)
  1. fbuild/__init__.py +0 -0
  2. fbuild/assets/example.txt +1 -0
  3. fbuild/build/__init__.py +117 -0
  4. fbuild/build/archive_creator.py +186 -0
  5. fbuild/build/binary_generator.py +444 -0
  6. fbuild/build/build_component_factory.py +131 -0
  7. fbuild/build/build_state.py +325 -0
  8. fbuild/build/build_utils.py +98 -0
  9. fbuild/build/compilation_executor.py +422 -0
  10. fbuild/build/compiler.py +165 -0
  11. fbuild/build/compiler_avr.py +574 -0
  12. fbuild/build/configurable_compiler.py +612 -0
  13. fbuild/build/configurable_linker.py +637 -0
  14. fbuild/build/flag_builder.py +186 -0
  15. fbuild/build/library_dependency_processor.py +185 -0
  16. fbuild/build/linker.py +708 -0
  17. fbuild/build/orchestrator.py +67 -0
  18. fbuild/build/orchestrator_avr.py +656 -0
  19. fbuild/build/orchestrator_esp32.py +797 -0
  20. fbuild/build/orchestrator_teensy.py +543 -0
  21. fbuild/build/source_compilation_orchestrator.py +220 -0
  22. fbuild/build/source_scanner.py +516 -0
  23. fbuild/cli.py +566 -0
  24. fbuild/cli_utils.py +312 -0
  25. fbuild/config/__init__.py +16 -0
  26. fbuild/config/board_config.py +457 -0
  27. fbuild/config/board_loader.py +92 -0
  28. fbuild/config/ini_parser.py +209 -0
  29. fbuild/config/mcu_specs.py +88 -0
  30. fbuild/daemon/__init__.py +34 -0
  31. fbuild/daemon/client.py +929 -0
  32. fbuild/daemon/compilation_queue.py +293 -0
  33. fbuild/daemon/daemon.py +474 -0
  34. fbuild/daemon/daemon_context.py +196 -0
  35. fbuild/daemon/error_collector.py +263 -0
  36. fbuild/daemon/file_cache.py +332 -0
  37. fbuild/daemon/lock_manager.py +270 -0
  38. fbuild/daemon/logging_utils.py +149 -0
  39. fbuild/daemon/messages.py +301 -0
  40. fbuild/daemon/operation_registry.py +288 -0
  41. fbuild/daemon/process_tracker.py +366 -0
  42. fbuild/daemon/processors/__init__.py +12 -0
  43. fbuild/daemon/processors/build_processor.py +157 -0
  44. fbuild/daemon/processors/deploy_processor.py +327 -0
  45. fbuild/daemon/processors/monitor_processor.py +146 -0
  46. fbuild/daemon/request_processor.py +401 -0
  47. fbuild/daemon/status_manager.py +216 -0
  48. fbuild/daemon/subprocess_manager.py +316 -0
  49. fbuild/deploy/__init__.py +17 -0
  50. fbuild/deploy/deployer.py +67 -0
  51. fbuild/deploy/deployer_esp32.py +314 -0
  52. fbuild/deploy/monitor.py +495 -0
  53. fbuild/interrupt_utils.py +34 -0
  54. fbuild/packages/__init__.py +53 -0
  55. fbuild/packages/archive_utils.py +1098 -0
  56. fbuild/packages/arduino_core.py +412 -0
  57. fbuild/packages/cache.py +249 -0
  58. fbuild/packages/downloader.py +366 -0
  59. fbuild/packages/framework_esp32.py +538 -0
  60. fbuild/packages/framework_teensy.py +346 -0
  61. fbuild/packages/github_utils.py +96 -0
  62. fbuild/packages/header_trampoline_cache.py +394 -0
  63. fbuild/packages/library_compiler.py +203 -0
  64. fbuild/packages/library_manager.py +549 -0
  65. fbuild/packages/library_manager_esp32.py +413 -0
  66. fbuild/packages/package.py +163 -0
  67. fbuild/packages/platform_esp32.py +383 -0
  68. fbuild/packages/platform_teensy.py +312 -0
  69. fbuild/packages/platform_utils.py +131 -0
  70. fbuild/packages/platformio_registry.py +325 -0
  71. fbuild/packages/sdk_utils.py +231 -0
  72. fbuild/packages/toolchain.py +436 -0
  73. fbuild/packages/toolchain_binaries.py +196 -0
  74. fbuild/packages/toolchain_esp32.py +484 -0
  75. fbuild/packages/toolchain_metadata.py +185 -0
  76. fbuild/packages/toolchain_teensy.py +404 -0
  77. fbuild/platform_configs/esp32.json +150 -0
  78. fbuild/platform_configs/esp32c2.json +144 -0
  79. fbuild/platform_configs/esp32c3.json +143 -0
  80. fbuild/platform_configs/esp32c5.json +151 -0
  81. fbuild/platform_configs/esp32c6.json +151 -0
  82. fbuild/platform_configs/esp32p4.json +149 -0
  83. fbuild/platform_configs/esp32s3.json +151 -0
  84. fbuild/platform_configs/imxrt1062.json +56 -0
  85. fbuild-1.1.0.dist-info/METADATA +447 -0
  86. fbuild-1.1.0.dist-info/RECORD +93 -0
  87. fbuild-1.1.0.dist-info/WHEEL +5 -0
  88. fbuild-1.1.0.dist-info/entry_points.txt +5 -0
  89. fbuild-1.1.0.dist-info/licenses/LICENSE +21 -0
  90. fbuild-1.1.0.dist-info/top_level.txt +2 -0
  91. fbuild_lint/__init__.py +0 -0
  92. fbuild_lint/ruff_plugins/__init__.py +0 -0
  93. fbuild_lint/ruff_plugins/keyboard_interrupt_checker.py +158 -0
@@ -0,0 +1,270 @@
+ """
+ Resource Lock Manager - Unified lock management for daemon operations.
+
+ This module provides the ResourceLockManager class which centralizes all
+ lock management logic that was previously scattered across daemon.py.
+ It provides context managers for automatic lock acquisition/release and
+ includes cleanup for unused locks to prevent memory leaks.
+ """
+
+ import logging
+ import threading
+ import time
+ from contextlib import contextmanager
+ from dataclasses import dataclass, field
+ from typing import Iterator
+
+
+ @dataclass
+ class LockInfo:
+     """Information about a lock for debugging and cleanup.
+
+     Attributes:
+         lock: The actual threading.Lock object
+         created_at: Unix timestamp when lock was created
+         last_acquired_at: Unix timestamp when lock was last acquired
+         acquisition_count: Number of times lock has been acquired
+     """
+
+     lock: threading.Lock
+     created_at: float = field(default_factory=time.time)
+     last_acquired_at: float | None = None
+     acquisition_count: int = 0
+
+
+ class ResourceLockManager:
+     """Manages per-port and per-project locks with automatic cleanup.
+
+     This class provides a unified interface for managing locks that protect
+     shared resources (serial ports and project directories). It uses context
+     managers to ensure locks are always properly released and includes
+     periodic cleanup to prevent memory leaks from abandoned locks.
+
+     Example:
+         >>> manager = ResourceLockManager()
+         >>>
+         >>> # Acquire port lock for serial operations
+         >>> with manager.acquire_port_lock("COM3"):
+         ...     # Perform serial operation
+         ...     upload_firmware_to_port("COM3")
+         >>>
+         >>> # Acquire project lock for build operations
+         >>> with manager.acquire_project_lock("/path/to/project"):
+         ...     # Perform build operation
+         ...     compile_project("/path/to/project")
+         >>>
+         >>> # Cleanup old unused locks
+         >>> manager.cleanup_unused_locks(older_than=3600)
+     """
+
+     def __init__(self) -> None:
+         """Initialize the ResourceLockManager."""
+         self._master_lock = threading.Lock()  # Protects the lock dictionaries
+         self._port_locks: dict[str, LockInfo] = {}  # Per-port locks
+         self._project_locks: dict[str, LockInfo] = {}  # Per-project locks
+
+     @contextmanager
+     def acquire_port_lock(self, port: str, blocking: bool = True) -> Iterator[None]:
+         """Acquire a lock for a specific serial port.
+
+         This ensures that only one operation can use a serial port at a time,
+         preventing conflicts between deploy and monitor operations.
+
+         Args:
+             port: Serial port identifier (e.g., "COM3", "/dev/ttyUSB0")
+             blocking: If True, wait for lock. If False, raise RuntimeError if unavailable.
+
+         Yields:
+             None (the lock is held for the duration of the context)
+
+         Raises:
+             RuntimeError: If blocking=False and lock is not available
+
+         Example:
+             >>> manager = ResourceLockManager()
+             >>> with manager.acquire_port_lock("COM3"):
+             ...     # Only one thread can be here at a time for COM3
+             ...     deploy_to_port("COM3")
+         """
+         lock_info = self._get_or_create_port_lock(port)
+         logging.debug(f"Acquiring port lock for: {port} (blocking={blocking})")
+
+         acquired = lock_info.lock.acquire(blocking=blocking)
+         if not acquired:
+             raise RuntimeError(f"Port lock unavailable for: {port}")
+
+         try:
+             lock_info.last_acquired_at = time.time()
+             lock_info.acquisition_count += 1
+             logging.debug(f"Port lock acquired for: {port} (count={lock_info.acquisition_count})")
+             yield
+         finally:
+             lock_info.lock.release()
+
+     @contextmanager
+     def acquire_project_lock(self, project_dir: str, blocking: bool = True) -> Iterator[None]:
+         """Acquire a lock for a specific project directory.
+
+         This ensures that only one build operation can run for a project at a time,
+         preventing file conflicts and race conditions during compilation.
+
+         Args:
+             project_dir: Absolute path to project directory
+             blocking: If True, wait for lock. If False, raise RuntimeError if unavailable.
+
+         Yields:
+             None (the lock is held for the duration of the context)
+
+         Raises:
+             RuntimeError: If blocking=False and lock is not available
+
+         Example:
+             >>> manager = ResourceLockManager()
+             >>> with manager.acquire_project_lock("/home/user/my_project"):
+             ...     # Only one thread can build this project at a time
+             ...     build_project("/home/user/my_project")
+         """
+         lock_info = self._get_or_create_project_lock(project_dir)
+         logging.debug(f"Acquiring project lock for: {project_dir} (blocking={blocking})")
+
+         acquired = lock_info.lock.acquire(blocking=blocking)
+         if not acquired:
+             raise RuntimeError(f"Project lock unavailable for: {project_dir}")
+
+         try:
+             lock_info.last_acquired_at = time.time()
+             lock_info.acquisition_count += 1
+             logging.debug(f"Project lock acquired for: {project_dir} (count={lock_info.acquisition_count})")
+             yield
+         finally:
+             lock_info.lock.release()
+
+     def _get_or_create_port_lock(self, port: str) -> LockInfo:
+         """Get or create a lock for the given port.
+
+         Thread-safe: Uses master lock to protect dictionary access.
+
+         Args:
+             port: Serial port identifier
+
+         Returns:
+             LockInfo for the port
+         """
+         with self._master_lock:
+             if port not in self._port_locks:
+                 self._port_locks[port] = LockInfo(lock=threading.Lock())
+             return self._port_locks[port]
+
+     def _get_or_create_project_lock(self, project_dir: str) -> LockInfo:
+         """Get or create a lock for the given project directory.
+
+         Thread-safe: Uses master lock to protect dictionary access.
+
+         Args:
+             project_dir: Project directory path
+
+         Returns:
+             LockInfo for the project
+         """
+         with self._master_lock:
+             if project_dir not in self._project_locks:
+                 self._project_locks[project_dir] = LockInfo(lock=threading.Lock())
+             return self._project_locks[project_dir]
+
+     def cleanup_unused_locks(self, older_than: float = 3600) -> int:
+         """Clean up locks that haven't been acquired recently.
+
+         This prevents memory leaks from locks that were created for operations
+         that are no longer running. A lock is considered unused if it hasn't
+         been acquired in the specified time period.
+
+         Args:
+             older_than: Time in seconds. Locks not acquired in this period are removed.
+                 Default is 3600 seconds (1 hour).
+
+         Returns:
+             Number of locks removed
+
+         Example:
+             >>> manager = ResourceLockManager()
+             >>> # Remove locks not used in the last hour
+             >>> removed = manager.cleanup_unused_locks(older_than=3600)
+             >>> print(f"Cleaned up {removed} unused locks")
+         """
+         current_time = time.time()
+         removed_count = 0
+
+         with self._master_lock:
+             # Clean up port locks
+             ports_to_remove = []
+             for port, lock_info in self._port_locks.items():
+                 if lock_info.last_acquired_at is None:
+                     # Lock was created but never acquired - remove if old enough
+                     if current_time - lock_info.created_at > older_than:
+                         ports_to_remove.append(port)
+                 elif current_time - lock_info.last_acquired_at > older_than:
+                     # Lock hasn't been acquired recently
+                     ports_to_remove.append(port)
+
+             for port in ports_to_remove:
+                 del self._port_locks[port]
+                 removed_count += 1
+
+             # Clean up project locks
+             projects_to_remove = []
+             for project_dir, lock_info in self._project_locks.items():
+                 if lock_info.last_acquired_at is None:
+                     # Lock was created but never acquired - remove if old enough
+                     if current_time - lock_info.created_at > older_than:
+                         projects_to_remove.append(project_dir)
+                 elif current_time - lock_info.last_acquired_at > older_than:
+                     # Lock hasn't been acquired recently
+                     projects_to_remove.append(project_dir)
+
+             for project_dir in projects_to_remove:
+                 del self._project_locks[project_dir]
+                 removed_count += 1
+
+         if removed_count > 0:
+             logging.info(f"Cleaned up {removed_count} unused locks")
+
+         return removed_count
+
+     def get_lock_status(self) -> dict[str, dict[str, int]]:
+         """Get current lock status for debugging.
+
+         Returns a snapshot of all locks and their acquisition counts.
+
+         Returns:
+             Dictionary with 'port_locks' and 'project_locks' keys, each containing
+             a mapping of resource identifier to acquisition count.
+
+         Example:
+             >>> manager = ResourceLockManager()
+             >>> status = manager.get_lock_status()
+             >>> print(f"Port locks: {status['port_locks']}")
+             >>> print(f"Project locks: {status['project_locks']}")
+         """
+         with self._master_lock:
+             return {
+                 "port_locks": {port: info.acquisition_count for port, info in self._port_locks.items()},
+                 "project_locks": {project: info.acquisition_count for project, info in self._project_locks.items()},
+             }
+
+     def get_lock_count(self) -> dict[str, int]:
+         """Get the total number of locks currently held.
+
+         Returns:
+             Dictionary with 'port_locks' and 'project_locks' counts.
+
+         Example:
+             >>> manager = ResourceLockManager()
+             >>> counts = manager.get_lock_count()
+             >>> print(f"Total port locks: {counts['port_locks']}")
+             >>> print(f"Total project locks: {counts['project_locks']}")
+         """
+         with self._master_lock:
+             return {
+                 "port_locks": len(self._port_locks),
+                 "project_locks": len(self._project_locks),
+             }
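
To see the pieces above working together, here is a minimal usage sketch of the ResourceLockManager API shown in this diff. It is not part of the package contents; it assumes fbuild 1.1.0 is installed, and the build and upload steps are hypothetical placeholders.

from fbuild.daemon.lock_manager import ResourceLockManager

manager = ResourceLockManager()

# Serialize builds for the same project directory.
with manager.acquire_project_lock("/path/to/project"):
    print("building project ...")  # the real build step would run here

# Fail fast instead of waiting when another operation already owns the port.
try:
    with manager.acquire_port_lock("COM3", blocking=False):
        print("uploading firmware ...")  # the real upload step would run here
except RuntimeError as exc:
    print(f"port busy: {exc}")

# Inspect and prune the lock bookkeeping.
print(manager.get_lock_status())  # acquisition counts per port / project
removed = manager.cleanup_unused_locks(older_than=0)  # 0-second idle threshold
print(f"removed {removed} idle locks")

Note that locks are keyed by the exact string passed in, so callers would need to normalize project paths (for example with os.path.abspath) before acquiring a project lock if the same directory can be spelled in more than one way.
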
@@ -0,0 +1,149 @@
+ """
+ Logging utilities for fbuild daemon.
+
+ This module provides decorators and utilities to reduce logging verbosity
+ while maintaining debuggability through automatic function entry/exit logging.
+ """
+
+ import functools
+ import logging
+ from typing import Any, Callable, TypeVar
+
+ from fbuild.interrupt_utils import handle_keyboard_interrupt_properly
+
+ # Type variable for generic decorator
+ F = TypeVar("F", bound=Callable[..., Any])
+
+
+ def log_function_calls(logger: logging.Logger | None = None, level: int = logging.DEBUG) -> Callable[[F], F]:
+     """Decorator to log function entry and exit.
+
+     This decorator automatically logs when a function is called and when it returns,
+     including the function name and arguments. This replaces the need for manual
+     logging statements at the beginning and end of functions.
+
+     Args:
+         logger: Logger instance to use (defaults to function's module logger)
+         level: Logging level to use (default: DEBUG)
+
+     Returns:
+         Decorated function
+
+     Example:
+         >>> @log_function_calls()
+         ... def my_function(arg1: str, arg2: int) -> bool:
+         ...     # Function logic here
+         ...     return True
+     """
+
+     def decorator(func: F) -> F:
+         # Get function's module logger if none provided
+         func_logger = logger or logging.getLogger(func.__module__)
+
+         @functools.wraps(func)
+         def wrapper(*args: Any, **kwargs: Any) -> Any:
+             # Format arguments for logging (truncate long strings)
+             args_repr = []
+             for arg in args:
+                 arg_str = repr(arg)
+                 if len(arg_str) > 100:
+                     arg_str = arg_str[:97] + "..."
+                 args_repr.append(arg_str)
+
+             kwargs_repr = []
+             for key, value in kwargs.items():
+                 value_str = repr(value)
+                 if len(value_str) > 100:
+                     value_str = value_str[:97] + "..."
+                 kwargs_repr.append(f"{key}={value_str}")
+
+             signature = ", ".join(args_repr + kwargs_repr)
+
+             # Log function entry
+             func_logger.log(level, f"→ {func.__name__}({signature})")
+
+             try:
+                 result = func(*args, **kwargs)
+                 # Log function exit (without result to avoid noise)
+                 func_logger.log(level, f"← {func.__name__}() completed")
+                 return result
+             except KeyboardInterrupt as ke:
+                 handle_keyboard_interrupt_properly(ke)
+             except Exception as e:
+                 # Log exception exit
+                 func_logger.log(level, f"← {func.__name__}() raised {type(e).__name__}: {e}")
+                 raise
+
+         return wrapper  # type: ignore
+
+     return decorator
+
+
+ def log_method_calls(logger: logging.Logger | None = None, level: int = logging.DEBUG) -> Callable[[F], F]:
+     """Decorator to log method entry and exit (for class methods).
+
+     Similar to log_function_calls but designed for class methods. Skips logging
+     the 'self' parameter to reduce noise.
+
+     Args:
+         logger: Logger instance to use (defaults to method's module logger)
+         level: Logging level to use (default: DEBUG)
+
+     Returns:
+         Decorated method
+
+     Example:
+         >>> class MyClass:
+         ...     @log_method_calls()
+         ...     def my_method(self, arg1: str) -> bool:
+         ...         return True
+     """
+
+     def decorator(func: F) -> F:
+         # Get method's module logger if none provided
+         func_logger = logger or logging.getLogger(func.__module__)
+
+         @functools.wraps(func)
+         def wrapper(*args: Any, **kwargs: Any) -> Any:
+             # Skip 'self' parameter (args[0])
+             args_repr = []
+             for i, arg in enumerate(args):
+                 if i == 0:  # Skip 'self'
+                     continue
+                 arg_str = repr(arg)
+                 if len(arg_str) > 100:
+                     arg_str = arg_str[:97] + "..."
+                 args_repr.append(arg_str)
+
+             kwargs_repr = []
+             for key, value in kwargs.items():
+                 value_str = repr(value)
+                 if len(value_str) > 100:
+                     value_str = value_str[:97] + "..."
+                 kwargs_repr.append(f"{key}={value_str}")
+
+             signature = ", ".join(args_repr + kwargs_repr)
+
+             # Get class name if available
+             class_name = ""
+             if args and hasattr(args[0], "__class__"):
+                 class_name = args[0].__class__.__name__ + "."
+
+             # Log method entry
+             func_logger.log(level, f"→ {class_name}{func.__name__}({signature})")
+
+             try:
+                 result = func(*args, **kwargs)
+                 # Log method exit (without result to avoid noise)
+                 func_logger.log(level, f"← {class_name}{func.__name__}() completed")
+                 return result
+             except KeyboardInterrupt as ke:
+                 handle_keyboard_interrupt_properly(ke)
+             except Exception as e:
+                 # Log exception exit
+                 func_logger.log(level, f"← {class_name}{func.__name__}() raised {type(e).__name__}: {e}")
+                 raise
+
+         return wrapper  # type: ignore
+
+     return decorator
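
To make the decorators concrete, here is a short sketch of how they would be applied. Again, this is not part of the package contents; the compile_sketch function and Deployer class are hypothetical, and it assumes fbuild 1.1.0 is installed so the imports resolve.

import logging

from fbuild.daemon.logging_utils import log_function_calls, log_method_calls

# Route DEBUG records to the console so the entry/exit lines are visible.
logging.basicConfig(level=logging.DEBUG, format="%(name)s: %(message)s")


@log_function_calls()
def compile_sketch(path: str, verbose: bool = False) -> bool:  # hypothetical example function
    return True


class Deployer:  # hypothetical example class
    @log_method_calls()
    def deploy(self, port: str) -> str:
        return f"deployed via {port}"


compile_sketch("/path/to/project", verbose=True)
Deployer().deploy("COM3")

# Expected log lines, roughly:
#   __main__: → compile_sketch('/path/to/project', verbose=True)
#   __main__: ← compile_sketch() completed
#   __main__: → Deployer.deploy('COM3')
#   __main__: ← Deployer.deploy() completed

Each argument is repr()'d and truncated to roughly 100 characters, so decorating functions that take large payloads still produces readable log lines.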