fbuild-1.2.8-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fbuild/__init__.py +390 -0
- fbuild/assets/example.txt +1 -0
- fbuild/build/__init__.py +117 -0
- fbuild/build/archive_creator.py +186 -0
- fbuild/build/binary_generator.py +444 -0
- fbuild/build/build_component_factory.py +131 -0
- fbuild/build/build_info_generator.py +624 -0
- fbuild/build/build_state.py +325 -0
- fbuild/build/build_utils.py +93 -0
- fbuild/build/compilation_executor.py +422 -0
- fbuild/build/compiler.py +165 -0
- fbuild/build/compiler_avr.py +574 -0
- fbuild/build/configurable_compiler.py +664 -0
- fbuild/build/configurable_linker.py +637 -0
- fbuild/build/flag_builder.py +214 -0
- fbuild/build/library_dependency_processor.py +185 -0
- fbuild/build/linker.py +708 -0
- fbuild/build/orchestrator.py +67 -0
- fbuild/build/orchestrator_avr.py +651 -0
- fbuild/build/orchestrator_esp32.py +878 -0
- fbuild/build/orchestrator_rp2040.py +719 -0
- fbuild/build/orchestrator_stm32.py +696 -0
- fbuild/build/orchestrator_teensy.py +580 -0
- fbuild/build/source_compilation_orchestrator.py +218 -0
- fbuild/build/source_scanner.py +516 -0
- fbuild/cli.py +717 -0
- fbuild/cli_utils.py +314 -0
- fbuild/config/__init__.py +16 -0
- fbuild/config/board_config.py +542 -0
- fbuild/config/board_loader.py +92 -0
- fbuild/config/ini_parser.py +369 -0
- fbuild/config/mcu_specs.py +88 -0
- fbuild/daemon/__init__.py +42 -0
- fbuild/daemon/async_client.py +531 -0
- fbuild/daemon/client.py +1505 -0
- fbuild/daemon/compilation_queue.py +293 -0
- fbuild/daemon/configuration_lock.py +865 -0
- fbuild/daemon/daemon.py +585 -0
- fbuild/daemon/daemon_context.py +293 -0
- fbuild/daemon/error_collector.py +263 -0
- fbuild/daemon/file_cache.py +332 -0
- fbuild/daemon/firmware_ledger.py +546 -0
- fbuild/daemon/lock_manager.py +508 -0
- fbuild/daemon/logging_utils.py +149 -0
- fbuild/daemon/messages.py +957 -0
- fbuild/daemon/operation_registry.py +288 -0
- fbuild/daemon/port_state_manager.py +249 -0
- fbuild/daemon/process_tracker.py +366 -0
- fbuild/daemon/processors/__init__.py +18 -0
- fbuild/daemon/processors/build_processor.py +248 -0
- fbuild/daemon/processors/deploy_processor.py +664 -0
- fbuild/daemon/processors/install_deps_processor.py +431 -0
- fbuild/daemon/processors/locking_processor.py +777 -0
- fbuild/daemon/processors/monitor_processor.py +285 -0
- fbuild/daemon/request_processor.py +457 -0
- fbuild/daemon/shared_serial.py +819 -0
- fbuild/daemon/status_manager.py +238 -0
- fbuild/daemon/subprocess_manager.py +316 -0
- fbuild/deploy/__init__.py +21 -0
- fbuild/deploy/deployer.py +67 -0
- fbuild/deploy/deployer_esp32.py +310 -0
- fbuild/deploy/docker_utils.py +315 -0
- fbuild/deploy/monitor.py +519 -0
- fbuild/deploy/qemu_runner.py +603 -0
- fbuild/interrupt_utils.py +34 -0
- fbuild/ledger/__init__.py +52 -0
- fbuild/ledger/board_ledger.py +560 -0
- fbuild/output.py +352 -0
- fbuild/packages/__init__.py +66 -0
- fbuild/packages/archive_utils.py +1098 -0
- fbuild/packages/arduino_core.py +412 -0
- fbuild/packages/cache.py +256 -0
- fbuild/packages/concurrent_manager.py +510 -0
- fbuild/packages/downloader.py +518 -0
- fbuild/packages/fingerprint.py +423 -0
- fbuild/packages/framework_esp32.py +538 -0
- fbuild/packages/framework_rp2040.py +349 -0
- fbuild/packages/framework_stm32.py +459 -0
- fbuild/packages/framework_teensy.py +346 -0
- fbuild/packages/github_utils.py +96 -0
- fbuild/packages/header_trampoline_cache.py +394 -0
- fbuild/packages/library_compiler.py +203 -0
- fbuild/packages/library_manager.py +549 -0
- fbuild/packages/library_manager_esp32.py +725 -0
- fbuild/packages/package.py +163 -0
- fbuild/packages/platform_esp32.py +383 -0
- fbuild/packages/platform_rp2040.py +400 -0
- fbuild/packages/platform_stm32.py +581 -0
- fbuild/packages/platform_teensy.py +312 -0
- fbuild/packages/platform_utils.py +131 -0
- fbuild/packages/platformio_registry.py +369 -0
- fbuild/packages/sdk_utils.py +231 -0
- fbuild/packages/toolchain.py +436 -0
- fbuild/packages/toolchain_binaries.py +196 -0
- fbuild/packages/toolchain_esp32.py +489 -0
- fbuild/packages/toolchain_metadata.py +185 -0
- fbuild/packages/toolchain_rp2040.py +436 -0
- fbuild/packages/toolchain_stm32.py +417 -0
- fbuild/packages/toolchain_teensy.py +404 -0
- fbuild/platform_configs/esp32.json +150 -0
- fbuild/platform_configs/esp32c2.json +144 -0
- fbuild/platform_configs/esp32c3.json +143 -0
- fbuild/platform_configs/esp32c5.json +151 -0
- fbuild/platform_configs/esp32c6.json +151 -0
- fbuild/platform_configs/esp32p4.json +149 -0
- fbuild/platform_configs/esp32s3.json +151 -0
- fbuild/platform_configs/imxrt1062.json +56 -0
- fbuild/platform_configs/rp2040.json +70 -0
- fbuild/platform_configs/rp2350.json +76 -0
- fbuild/platform_configs/stm32f1.json +59 -0
- fbuild/platform_configs/stm32f4.json +63 -0
- fbuild/py.typed +0 -0
- fbuild-1.2.8.dist-info/METADATA +468 -0
- fbuild-1.2.8.dist-info/RECORD +121 -0
- fbuild-1.2.8.dist-info/WHEEL +5 -0
- fbuild-1.2.8.dist-info/entry_points.txt +5 -0
- fbuild-1.2.8.dist-info/licenses/LICENSE +21 -0
- fbuild-1.2.8.dist-info/top_level.txt +2 -0
- fbuild_lint/__init__.py +0 -0
- fbuild_lint/ruff_plugins/__init__.py +0 -0
- fbuild_lint/ruff_plugins/keyboard_interrupt_checker.py +158 -0
fbuild/daemon/daemon_context.py
@@ -0,0 +1,293 @@
+"""
+Daemon Context - Centralized state management for fbuild daemon.
+
+This module provides the DaemonContext class which encapsulates all daemon state
+that was previously stored in global variables. This improves testability,
+makes dependencies explicit, and eliminates global mutable state.
+"""
+
+import threading
+from dataclasses import dataclass, field
+from pathlib import Path
+
+from fbuild.daemon.async_client import ClientConnectionManager
+from fbuild.daemon.compilation_queue import CompilationJobQueue
+from fbuild.daemon.configuration_lock import ConfigurationLockManager
+from fbuild.daemon.error_collector import ErrorCollector
+from fbuild.daemon.file_cache import FileCache
+from fbuild.daemon.firmware_ledger import FirmwareLedger
+from fbuild.daemon.lock_manager import ResourceLockManager
+from fbuild.daemon.operation_registry import OperationRegistry
+from fbuild.daemon.port_state_manager import PortStateManager
+from fbuild.daemon.shared_serial import SharedSerialManager
+from fbuild.daemon.status_manager import StatusManager
+from fbuild.daemon.subprocess_manager import SubprocessManager
+
+
+@dataclass
+class DaemonContext:
+    """Centralized context for all daemon state and subsystems.
+
+    This class replaces the 12 global variables in daemon.py with a single
+    context object that can be passed to functions explicitly. This improves:
+    - Testability: Mock the entire context in tests
+    - Clarity: Dependencies are explicit in function signatures
+    - Thread-safety: Locks are properly encapsulated
+    - Lifecycle: Cleanup is centralized in one place
+
+    Attributes:
+        daemon_pid: Process ID of the daemon
+        daemon_started_at: Unix timestamp when daemon was started
+        compilation_queue: Queue for managing parallel compilation jobs
+        operation_registry: Registry for tracking active/completed operations
+        subprocess_manager: Manager for daemon-spawned subprocesses
+        file_cache: Cache for file modification times
+        error_collector: Global error collector for operations
+        lock_manager: Unified resource lock manager for ports and projects
+        port_state_manager: Manager for tracking COM port states
+        status_manager: Manager for daemon status file operations
+        client_manager: Manager for async client connections with heartbeat
+        configuration_lock_manager: Centralized locking for (project, env, port) configs
+        firmware_ledger: Tracks deployed firmware on devices to avoid re-upload
+        shared_serial_manager: Manages shared serial port access for multiple clients
+        operation_in_progress: Flag indicating if any operation is running
+        operation_lock: Lock protecting the operation_in_progress flag
+    """
+
+    # Daemon identity
+    daemon_pid: int
+    daemon_started_at: float
+
+    # Subsystems
+    compilation_queue: CompilationJobQueue
+    operation_registry: OperationRegistry
+    subprocess_manager: SubprocessManager
+    file_cache: FileCache
+    error_collector: ErrorCollector
+    lock_manager: ResourceLockManager
+    port_state_manager: PortStateManager
+    status_manager: StatusManager
+
+    # New managers for centralized locking and shared state (Iteration 1-2)
+    client_manager: ClientConnectionManager
+    configuration_lock_manager: ConfigurationLockManager
+    firmware_ledger: FirmwareLedger
+    shared_serial_manager: SharedSerialManager
+
+    # Operation state
+    operation_in_progress: bool = False
+    operation_lock: threading.Lock = field(default_factory=threading.Lock)
+
+
+def create_daemon_context(
+    daemon_pid: int,
+    daemon_started_at: float,
+    num_workers: int,
+    file_cache_path: Path,
+    status_file_path: Path,
+) -> DaemonContext:
+    """Factory function to create and initialize a DaemonContext.
+
+    This function initializes all daemon subsystems and returns a fully
+    configured DaemonContext ready for use.
+
+    Args:
+        daemon_pid: Process ID of the daemon
+        daemon_started_at: Unix timestamp when daemon started
+        num_workers: Number of compilation worker threads
+        file_cache_path: Path to the file cache JSON file
+        status_file_path: Path to the status file
+
+    Returns:
+        Fully initialized DaemonContext
+
+    Example:
+        >>> import os
+        >>> import time
+        >>> from pathlib import Path
+        >>>
+        >>> context = create_daemon_context(
+        ...     daemon_pid=os.getpid(),
+        ...     daemon_started_at=time.time(),
+        ...     num_workers=4,
+        ...     file_cache_path=Path.home() / ".fbuild" / "daemon" / "file_cache.json",
+        ...     status_file_path=Path.home() / ".fbuild" / "daemon" / "daemon_status.json"
+        ... )
+        >>> # Use context in request handlers
+        >>> process_build_request(request, context)
+    """
+    import logging
+
+    logging.info("Initializing daemon context...")
+
+    # Initialize compilation queue with worker pool
+    compilation_queue = CompilationJobQueue(num_workers=num_workers)
+    compilation_queue.start()
+    logging.info(f"Compilation queue started with {num_workers} workers")
+
+    # Initialize operation registry
+    logging.debug("Creating operation registry (max_history=100)...")
+    operation_registry = OperationRegistry(max_history=100)
+    logging.info("Operation registry initialized")
+
+    # Initialize subprocess manager
+    subprocess_manager = SubprocessManager()
+    logging.info("Subprocess manager initialized")
+
+    # Initialize file cache
+    logging.debug(f"Creating file cache (cache_file={file_cache_path})...")
+    file_cache = FileCache(cache_file=file_cache_path)
+    logging.info("File cache initialized")
+
+    # Initialize error collector
+    error_collector = ErrorCollector()
+    logging.info("Error collector initialized")
+
+    # Initialize lock manager
+    lock_manager = ResourceLockManager()
+    logging.info("Resource lock manager initialized")
+
+    # Initialize port state manager
+    port_state_manager = PortStateManager()
+    logging.info("Port state manager initialized")
+
+    # Initialize status manager (with port state manager and lock manager for status visibility)
+    logging.debug(f"Creating status manager (status_file={status_file_path})...")
+    status_manager = StatusManager(
+        status_file=status_file_path,
+        daemon_pid=daemon_pid,
+        daemon_started_at=daemon_started_at,
+        port_state_manager=port_state_manager,
+        lock_manager=lock_manager,
+    )
+    logging.info("Status manager initialized")
+
+    # Initialize new managers for centralized locking and shared state (Iteration 1-2)
+    client_manager = ClientConnectionManager()
+    logging.info("Client connection manager initialized")
+
+    configuration_lock_manager = ConfigurationLockManager()
+    logging.info("Configuration lock manager initialized")
+
+    firmware_ledger = FirmwareLedger()
+    logging.info(f"Firmware ledger initialized (path={firmware_ledger.ledger_path})")
+
+    shared_serial_manager = SharedSerialManager()
+    logging.info("Shared serial manager initialized")
+
+    # Register cleanup callbacks: when a client disconnects, release their resources
+    def on_client_disconnect(client_id: str) -> None:
+        """Cleanup callback for when a client disconnects."""
+        logging.info(f"Cleaning up resources for disconnected client: {client_id}")
+        # Release all configuration locks held by this client
+        released = configuration_lock_manager.release_all_client_locks(client_id)
+        if released > 0:
+            logging.info(f"Released {released} configuration locks for client {client_id}")
+        # Disconnect from shared serial sessions
+        shared_serial_manager.disconnect_client(client_id)
+
+    client_manager.register_cleanup_callback(on_client_disconnect)
+    logging.info("Client cleanup callback registered")
+
+    # Create context
+    context = DaemonContext(
+        daemon_pid=daemon_pid,
+        daemon_started_at=daemon_started_at,
+        compilation_queue=compilation_queue,
+        operation_registry=operation_registry,
+        subprocess_manager=subprocess_manager,
+        file_cache=file_cache,
+        error_collector=error_collector,
+        lock_manager=lock_manager,
+        port_state_manager=port_state_manager,
+        status_manager=status_manager,
+        client_manager=client_manager,
+        configuration_lock_manager=configuration_lock_manager,
+        firmware_ledger=firmware_ledger,
+        shared_serial_manager=shared_serial_manager,
+    )
+
+    logging.info("✅ Daemon context initialized successfully")
+    return context
+
+
+def cleanup_daemon_context(context: DaemonContext) -> None:
+    """Cleanup and shutdown all daemon subsystems in the context.
+
+    This function should be called during daemon shutdown to ensure all
+    resources are properly released.
+
+    Args:
+        context: The DaemonContext to clean up
+
+    Example:
+        >>> try:
+        ...     run_daemon(context)
+        ... finally:
+        ...     cleanup_daemon_context(context)
+    """
+    import logging
+
+    logging.info("Shutting down daemon context...")
+
+    # Shutdown shared serial manager first (closes all serial ports)
+    if context.shared_serial_manager:
+        try:
+            context.shared_serial_manager.shutdown()
+            logging.info("Shared serial manager shut down")
+        except KeyboardInterrupt:  # noqa: KBI002
+            logging.warning("KeyboardInterrupt during shared serial manager shutdown")
+            raise
+        except Exception as e:
+            logging.error(f"Error shutting down shared serial manager: {e}")
+
+    # Clear all configuration locks
+    if context.configuration_lock_manager:
+        try:
+            cleared = context.configuration_lock_manager.clear_all_locks()
+            logging.info(f"Cleared {cleared} configuration locks during shutdown")
+        except KeyboardInterrupt:  # noqa: KBI002
+            logging.warning("KeyboardInterrupt during configuration lock manager cleanup")
+            raise
+        except Exception as e:
+            logging.error(f"Error clearing configuration locks: {e}")
+
+    # Clear all client connections
+    if context.client_manager:
+        try:
+            cleared = context.client_manager.clear_all_clients()
+            logging.info(f"Cleared {cleared} client connections during shutdown")
+        except KeyboardInterrupt:  # noqa: KBI002
+            logging.warning("KeyboardInterrupt during client manager cleanup")
+            raise
+        except Exception as e:
+            logging.error(f"Error clearing client connections: {e}")
+
+    # Shutdown compilation queue
+    if context.compilation_queue:
+        try:
+            context.compilation_queue.shutdown()
+            logging.info("Compilation queue shut down")
+        except KeyboardInterrupt:  # noqa: KBI002
+            logging.warning("KeyboardInterrupt during compilation queue shutdown")
+            raise
+        except Exception as e:
+            logging.error(f"Error shutting down compilation queue: {e}")
+
+    # Clear all locks during shutdown
+    if context.lock_manager:
+        try:
+            cleared = context.lock_manager.clear_all_locks()
+            logging.info(f"Cleared {cleared} locks during shutdown")
+        except KeyboardInterrupt:  # noqa: KBI002
+            logging.warning("KeyboardInterrupt during lock manager cleanup")
+            raise
+        except Exception as e:
+            logging.error(f"Error clearing locks: {e}")
+
+    # Log cleanup of other subsystems (they don't have explicit shutdown methods)
+    logging.debug("Cleaning up subprocess manager...")
+    logging.debug("Cleaning up error collector...")
+    logging.debug("Firmware ledger persists to disk - no cleanup needed")
+
+    logging.info("✅ Daemon context cleaned up")
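For orientation, a minimal lifecycle sketch follows. It is hypothetical wiring assembled only from the docstring examples above: serve_requests is a placeholder for the daemon's actual request loop (which lives elsewhere in the package and is not shown in this diff), and the worker count and cache paths reuse the illustrative values from the docstrings.

# Minimal lifecycle sketch (hypothetical): create the context, run the daemon,
# and always tear the context down, mirroring the docstring examples above.
import os
import time
from pathlib import Path

from fbuild.daemon.daemon_context import (
    DaemonContext,
    cleanup_daemon_context,
    create_daemon_context,
)


def serve_requests(context: DaemonContext) -> None:
    """Stand-in for the daemon's real request loop (not part of this module)."""
    # A real loop would dispatch incoming requests to processors, passing
    # `context` explicitly instead of reaching for module-level globals.


daemon_dir = Path.home() / ".fbuild" / "daemon"
context = create_daemon_context(
    daemon_pid=os.getpid(),
    daemon_started_at=time.time(),
    num_workers=4,
    file_cache_path=daemon_dir / "file_cache.json",
    status_file_path=daemon_dir / "daemon_status.json",
)
try:
    serve_requests(context)
finally:
    cleanup_daemon_context(context)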
fbuild/daemon/error_collector.py
@@ -0,0 +1,263 @@
+"""
+Error Collector - Structured error collection for async operations.
+
+This module provides error collection and aggregation for asynchronous build
+operations, replacing simple exception handling with structured error tracking.
+"""
+
+import logging
+import threading
+import time
+from dataclasses import dataclass, field
+from enum import Enum
+from typing import Optional
+
+
+class ErrorSeverity(Enum):
+    """Severity level of a build error."""
+
+    WARNING = "warning"
+    ERROR = "error"
+    FATAL = "fatal"
+
+
+@dataclass
+class BuildError:
+    """Single build error."""
+
+    severity: ErrorSeverity
+    phase: str  # "download", "compile", "link", "upload"
+    file_path: Optional[str]
+    error_message: str
+    stderr: Optional[str] = None
+    stdout: Optional[str] = None
+    timestamp: float = field(default_factory=time.time)
+
+    def format(self) -> str:
+        """Format error as human-readable string.
+
+        Returns:
+            Formatted error message
+        """
+        lines = [f"[{self.severity.value.upper()}] {self.phase}: {self.error_message}"]
+
+        if self.file_path:
+            lines.append(f" File: {self.file_path}")
+
+        if self.stderr:
+            # Truncate stderr to reasonable length
+            stderr_preview = self.stderr[:500]
+            if len(self.stderr) > 500:
+                stderr_preview += "... (truncated)"
+            lines.append(f" stderr: {stderr_preview}")
+
+        return "\n".join(lines)
+
+
+class ErrorCollector:
+    """Collects errors during async build operations."""
+
+    def __init__(self, max_errors: int = 100):
+        """Initialize error collector.
+
+        Args:
+            max_errors: Maximum number of errors to collect
+        """
+        self.errors: list[BuildError] = []
+        self.lock = threading.RLock()  # reentrant: format_errors() calls get_error_count() while holding the lock
+        self.max_errors = max_errors
+
+        logging.debug(f"ErrorCollector initialized (max_errors={max_errors})")
+
+    def add_error(self, error: BuildError) -> None:
+        """Add error to collection.
+
+        Args:
+            error: Build error to add
+        """
+        with self.lock:
+            if len(self.errors) >= self.max_errors:
+                logging.warning(f"ErrorCollector full ({self.max_errors} errors), dropping oldest")
+                self.errors.pop(0)
+
+            self.errors.append(error)
+
+    def get_errors(self, severity: Optional[ErrorSeverity] = None) -> list[BuildError]:
+        """Get all errors, optionally filtered by severity.
+
+        Args:
+            severity: Filter by severity (None = all errors)
+
+        Returns:
+            List of build errors
+        """
+        logging.debug(f"Retrieving errors (severity filter: {severity.value if severity else 'None'})")
+        with self.lock:
+            if severity:
+                filtered = [e for e in self.errors if e.severity == severity]
+                logging.debug(f"Filtered {len(filtered)} errors by severity {severity.value} (total: {len(self.errors)})")
+                return filtered
+            logging.debug(f"Returning all {len(self.errors)} errors")
+            return self.errors.copy()
+
+    def get_errors_by_phase(self, phase: str) -> list[BuildError]:
+        """Get errors for a specific phase.
+
+        Args:
+            phase: Phase to filter by
+
+        Returns:
+            List of build errors for the phase
+        """
+        with self.lock:
+            phase_errors = [e for e in self.errors if e.phase == phase]
+            logging.debug(f"Found {len(phase_errors)} errors in phase '{phase}' (total: {len(self.errors)})")
+            return phase_errors
+
+    def has_fatal_errors(self) -> bool:
+        """Check if any fatal errors occurred.
+
+        Returns:
+            True if fatal errors exist
+        """
+        with self.lock:
+            has_fatal = any(e.severity == ErrorSeverity.FATAL for e in self.errors)
+            fatal_count = sum(1 for e in self.errors if e.severity == ErrorSeverity.FATAL)
+            logging.debug(f"Fatal error check result: {has_fatal} ({fatal_count} fatal errors)")
+            return has_fatal
+
+    def has_errors(self) -> bool:
+        """Check if any errors (non-warning) occurred.
+
+        Returns:
+            True if errors exist
+        """
+        logging.debug("Checking for errors (non-warning)")
+        with self.lock:
+            has_errs = any(e.severity in (ErrorSeverity.ERROR, ErrorSeverity.FATAL) for e in self.errors)
+            error_count = sum(1 for e in self.errors if e.severity in (ErrorSeverity.ERROR, ErrorSeverity.FATAL))
+            logging.debug(f"Error check result: {has_errs} ({error_count} errors or fatal)")
+            return has_errs
+
+    def has_warnings(self) -> bool:
+        """Check if any warnings occurred.
+
+        Returns:
+            True if warnings exist
+        """
+        with self.lock:
+            has_warn = any(e.severity == ErrorSeverity.WARNING for e in self.errors)
+            warning_count = sum(1 for e in self.errors if e.severity == ErrorSeverity.WARNING)
+            logging.debug(f"Warning check result: {has_warn} ({warning_count} warnings)")
+            return has_warn
+
+    def get_error_count(self) -> dict[str, int]:
+        """Get count of errors by severity.
+
+        Returns:
+            Dictionary with counts by severity
+        """
+        with self.lock:
+            counts = {
+                "warnings": sum(1 for e in self.errors if e.severity == ErrorSeverity.WARNING),
+                "errors": sum(1 for e in self.errors if e.severity == ErrorSeverity.ERROR),
+                "fatal": sum(1 for e in self.errors if e.severity == ErrorSeverity.FATAL),
+                "total": len(self.errors),
+            }
+            logging.debug(f"Error counts: {counts['total']} total ({counts['fatal']} fatal, {counts['errors']} errors, {counts['warnings']} warnings)")
+            return counts
+
+    def format_errors(self, max_errors: Optional[int] = None) -> str:
+        """Format all errors as human-readable string.
+
+        Args:
+            max_errors: Maximum number of errors to include (None = all)
+
+        Returns:
+            Formatted error report
+        """
+        logging.debug(f"Formatting errors (max_errors: {max_errors})")
+        with self.lock:
+            if not self.errors:
+                return "No errors"
+
+            errors_to_show = self.errors if max_errors is None else self.errors[:max_errors]
+            logging.debug(f"Formatting {len(errors_to_show)} errors (total: {len(self.errors)})")
+            lines = []
+
+            for err in errors_to_show:
+                lines.append(err.format())
+
+            if max_errors and len(self.errors) > max_errors:
+                lines.append(f"\n... and {len(self.errors) - max_errors} more errors")
+
+            # Add summary
+            counts = self.get_error_count()
+            summary = f"\nSummary: {counts['fatal']} fatal, {counts['errors']} errors, {counts['warnings']} warnings"
+            lines.append(summary)
+
+            formatted = "\n\n".join(lines)
+            logging.debug(f"Error formatting complete: {len(lines)} sections, {len(formatted)} characters")
+            return formatted
+
+    def format_summary(self) -> str:
+        """Format a brief summary of errors.
+
+        Returns:
+            Brief error summary
+        """
+        counts = self.get_error_count()
+        if counts["total"] == 0:
+            return "No errors"
+
+        parts = []
+        if counts["fatal"] > 0:
+            parts.append(f"{counts['fatal']} fatal")
+        if counts["errors"] > 0:
+            parts.append(f"{counts['errors']} errors")
+        if counts["warnings"] > 0:
+            parts.append(f"{counts['warnings']} warnings")
+
+        summary = ", ".join(parts)
+        return summary
+
+    def clear(self) -> None:
+        """Clear all collected errors."""
+        with self.lock:
+            error_count = len(self.errors)
+            self.errors.clear()
+
+        if error_count > 0:
+            logging.info(f"Cleared {error_count} errors")
+
+    def get_first_fatal_error(self) -> Optional[BuildError]:
+        """Get the first fatal error encountered.
+
+        Returns:
+            First fatal error or None
+        """
+        with self.lock:
+            for error in self.errors:
+                if error.severity == ErrorSeverity.FATAL:
+                    return error
+            return None
+
+    def get_compilation_errors(self) -> list[BuildError]:
+        """Get all compilation-phase errors.
+
+        Returns:
+            List of compilation errors
+        """
+        compilation_errors = self.get_errors_by_phase("compile")
+        logging.debug(f"Found {len(compilation_errors)} compilation errors")
+        return compilation_errors
+
+    def get_link_errors(self) -> list[BuildError]:
+        """Get all link-phase errors.
+
+        Returns:
+            List of link errors
+        """
+        link_errors = self.get_errors_by_phase("link")
+        logging.debug(f"Found {len(link_errors)} link errors")
+        return link_errors