fbuild 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fbuild might be problematic; see the advisory details published alongside this release for more information.
- fbuild/__init__.py +0 -0
- fbuild/assets/example.txt +1 -0
- fbuild/build/__init__.py +117 -0
- fbuild/build/archive_creator.py +186 -0
- fbuild/build/binary_generator.py +444 -0
- fbuild/build/build_component_factory.py +131 -0
- fbuild/build/build_state.py +325 -0
- fbuild/build/build_utils.py +98 -0
- fbuild/build/compilation_executor.py +422 -0
- fbuild/build/compiler.py +165 -0
- fbuild/build/compiler_avr.py +574 -0
- fbuild/build/configurable_compiler.py +612 -0
- fbuild/build/configurable_linker.py +637 -0
- fbuild/build/flag_builder.py +186 -0
- fbuild/build/library_dependency_processor.py +185 -0
- fbuild/build/linker.py +708 -0
- fbuild/build/orchestrator.py +67 -0
- fbuild/build/orchestrator_avr.py +656 -0
- fbuild/build/orchestrator_esp32.py +797 -0
- fbuild/build/orchestrator_teensy.py +543 -0
- fbuild/build/source_compilation_orchestrator.py +220 -0
- fbuild/build/source_scanner.py +516 -0
- fbuild/cli.py +566 -0
- fbuild/cli_utils.py +312 -0
- fbuild/config/__init__.py +16 -0
- fbuild/config/board_config.py +457 -0
- fbuild/config/board_loader.py +92 -0
- fbuild/config/ini_parser.py +209 -0
- fbuild/config/mcu_specs.py +88 -0
- fbuild/daemon/__init__.py +34 -0
- fbuild/daemon/client.py +929 -0
- fbuild/daemon/compilation_queue.py +293 -0
- fbuild/daemon/daemon.py +474 -0
- fbuild/daemon/daemon_context.py +196 -0
- fbuild/daemon/error_collector.py +263 -0
- fbuild/daemon/file_cache.py +332 -0
- fbuild/daemon/lock_manager.py +270 -0
- fbuild/daemon/logging_utils.py +149 -0
- fbuild/daemon/messages.py +301 -0
- fbuild/daemon/operation_registry.py +288 -0
- fbuild/daemon/process_tracker.py +366 -0
- fbuild/daemon/processors/__init__.py +12 -0
- fbuild/daemon/processors/build_processor.py +157 -0
- fbuild/daemon/processors/deploy_processor.py +327 -0
- fbuild/daemon/processors/monitor_processor.py +146 -0
- fbuild/daemon/request_processor.py +401 -0
- fbuild/daemon/status_manager.py +216 -0
- fbuild/daemon/subprocess_manager.py +316 -0
- fbuild/deploy/__init__.py +17 -0
- fbuild/deploy/deployer.py +67 -0
- fbuild/deploy/deployer_esp32.py +314 -0
- fbuild/deploy/monitor.py +495 -0
- fbuild/interrupt_utils.py +34 -0
- fbuild/packages/__init__.py +53 -0
- fbuild/packages/archive_utils.py +1098 -0
- fbuild/packages/arduino_core.py +412 -0
- fbuild/packages/cache.py +249 -0
- fbuild/packages/downloader.py +366 -0
- fbuild/packages/framework_esp32.py +538 -0
- fbuild/packages/framework_teensy.py +346 -0
- fbuild/packages/github_utils.py +96 -0
- fbuild/packages/header_trampoline_cache.py +394 -0
- fbuild/packages/library_compiler.py +203 -0
- fbuild/packages/library_manager.py +549 -0
- fbuild/packages/library_manager_esp32.py +413 -0
- fbuild/packages/package.py +163 -0
- fbuild/packages/platform_esp32.py +383 -0
- fbuild/packages/platform_teensy.py +312 -0
- fbuild/packages/platform_utils.py +131 -0
- fbuild/packages/platformio_registry.py +325 -0
- fbuild/packages/sdk_utils.py +231 -0
- fbuild/packages/toolchain.py +436 -0
- fbuild/packages/toolchain_binaries.py +196 -0
- fbuild/packages/toolchain_esp32.py +484 -0
- fbuild/packages/toolchain_metadata.py +185 -0
- fbuild/packages/toolchain_teensy.py +404 -0
- fbuild/platform_configs/esp32.json +150 -0
- fbuild/platform_configs/esp32c2.json +144 -0
- fbuild/platform_configs/esp32c3.json +143 -0
- fbuild/platform_configs/esp32c5.json +151 -0
- fbuild/platform_configs/esp32c6.json +151 -0
- fbuild/platform_configs/esp32p4.json +149 -0
- fbuild/platform_configs/esp32s3.json +151 -0
- fbuild/platform_configs/imxrt1062.json +56 -0
- fbuild-1.1.0.dist-info/METADATA +447 -0
- fbuild-1.1.0.dist-info/RECORD +93 -0
- fbuild-1.1.0.dist-info/WHEEL +5 -0
- fbuild-1.1.0.dist-info/entry_points.txt +5 -0
- fbuild-1.1.0.dist-info/licenses/LICENSE +21 -0
- fbuild-1.1.0.dist-info/top_level.txt +2 -0
- fbuild_lint/__init__.py +0 -0
- fbuild_lint/ruff_plugins/__init__.py +0 -0
- fbuild_lint/ruff_plugins/keyboard_interrupt_checker.py +158 -0
|
@@ -0,0 +1,293 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Compilation Job Queue - Parallel compilation with worker pool.
|
|
3
|
+
|
|
4
|
+
This module provides a background compilation queue that enables parallel
|
|
5
|
+
compilation of source files using a worker thread pool. It replaces direct
|
|
6
|
+
synchronous subprocess.run() calls with asynchronous job submission.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import logging
|
|
10
|
+
import multiprocessing
|
|
11
|
+
import subprocess
|
|
12
|
+
import time
|
|
13
|
+
from dataclasses import dataclass
|
|
14
|
+
from enum import Enum
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
from queue import Empty, Queue
|
|
17
|
+
from threading import Lock, Thread
|
|
18
|
+
from typing import Callable, Optional
|
|
19
|
+
|
|
20
|
+
from ..interrupt_utils import handle_keyboard_interrupt_properly
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class JobState(Enum):
    """Lifecycle state of a compilation job."""

    PENDING = "pending"      # queued, not yet picked up by a worker
    RUNNING = "running"      # currently executing in a worker thread
    COMPLETED = "completed"  # compiler exited successfully
    FAILED = "failed"        # non-zero exit, timeout, or worker exception
    CANCELLED = "cancelled"  # withdrawn while still pending
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
@dataclass
class CompilationJob:
    """Single compilation job tracked by the compilation queue.

    Holds the compiler invocation plus the mutable execution record
    (state, exit code, captured output, timestamps) that workers fill in.
    """

    job_id: str                            # unique identifier for this job
    source_path: Path                      # source file being compiled
    output_path: Path                      # object file to be produced
    compiler_cmd: list[str]  # Full command including compiler path
    response_file: Optional[Path] = None  # Response file for includes
    state: JobState = JobState.PENDING     # current lifecycle state
    result_code: Optional[int] = None      # compiler exit code once finished
    stdout: str = ""                       # captured compiler stdout
    stderr: str = ""                       # captured compiler stderr / error text
    start_time: Optional[float] = None     # epoch seconds when execution began
    end_time: Optional[float] = None       # epoch seconds when execution ended

    def duration(self) -> Optional[float]:
        """Get job duration in seconds, or None if the job has not run to an end.

        Uses explicit ``is not None`` checks (rather than truthiness) so that
        a timestamp of exactly 0.0 is not mistaken for "not set".
        """
        if self.start_time is not None and self.end_time is not None:
            return self.end_time - self.start_time
        return None
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
class CompilationJobQueue:
    """Background compilation queue with worker pool.

    Jobs are submitted with :meth:`submit_job` and executed asynchronously by
    daemon worker threads started via :meth:`start`. Callers poll results with
    :meth:`get_job_status` or block on :meth:`wait_for_completion`. All access
    to the shared job registry (``self.jobs``) is serialized by ``jobs_lock``.
    """

    def __init__(self, num_workers: Optional[int] = None, job_timeout: float = 60.0):
        """Initialize compilation queue.

        Args:
            num_workers: Number of worker threads (default: CPU count)
            job_timeout: Per-job compiler timeout in seconds (default: 60)
        """
        self.num_workers = num_workers or multiprocessing.cpu_count()
        # Generalized from a hard-coded 60s so callers can tune slow targets.
        self.job_timeout = job_timeout
        self.job_queue: Queue[CompilationJob] = Queue()
        self.jobs: dict[str, CompilationJob] = {}
        self.jobs_lock = Lock()
        self.workers: list[Thread] = []
        self.running = False
        self.progress_callback: Optional[Callable[[CompilationJob], None]] = None

        logging.info(f"CompilationJobQueue initialized with {self.num_workers} workers")

    def start(self) -> None:
        """Start worker threads. Idempotent: a second call logs and returns."""
        if self.running:
            logging.warning("CompilationJobQueue already running")
            return

        self.running = True
        for i in range(self.num_workers):
            worker_name = f"CompilationWorker-{i}"
            # Daemon threads so a hung compile cannot block interpreter exit.
            worker = Thread(target=self._worker_loop, name=worker_name, daemon=True)
            worker.start()
            self.workers.append(worker)

        logging.info(f"Started {self.num_workers} compilation workers")

    def submit_job(self, job: CompilationJob) -> str:
        """Submit compilation job to queue.

        Args:
            job: Compilation job to submit

        Returns:
            Job ID
        """
        with self.jobs_lock:
            self.jobs[job.job_id] = job

        self.job_queue.put(job)
        current_depth = self.job_queue.qsize()

        # Back-pressure heuristic: warn when more than 2 jobs are queued
        # per worker (qsize() is approximate, which is fine for a warning).
        if current_depth > self.num_workers * 2:
            logging.warning(f"Queue depth high: {current_depth} pending jobs")

        return job.job_id

    def _worker_loop(self) -> None:
        """Worker thread main loop: pull jobs until the queue shuts down."""
        import threading

        thread_name = threading.current_thread().name

        while self.running:
            try:
                # Short timeout so the loop re-checks self.running regularly
                # and threads can exit promptly after shutdown().
                job = self.job_queue.get(timeout=1.0)
                self._execute_job(job)
            except Empty:
                continue
            except KeyboardInterrupt as ke:
                handle_keyboard_interrupt_properly(ke)
            except Exception as e:
                logging.error(f"Worker {thread_name} error: {e}", exc_info=True)

    def _notify_progress(self, job: CompilationJob) -> None:
        """Invoke the progress callback for *job*, swallowing callback errors.

        Callback failures are logged but never allowed to kill a worker.
        """
        if self.progress_callback:
            try:
                self.progress_callback(job)
            except KeyboardInterrupt as ke:
                handle_keyboard_interrupt_properly(ke)
            except Exception as e:
                logging.error(f"Progress callback error: {e}", exc_info=True)

    def _execute_job(self, job: CompilationJob) -> None:
        """Execute single compilation job.

        Args:
            job: Compilation job to execute
        """
        with self.jobs_lock:
            # Bug fix: cancel_jobs() only flips the state — the job object is
            # still sitting in the queue. Skip it here instead of running it
            # and silently overwriting CANCELLED with RUNNING.
            if job.state == JobState.CANCELLED:
                return
            job.state = JobState.RUNNING
            job.start_time = time.time()

        # Notify progress callback (job started)
        self._notify_progress(job)

        try:
            # Execute compiler subprocess (argument list, no shell)
            result = subprocess.run(
                job.compiler_cmd, capture_output=True, text=True, timeout=self.job_timeout
            )

            with self.jobs_lock:
                job.result_code = result.returncode
                job.stdout = result.stdout
                job.stderr = result.stderr
                job.end_time = time.time()

                if result.returncode == 0:
                    job.state = JobState.COMPLETED
                else:
                    job.state = JobState.FAILED
                    logging.error(
                        f"Job {job.job_id} failed with exit code {result.returncode}: {job.source_path.name}"
                    )

        except subprocess.TimeoutExpired:
            with self.jobs_lock:
                job.state = JobState.FAILED
                # :g keeps the default rendering as "60s", not "60.0s"
                job.stderr = f"Compilation timeout ({self.job_timeout:g}s exceeded)"
                job.end_time = time.time()
            logging.error(
                f"Job {job.job_id} timed out after {self.job_timeout:g}s: {job.source_path.name}"
            )

        except KeyboardInterrupt as ke:
            handle_keyboard_interrupt_properly(ke)

        except Exception as e:
            with self.jobs_lock:
                job.state = JobState.FAILED
                job.stderr = str(e)
                job.end_time = time.time()
            logging.error(f"Job {job.job_id} exception: {e}", exc_info=True)

        # Notify progress callback (job finished)
        self._notify_progress(job)

    def get_job_status(self, job_id: str) -> Optional[CompilationJob]:
        """Get status of a specific job.

        Args:
            job_id: Job ID to query

        Returns:
            Compilation job or None if not found
        """
        with self.jobs_lock:
            return self.jobs.get(job_id)

    def wait_for_completion(self, job_ids: list[str], timeout: Optional[float] = None) -> bool:
        """Wait for all specified jobs to complete.

        Args:
            job_ids: List of job IDs to wait for
            timeout: Maximum time to wait in seconds (None = infinite)

        Returns:
            True if all jobs completed successfully, False otherwise
        """
        start_time = time.time()

        while True:
            with self.jobs_lock:
                # Unknown IDs are ignored; an empty/unknown set is "done".
                all_done = all(
                    self.jobs[jid].state in (JobState.COMPLETED, JobState.FAILED, JobState.CANCELLED)
                    for jid in job_ids
                    if jid in self.jobs
                )
                if all_done:
                    success = all(
                        self.jobs[jid].state == JobState.COMPLETED
                        for jid in job_ids
                        if jid in self.jobs
                    )
                    completed_count = sum(
                        1 for jid in job_ids
                        if jid in self.jobs and self.jobs[jid].state == JobState.COMPLETED
                    )
                    failed_count = sum(
                        1 for jid in job_ids
                        if jid in self.jobs and self.jobs[jid].state == JobState.FAILED
                    )
                    if failed_count > 0:
                        logging.warning(
                            f"Compilation completed: {completed_count} succeeded, {failed_count} failed"
                        )
                    return success

            # Check timeout — explicit None test so timeout=0 means
            # "time out immediately" rather than "wait forever".
            if timeout is not None and (time.time() - start_time) > timeout:
                with self.jobs_lock:
                    remaining = sum(
                        1 for jid in job_ids
                        if jid in self.jobs and self.jobs[jid].state == JobState.PENDING
                    )
                logging.warning(
                    f"wait_for_completion timed out after {timeout}s ({remaining} jobs still pending)"
                )
                return False

            time.sleep(0.1)

    def cancel_jobs(self, job_ids: list[str]) -> None:
        """Cancel pending jobs (cannot cancel running jobs).

        Cancelled jobs are skipped by workers when dequeued (see
        ``_execute_job``).

        Args:
            job_ids: List of job IDs to cancel
        """
        with self.jobs_lock:
            cancelled_count = 0
            for jid in job_ids:
                if jid in self.jobs and self.jobs[jid].state == JobState.PENDING:
                    self.jobs[jid].state = JobState.CANCELLED
                    cancelled_count += 1

        if cancelled_count > 0:
            logging.info(f"Cancelled {cancelled_count} pending jobs")

    def get_statistics(self) -> dict[str, int]:
        """Get queue statistics.

        Returns:
            Dictionary with job counts by state
        """
        with self.jobs_lock:
            stats = {
                "total_jobs": len(self.jobs),
                "pending": sum(1 for j in self.jobs.values() if j.state == JobState.PENDING),
                "running": sum(1 for j in self.jobs.values() if j.state == JobState.RUNNING),
                "completed": sum(1 for j in self.jobs.values() if j.state == JobState.COMPLETED),
                "failed": sum(1 for j in self.jobs.values() if j.state == JobState.FAILED),
                "cancelled": sum(1 for j in self.jobs.values() if j.state == JobState.CANCELLED),
            }

        return stats

    def get_failed_jobs(self) -> list[CompilationJob]:
        """Get all failed jobs.

        Returns:
            List of failed compilation jobs
        """
        with self.jobs_lock:
            return [j for j in self.jobs.values() if j.state == JobState.FAILED]

    def clear_jobs(self) -> None:
        """Clear all completed/failed/cancelled jobs from registry."""
        with self.jobs_lock:
            to_remove = [
                jid for jid, job in self.jobs.items()
                if job.state in (JobState.COMPLETED, JobState.FAILED, JobState.CANCELLED)
            ]

            for jid in to_remove:
                del self.jobs[jid]

    def shutdown(self) -> None:
        """Shutdown worker pool.

        Signals workers to stop and joins each with a short grace period;
        daemon threads that do not finish in time are logged and abandoned.
        """
        logging.info("Shutting down CompilationJobQueue")
        self.running = False

        for worker in self.workers:
            worker.join(timeout=2.0)
            if worker.is_alive():
                logging.warning(f"Worker {worker.name} did not finish within timeout")

        self.workers.clear()
|