fbuild-1.2.8-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fbuild/__init__.py +390 -0
- fbuild/assets/example.txt +1 -0
- fbuild/build/__init__.py +117 -0
- fbuild/build/archive_creator.py +186 -0
- fbuild/build/binary_generator.py +444 -0
- fbuild/build/build_component_factory.py +131 -0
- fbuild/build/build_info_generator.py +624 -0
- fbuild/build/build_state.py +325 -0
- fbuild/build/build_utils.py +93 -0
- fbuild/build/compilation_executor.py +422 -0
- fbuild/build/compiler.py +165 -0
- fbuild/build/compiler_avr.py +574 -0
- fbuild/build/configurable_compiler.py +664 -0
- fbuild/build/configurable_linker.py +637 -0
- fbuild/build/flag_builder.py +214 -0
- fbuild/build/library_dependency_processor.py +185 -0
- fbuild/build/linker.py +708 -0
- fbuild/build/orchestrator.py +67 -0
- fbuild/build/orchestrator_avr.py +651 -0
- fbuild/build/orchestrator_esp32.py +878 -0
- fbuild/build/orchestrator_rp2040.py +719 -0
- fbuild/build/orchestrator_stm32.py +696 -0
- fbuild/build/orchestrator_teensy.py +580 -0
- fbuild/build/source_compilation_orchestrator.py +218 -0
- fbuild/build/source_scanner.py +516 -0
- fbuild/cli.py +717 -0
- fbuild/cli_utils.py +314 -0
- fbuild/config/__init__.py +16 -0
- fbuild/config/board_config.py +542 -0
- fbuild/config/board_loader.py +92 -0
- fbuild/config/ini_parser.py +369 -0
- fbuild/config/mcu_specs.py +88 -0
- fbuild/daemon/__init__.py +42 -0
- fbuild/daemon/async_client.py +531 -0
- fbuild/daemon/client.py +1505 -0
- fbuild/daemon/compilation_queue.py +293 -0
- fbuild/daemon/configuration_lock.py +865 -0
- fbuild/daemon/daemon.py +585 -0
- fbuild/daemon/daemon_context.py +293 -0
- fbuild/daemon/error_collector.py +263 -0
- fbuild/daemon/file_cache.py +332 -0
- fbuild/daemon/firmware_ledger.py +546 -0
- fbuild/daemon/lock_manager.py +508 -0
- fbuild/daemon/logging_utils.py +149 -0
- fbuild/daemon/messages.py +957 -0
- fbuild/daemon/operation_registry.py +288 -0
- fbuild/daemon/port_state_manager.py +249 -0
- fbuild/daemon/process_tracker.py +366 -0
- fbuild/daemon/processors/__init__.py +18 -0
- fbuild/daemon/processors/build_processor.py +248 -0
- fbuild/daemon/processors/deploy_processor.py +664 -0
- fbuild/daemon/processors/install_deps_processor.py +431 -0
- fbuild/daemon/processors/locking_processor.py +777 -0
- fbuild/daemon/processors/monitor_processor.py +285 -0
- fbuild/daemon/request_processor.py +457 -0
- fbuild/daemon/shared_serial.py +819 -0
- fbuild/daemon/status_manager.py +238 -0
- fbuild/daemon/subprocess_manager.py +316 -0
- fbuild/deploy/__init__.py +21 -0
- fbuild/deploy/deployer.py +67 -0
- fbuild/deploy/deployer_esp32.py +310 -0
- fbuild/deploy/docker_utils.py +315 -0
- fbuild/deploy/monitor.py +519 -0
- fbuild/deploy/qemu_runner.py +603 -0
- fbuild/interrupt_utils.py +34 -0
- fbuild/ledger/__init__.py +52 -0
- fbuild/ledger/board_ledger.py +560 -0
- fbuild/output.py +352 -0
- fbuild/packages/__init__.py +66 -0
- fbuild/packages/archive_utils.py +1098 -0
- fbuild/packages/arduino_core.py +412 -0
- fbuild/packages/cache.py +256 -0
- fbuild/packages/concurrent_manager.py +510 -0
- fbuild/packages/downloader.py +518 -0
- fbuild/packages/fingerprint.py +423 -0
- fbuild/packages/framework_esp32.py +538 -0
- fbuild/packages/framework_rp2040.py +349 -0
- fbuild/packages/framework_stm32.py +459 -0
- fbuild/packages/framework_teensy.py +346 -0
- fbuild/packages/github_utils.py +96 -0
- fbuild/packages/header_trampoline_cache.py +394 -0
- fbuild/packages/library_compiler.py +203 -0
- fbuild/packages/library_manager.py +549 -0
- fbuild/packages/library_manager_esp32.py +725 -0
- fbuild/packages/package.py +163 -0
- fbuild/packages/platform_esp32.py +383 -0
- fbuild/packages/platform_rp2040.py +400 -0
- fbuild/packages/platform_stm32.py +581 -0
- fbuild/packages/platform_teensy.py +312 -0
- fbuild/packages/platform_utils.py +131 -0
- fbuild/packages/platformio_registry.py +369 -0
- fbuild/packages/sdk_utils.py +231 -0
- fbuild/packages/toolchain.py +436 -0
- fbuild/packages/toolchain_binaries.py +196 -0
- fbuild/packages/toolchain_esp32.py +489 -0
- fbuild/packages/toolchain_metadata.py +185 -0
- fbuild/packages/toolchain_rp2040.py +436 -0
- fbuild/packages/toolchain_stm32.py +417 -0
- fbuild/packages/toolchain_teensy.py +404 -0
- fbuild/platform_configs/esp32.json +150 -0
- fbuild/platform_configs/esp32c2.json +144 -0
- fbuild/platform_configs/esp32c3.json +143 -0
- fbuild/platform_configs/esp32c5.json +151 -0
- fbuild/platform_configs/esp32c6.json +151 -0
- fbuild/platform_configs/esp32p4.json +149 -0
- fbuild/platform_configs/esp32s3.json +151 -0
- fbuild/platform_configs/imxrt1062.json +56 -0
- fbuild/platform_configs/rp2040.json +70 -0
- fbuild/platform_configs/rp2350.json +76 -0
- fbuild/platform_configs/stm32f1.json +59 -0
- fbuild/platform_configs/stm32f4.json +63 -0
- fbuild/py.typed +0 -0
- fbuild-1.2.8.dist-info/METADATA +468 -0
- fbuild-1.2.8.dist-info/RECORD +121 -0
- fbuild-1.2.8.dist-info/WHEEL +5 -0
- fbuild-1.2.8.dist-info/entry_points.txt +5 -0
- fbuild-1.2.8.dist-info/licenses/LICENSE +21 -0
- fbuild-1.2.8.dist-info/top_level.txt +2 -0
- fbuild_lint/__init__.py +0 -0
- fbuild_lint/ruff_plugins/__init__.py +0 -0
- fbuild_lint/ruff_plugins/keyboard_interrupt_checker.py +158 -0
fbuild/daemon/operation_registry.py
@@ -0,0 +1,288 @@
+"""
+Operation Registry - Structured operation state tracking.
+
+This module provides a registry for tracking all daemon operations (build/deploy/monitor)
+with structured state management, replacing the simple boolean _operation_in_progress flag.
+"""
+
+import logging
+import threading
+import time
+from dataclasses import dataclass, field
+from enum import Enum
+from typing import Any, Optional
+
+from fbuild.daemon.messages import OperationType
+
+
+class OperationState(Enum):
+    """State of a daemon operation."""
+
+    QUEUED = "queued"
+    RUNNING = "running"
+    COMPLETED = "completed"
+    FAILED = "failed"
+    CANCELLED = "cancelled"
+
+
+@dataclass
+class Operation:
+    """Tracks a daemon operation (build/deploy/monitor)."""
+
+    operation_id: str
+    operation_type: OperationType
+    project_dir: str
+    environment: str
+    state: OperationState
+    request_id: str
+    caller_pid: int
+    created_at: float = field(default_factory=time.time)
+    started_at: Optional[float] = None
+    completed_at: Optional[float] = None
+    error_message: Optional[str] = None
+    result: Optional[Any] = None
+
+    # Subprocess tracking
+    subprocess_ids: list[str] = field(default_factory=list)
+    compilation_job_ids: list[str] = field(default_factory=list)
+
+    def duration(self) -> Optional[float]:
+        """Get operation duration in seconds.
+
+        Returns:
+            Duration in seconds, or None if not complete
+        """
+        if self.started_at and self.completed_at:
+            return self.completed_at - self.started_at
+        return None
+
+    def elapsed_time(self) -> Optional[float]:
+        """Get elapsed time since operation started.
+
+        Returns:
+            Elapsed time in seconds, or None if not started
+        """
+        if self.started_at:
+            return time.time() - self.started_at
+        return None
+
+
+class OperationRegistry:
+    """Registry for tracking all daemon operations."""
+
+    def __init__(self, max_history: int = 100):
+        """Initialize operation registry.
+
+        Args:
+            max_history: Maximum number of completed operations to retain
+        """
+        self.operations: dict[str, Operation] = {}
+        self.lock = threading.Lock()
+        self.max_history = max_history
+        logging.info(f"OperationRegistry initialized (max_history={max_history})")
+
+    def register_operation(self, operation: Operation) -> str:
+        """Register new operation.
+
+        Args:
+            operation: Operation to register
+
+        Returns:
+            Operation ID
+        """
+        logging.debug(f"Operation type: {operation.operation_type.value}, project: {operation.project_dir}, env: {operation.environment}")
+        logging.debug(f"Initial state: {operation.state.value}")
+
+        with self.lock:
+            existing_count = len(self.operations)
+            self.operations[operation.operation_id] = operation
+            logging.debug(f"Operation added to registry, total operations: {existing_count} -> {len(self.operations)}")
+            self._cleanup_old_operations()
+
+        logging.info(f"Registered operation {operation.operation_id}: {operation.operation_type.value} {operation.project_dir}")
+        logging.debug(f"Active operations after registration: {len([op for op in self.operations.values() if op.state in (OperationState.QUEUED, OperationState.RUNNING)])}")
+        return operation.operation_id
+
+    def get_operation(self, operation_id: str) -> Optional[Operation]:
+        """Get operation by ID.
+
+        Args:
+            operation_id: Operation ID to query
+
+        Returns:
+            Operation or None if not found
+        """
+        with self.lock:
+            op = self.operations.get(operation_id)
+            if op:
+                logging.debug(f"Found operation {operation_id}")
+            else:
+                logging.debug(f"Operation {operation_id} not found")
+            return op
+
+    def update_state(self, operation_id: str, state: OperationState, **kwargs: Any) -> None:
+        """Update operation state.
+
+        Args:
+            operation_id: Operation ID to update
+            state: New state
+            **kwargs: Additional fields to update
+        """
+        logging.debug(f"Additional fields to update: {list(kwargs.keys())}")
+
+        with self.lock:
+            if operation_id not in self.operations:
+                logging.warning(f"Cannot update unknown operation: {operation_id}")
+                logging.debug(f"Known operations: {list(self.operations.keys())}")
+                return
+
+            op = self.operations[operation_id]
+            old_state = op.state
+            op.state = state
+
+            # Auto-update timestamps
+            if state == OperationState.RUNNING and op.started_at is None:
+                op.started_at = time.time()
+            elif state in (OperationState.COMPLETED, OperationState.FAILED, OperationState.CANCELLED):
+                if op.completed_at is None:
+                    op.completed_at = time.time()
+
+            # Update additional fields
+            for key, value in kwargs.items():
+                if hasattr(op, key):
+                    setattr(op, key, value)
+
+            logging.info(f"Operation {operation_id} state: {old_state.value} -> {state.value}")
+            if state in (OperationState.COMPLETED, OperationState.FAILED, OperationState.CANCELLED):
+                logging.info(
+                    f"Operation {operation_id} finished: state={state.value}, type={op.operation_type.value}, duration={op.duration():.2f}s"
+                    if op.duration()
+                    else f"Operation {operation_id} finished: state={state.value}"
+                )
+
+    def get_active_operations(self) -> list[Operation]:
+        """Get all active (running/queued) operations.
+
+        Returns:
+            List of active operations
+        """
+        with self.lock:
+            active = [op for op in self.operations.values() if op.state in (OperationState.QUEUED, OperationState.RUNNING)]
+            logging.debug(f"Found {len(active)} active operations (queued or running)")
+            if active:
+                logging.info(f"Active operations: {[op.operation_id for op in active]}")
+            return active
+
+    def get_operations_by_project(self, project_dir: str) -> list[Operation]:
+        """Get all operations for a specific project.
+
+        Args:
+            project_dir: Project directory path
+
+        Returns:
+            List of operations for the project
+        """
+        with self.lock:
+            ops = [op for op in self.operations.values() if op.project_dir == project_dir]
+            logging.debug(f"Found {len(ops)} operations for project {project_dir}")
+            if ops:
+                logging.debug(f"Operation states: {[(op.operation_id, op.state.value) for op in ops]}")
+            return ops
+
+    def is_project_busy(self, project_dir: str) -> bool:
+        """Check if a project has any active operations.
+
+        Args:
+            project_dir: Project directory path
+
+        Returns:
+            True if project has active operations
+        """
+        with self.lock:
+            busy = any(op.project_dir == project_dir and op.state in (OperationState.QUEUED, OperationState.RUNNING) for op in self.operations.values())
+            return busy
+
+    def get_statistics(self) -> dict[str, int]:
+        """Get operation statistics.
+
+        Returns:
+            Dictionary with operation counts by state
+        """
+        with self.lock:
+            stats = {
+                "total_operations": len(self.operations),
+                "queued": sum(1 for op in self.operations.values() if op.state == OperationState.QUEUED),
+                "running": sum(1 for op in self.operations.values() if op.state == OperationState.RUNNING),
+                "completed": sum(1 for op in self.operations.values() if op.state == OperationState.COMPLETED),
+                "failed": sum(1 for op in self.operations.values() if op.state == OperationState.FAILED),
+                "cancelled": sum(1 for op in self.operations.values() if op.state == OperationState.CANCELLED),
+            }
+            if stats["total_operations"] > 0:
+                success_rate = (stats["completed"] / stats["total_operations"]) * 100 if stats["total_operations"] > 0 else 0
+                logging.info(f"Operation success rate: {success_rate:.1f}% ({stats['completed']}/{stats['total_operations']})")
+            return stats
+
+    def _cleanup_old_operations(self) -> None:
+        """Remove old completed operations beyond max_history."""
+        completed_ops = sorted(
+            [op for op in self.operations.values() if op.state in (OperationState.COMPLETED, OperationState.FAILED, OperationState.CANCELLED)],
+            key=lambda x: x.completed_at or 0,
+        )
+
+        logging.debug(f"Checking for old operations to cleanup: {len(completed_ops)} completed, max_history={self.max_history}")
+
+        if len(completed_ops) > self.max_history:
+            to_remove = completed_ops[: len(completed_ops) - self.max_history]
+            logging.debug(f"Removing {len(to_remove)} old operations to maintain max_history limit")
+            for op in to_remove:
+                del self.operations[op.operation_id]
+
+            logging.info(f"Cleaned up {len(to_remove)} old operations (history size: {len(completed_ops)} -> {len(completed_ops) - len(to_remove)})")
+        else:
+            logging.debug(f"No cleanup needed: {len(completed_ops)} operations within max_history={self.max_history}")
+
+    def clear_completed_operations(self, older_than_seconds: Optional[float] = None) -> int:
+        """Clear completed operations.
+
+        Args:
+            older_than_seconds: Only clear operations older than this (None = all)
+
+        Returns:
+            Number of operations cleared
+        """
+        logging.debug(f"Clearing completed operations (older_than: {older_than_seconds}s)" if older_than_seconds else "Clearing all completed operations")
+
+        with self.lock:
+            now = time.time()
+            to_remove = []
+            total_completed = 0
+
+            for op_id, op in self.operations.items():
+                if op.state not in (
+                    OperationState.COMPLETED,
+                    OperationState.FAILED,
+                    OperationState.CANCELLED,
+                ):
+                    continue
+
+                total_completed += 1
+
+                if older_than_seconds is None:
+                    to_remove.append(op_id)
+                    logging.debug(f"Marking operation for removal: {op_id} (no age filter)")
+                elif op.completed_at and (now - op.completed_at) > older_than_seconds:
+                    age = now - op.completed_at
+                    to_remove.append(op_id)
+                    logging.debug(f"Marking operation for removal: {op_id} (age: {age:.1f}s > {older_than_seconds}s)")
+
+            logging.debug(f"Found {len(to_remove)} operations to remove out of {total_completed} completed")
+
+            for op_id in to_remove:
+                del self.operations[op_id]
+
+            if to_remove:
+                logging.info(f"Cleared {len(to_remove)} completed operations (remaining: {len(self.operations)})")
+            else:
+                logging.debug("No completed operations to clear")
+
+            return len(to_remove)
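The registry above only records state; the daemon's request processors drive the transitions. The following is a minimal usage sketch, not part of the package, assuming `OperationType` (defined in `fbuild/daemon/messages.py`, which is not expanded in this diff) exposes a `BUILD` member:

```python
import uuid

from fbuild.daemon.messages import OperationType
from fbuild.daemon.operation_registry import Operation, OperationRegistry, OperationState

registry = OperationRegistry(max_history=50)

# Hypothetical values for illustration; the daemon fills these from the client request.
op = Operation(
    operation_id=str(uuid.uuid4()),
    operation_type=OperationType.BUILD,  # assumed member name, see messages.py
    project_dir="/path/to/project",
    environment="esp32c6",
    state=OperationState.QUEUED,
    request_id="req-001",
    caller_pid=12345,
)
registry.register_operation(op)

registry.update_state(op.operation_id, OperationState.RUNNING)
try:
    # ... run the actual build here ...
    registry.update_state(op.operation_id, OperationState.COMPLETED, result="firmware.bin")
except Exception as exc:
    registry.update_state(op.operation_id, OperationState.FAILED, error_message=str(exc))

print(registry.get_statistics())
```

Note that `update_state` only applies keyword arguments that match existing `Operation` attributes, and it fills in `started_at`/`completed_at` automatically on the RUNNING and terminal transitions, so callers never set timestamps themselves.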
fbuild/daemon/port_state_manager.py
@@ -0,0 +1,249 @@
+"""
+Port State Manager - Tracks COM port state for visibility into daemon operations.
+
+This module provides the PortStateManager class which tracks the state of all
+COM ports in use by the daemon, providing visibility into which ports are in use,
+by whom, and in what state.
+"""
+
+import logging
+import threading
+import time
+from dataclasses import dataclass
+from enum import Enum
+from typing import Any
+
+
+class PortState(Enum):
+    """State of a serial port in use by the daemon."""
+
+    AVAILABLE = "available"  # Port not in use
+    UPLOADING = "uploading"  # Firmware being uploaded
+    MONITORING = "monitoring"  # Serial monitor active
+    RESERVED = "reserved"  # Reserved but not yet active
+
+
+@dataclass
+class PortInfo:
+    """Information about a port currently in use.
+
+    Attributes:
+        port: Port identifier (e.g., "COM3", "/dev/ttyUSB0")
+        state: Current state of the port
+        client_pid: PID of client using the port
+        project_dir: Project being deployed
+        environment: Environment name
+        operation_id: Request ID for the operation
+        acquired_at: When port was acquired (Unix timestamp)
+        last_activity: Last activity timestamp
+    """
+
+    port: str
+    state: PortState = PortState.AVAILABLE
+    client_pid: int | None = None
+    project_dir: str | None = None
+    environment: str | None = None
+    operation_id: str | None = None
+    acquired_at: float | None = None
+    last_activity: float | None = None
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert to dictionary for JSON serialization."""
+        return {
+            "port": self.port,
+            "state": self.state.value,
+            "client_pid": self.client_pid,
+            "project_dir": self.project_dir,
+            "environment": self.environment,
+            "operation_id": self.operation_id,
+            "acquired_at": self.acquired_at,
+            "last_activity": self.last_activity,
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> "PortInfo":
+        """Create PortInfo from dictionary."""
+        state_str = data.get("state", "available")
+        try:
+            state = PortState(state_str)
+        except ValueError:
+            state = PortState.AVAILABLE
+
+        return cls(
+            port=data["port"],
+            state=state,
+            client_pid=data.get("client_pid"),
+            project_dir=data.get("project_dir"),
+            environment=data.get("environment"),
+            operation_id=data.get("operation_id"),
+            acquired_at=data.get("acquired_at"),
+            last_activity=data.get("last_activity"),
+        )
+
+
+class PortStateManager:
+    """Tracks state of all COM ports in use by the daemon.
+
+    This class provides visibility into which ports are being used, by which
+    clients, and in what state. It is thread-safe and can be accessed from
+    multiple request handlers concurrently.
+
+    Example:
+        >>> manager = PortStateManager()
+        >>> manager.acquire_port(
+        ...     port="COM3",
+        ...     state=PortState.UPLOADING,
+        ...     client_pid=12345,
+        ...     project_dir="/path/to/project",
+        ...     environment="esp32c6",
+        ...     operation_id="deploy_123"
+        ... )
+        >>> info = manager.get_port_info("COM3")
+        >>> print(info.state)  # PortState.UPLOADING
+        >>> manager.update_state("COM3", PortState.MONITORING)
+        >>> manager.release_port("COM3")
+    """
+
+    def __init__(self) -> None:
+        """Initialize the PortStateManager."""
+        self._lock = threading.Lock()
+        self._ports: dict[str, PortInfo] = {}
+
+    def acquire_port(
+        self,
+        port: str,
+        state: PortState,
+        client_pid: int,
+        project_dir: str,
+        environment: str,
+        operation_id: str,
+    ) -> None:
+        """Mark a port as in use.
+
+        Args:
+            port: Port identifier (e.g., "COM3", "/dev/ttyUSB0")
+            state: Initial state for the port
+            client_pid: PID of client using the port
+            project_dir: Project being deployed
+            environment: Environment name
+            operation_id: Request ID for the operation
+        """
+        with self._lock:
+            current_time = time.time()
+            self._ports[port] = PortInfo(
+                port=port,
+                state=state,
+                client_pid=client_pid,
+                project_dir=project_dir,
+                environment=environment,
+                operation_id=operation_id,
+                acquired_at=current_time,
+                last_activity=current_time,
+            )
+            logging.debug(f"Port {port} acquired: state={state.value}, client_pid={client_pid}, operation_id={operation_id}")
+
+    def update_state(self, port: str, state: PortState) -> None:
+        """Update state of a port (e.g., UPLOADING -> MONITORING).
+
+        Args:
+            port: Port identifier
+            state: New state for the port
+        """
+        with self._lock:
+            if port in self._ports:
+                old_state = self._ports[port].state
+                self._ports[port].state = state
+                self._ports[port].last_activity = time.time()
+                logging.debug(f"Port {port} state updated: {old_state.value} -> {state.value}")
+            else:
+                logging.warning(f"Cannot update state for unknown port: {port}")
+
+    def release_port(self, port: str) -> None:
+        """Release a port back to available state.
+
+        Args:
+            port: Port identifier to release
+        """
+        with self._lock:
+            if port in self._ports:
+                info = self._ports[port]
+                del self._ports[port]
+                logging.debug(f"Port {port} released (was {info.state.value}, held for {time.time() - (info.acquired_at or 0):.1f}s)")
+            else:
+                logging.warning(f"Cannot release unknown port: {port}")
+
+    def get_port_info(self, port: str) -> PortInfo | None:
+        """Get info about a specific port.
+
+        Args:
+            port: Port identifier
+
+        Returns:
+            PortInfo for the port, or None if not tracked
+        """
+        with self._lock:
+            info = self._ports.get(port)
+            if info:
+                # Return a copy to avoid race conditions
+                return PortInfo(
+                    port=info.port,
+                    state=info.state,
+                    client_pid=info.client_pid,
+                    project_dir=info.project_dir,
+                    environment=info.environment,
+                    operation_id=info.operation_id,
+                    acquired_at=info.acquired_at,
+                    last_activity=info.last_activity,
+                )
+            return None
+
+    def get_all_ports(self) -> dict[str, PortInfo]:
+        """Get snapshot of all tracked ports.
+
+        Returns:
+            Dictionary mapping port names to PortInfo objects (copies)
+        """
+        with self._lock:
+            return {
+                port: PortInfo(
+                    port=info.port,
+                    state=info.state,
+                    client_pid=info.client_pid,
+                    project_dir=info.project_dir,
+                    environment=info.environment,
+                    operation_id=info.operation_id,
+                    acquired_at=info.acquired_at,
+                    last_activity=info.last_activity,
+                )
+                for port, info in self._ports.items()
+            }
+
+    def is_port_available(self, port: str) -> bool:
+        """Check if port is available for use.
+
+        Args:
+            port: Port identifier
+
+        Returns:
+            True if port is not tracked (available), False if in use
+        """
+        with self._lock:
+            return port not in self._ports
+
+    def get_ports_summary(self) -> dict[str, dict[str, Any]]:
+        """Get a summary of all port states for status reporting.
+
+        Returns:
+            Dictionary mapping port names to their info as dictionaries
+        """
+        with self._lock:
+            return {port: info.to_dict() for port, info in self._ports.items()}
+
+    def get_port_count(self) -> int:
+        """Get the number of ports currently tracked.
+
+        Returns:
+            Number of ports in use
+        """
+        with self._lock:
+            return len(self._ports)
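As with the registry, here is a hedged sketch, not part of the package, of how a deploy flow might use only the `PortStateManager` API shown above. Because `get_ports_summary()` returns plain dictionaries via `PortInfo.to_dict()`, the result can be serialized straight to JSON for status responses:

```python
import json

from fbuild.daemon.port_state_manager import PortState, PortStateManager

ports = PortStateManager()

# Hypothetical port, PID, and paths for illustration.
if ports.is_port_available("/dev/ttyUSB0"):
    ports.acquire_port(
        port="/dev/ttyUSB0",
        state=PortState.UPLOADING,
        client_pid=12345,
        project_dir="/path/to/project",
        environment="esp32c6",
        operation_id="deploy-001",
    )

# Once the upload finishes, the same port switches to the serial monitor.
ports.update_state("/dev/ttyUSB0", PortState.MONITORING)

# Status reporting: plain dicts, directly JSON-serializable.
print(json.dumps(ports.get_ports_summary(), indent=2))

ports.release_port("/dev/ttyUSB0")
print(ports.get_port_count())  # 0
```

Since `get_port_info` and `get_all_ports` hand back copies of `PortInfo`, callers can inspect a snapshot without racing against concurrent handlers that mutate the manager under its lock.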