logqbit 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of logqbit might be problematic. Click here for more details.
- logqbit/__init__.py +0 -0
- logqbit/assets/icon.svg +15 -0
- logqbit/browser.py +0 -0
- logqbit/constants.py +4 -0
- logqbit/live_plotter.py +665 -0
- logqbit/logfolder.py +256 -0
- logqbit/registry.py +229 -0
- logqbit-0.0.1.dist-info/METADATA +47 -0
- logqbit-0.0.1.dist-info/RECORD +11 -0
- logqbit-0.0.1.dist-info/WHEEL +4 -0
- logqbit-0.0.1.dist-info/entry_points.txt +3 -0
logqbit/__init__.py
ADDED
|
File without changes
|
logqbit/assets/icon.svg
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
<svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512">
|
|
2
|
+
<defs>
|
|
3
|
+
<marker id="arrow" viewBox="0 0 10 10" refX="5" refY="5" markerWidth="3" markerHeight="3"
|
|
4
|
+
orient="auto-start-reverse">
|
|
5
|
+
<path d="M 0 0 L 10 5 L 0 10 z" fill="#2DB84D" />
|
|
6
|
+
</marker>
|
|
7
|
+
</defs>
|
|
8
|
+
<g stroke="#2DB84D" stroke-width="28" stroke-linecap="round" stroke-linejoin="round" fill="none">
|
|
9
|
+
<path d="M 96 432 L 96 96" />
|
|
10
|
+
<path d="M 96 432 L 416 432" />
|
|
11
|
+
</g>
|
|
12
|
+
<polyline points="136,360 220,220 300,300 396,168" fill="none" stroke="#2DB84D" stroke-width="32"
|
|
13
|
+
stroke-linecap="round" stroke-linejoin="round" marker-end="url(#arrow)" />
|
|
14
|
+
<circle cx="300" cy="360" r="20" fill="#2DB84D" />
|
|
15
|
+
</svg>
|
logqbit/browser.py
ADDED
|
File without changes
|
logqbit/constants.py
ADDED
logqbit/live_plotter.py
ADDED
|
@@ -0,0 +1,665 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import pickle
|
|
4
|
+
import sys
|
|
5
|
+
import warnings
|
|
6
|
+
from collections import deque
|
|
7
|
+
from importlib.resources import files
|
|
8
|
+
from typing import Any, Mapping, Sequence
|
|
9
|
+
|
|
10
|
+
import pandas as pd
|
|
11
|
+
import pyqtgraph as pg
|
|
12
|
+
from PySide6.QtCore import QCoreApplication, QObject, Qt, Signal
|
|
13
|
+
from PySide6.QtGui import QIcon, QPixmap
|
|
14
|
+
from PySide6.QtNetwork import QLocalServer, QLocalSocket
|
|
15
|
+
from PySide6.QtWidgets import (
|
|
16
|
+
QApplication,
|
|
17
|
+
QComboBox,
|
|
18
|
+
QHBoxLayout,
|
|
19
|
+
QLabel,
|
|
20
|
+
QMainWindow,
|
|
21
|
+
QPushButton,
|
|
22
|
+
QStatusBar,
|
|
23
|
+
QVBoxLayout,
|
|
24
|
+
QWidget,
|
|
25
|
+
)
|
|
26
|
+
|
|
27
|
+
from .constants import HEAD_LENGTH, LIVE_PLOTTER_PIPE_NAME
|
|
28
|
+
|
|
29
|
+
# Pen/brush colour (RGB) for the trace currently receiving data.
ACTIVE_COLOR = (255, 94, 0)
# Colour (RGBA) for retired traces; the alpha channel dims them behind the active one.
INACTIVE_COLOR = (30, 144, 255, 120)
# Number of traces kept on screen at once (one active + recycled history).
DEFAULT_LINE_COUNT = 4
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def _safe_float(value: Any) -> float | None:
|
|
35
|
+
if value is None:
|
|
36
|
+
return None
|
|
37
|
+
if isinstance(value, (int, float)):
|
|
38
|
+
return float(value)
|
|
39
|
+
try:
|
|
40
|
+
return float(value)
|
|
41
|
+
except (TypeError, ValueError):
|
|
42
|
+
return None
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class LivePlotterWindow(QMainWindow):
    """Main window rendering streamed records as rolling line traces.

    The window keeps ``line_count`` traces: one "active" trace receives new
    points, the rest show history. When the stepper (outer-loop) values of an
    incoming row change, the active trace is retired and the oldest inactive
    trace is cleared and reused (see ``_roll_lines``). When *socket_name* is
    given, a ``PlotterCommandServer`` is started so external processes can
    drive the window over a local socket.
    """

    def __init__(
        self,
        line_count: int = DEFAULT_LINE_COUNT,
        socket_name: str | None = LIVE_PLOTTER_PIPE_NAME,
    ) -> None:
        super().__init__()
        pg.setConfigOptions(antialias=True)
        self.setWindowTitle("LogQbit Live Plotter")
        self.setWindowIcon(QIcon(QPixmap(str(files("logqbit") / "assets" / "icon.svg"))))

        # At least one trace is always kept, even for line_count <= 0.
        self.line_count = max(1, line_count)
        self._active_index = 0
        # Recycling order for retired traces; the oldest is popped from the right.
        self._inactive_indices: deque[int] = deque(range(1, self.line_count))
        self._last_stepper_values: tuple[Any, ...] | None = None
        self._indeps: list[str] = []  # first entry = x axis, remainder = stepper keys
        self._stepper_keys: list[str] = []
        self._dependent_keys: set[str] = set()
        self._current_y_key: str | None = None
        # Per-trace point storage: each point is {"x": float, "values": {dep_key: raw}}.
        self._line_storage: list[list[dict[str, Any]]] = [
            [] for _ in range(self.line_count)
        ]
        self._show_markers = False
        self._marker_size = 4

        self._active_symbol_brush = pg.mkBrush(ACTIVE_COLOR)
        self._inactive_symbol_brush = pg.mkBrush(INACTIVE_COLOR)
        self._active_symbol_pen = pg.mkPen(ACTIVE_COLOR)
        self._inactive_symbol_pen = pg.mkPen(INACTIVE_COLOR)
        self._active_pen = pg.mkPen(color=ACTIVE_COLOR, width=2)
        self._inactive_pen = pg.mkPen(color=INACTIVE_COLOR, width=1)

        self._build_ui()
        self._configure_plot()

        self._command_server: PlotterCommandServer | None = None
        if socket_name:
            self._command_server = PlotterCommandServer(self, socket_name)

    # ------------------------------------------------------------------
    # Public API exposed to IPC clients
    # ------------------------------------------------------------------
    def set_indeps(self, indeps: Sequence[str]) -> None:
        """Declare the independent variables and reset all plot state.

        The first key becomes the x axis; the remaining keys are "stepper"
        (outer-loop) variables whose change triggers a trace roll-over.

        Raises:
            ValueError: if *indeps* contains no non-empty key.
        """
        indep_list = [str(item) for item in indeps if str(item)]
        if not indep_list:
            raise ValueError("'indeps' must contain at least one non-empty key")

        self._indeps = indep_list
        self._stepper_keys = indep_list[1:]
        self._dependent_keys.clear()
        self._current_y_key = None
        self._last_stepper_values = None

        self.plot_widget.setLabel("bottom", indep_list[0])

        # Drop all buffered points and restore the initial active/inactive layout.
        for storage in self._line_storage:
            storage.clear()
        self._active_index = 0
        self._inactive_indices = deque(range(1, self.line_count))
        for idx in range(self.line_count):
            if idx == self._active_index:
                self._set_active(idx)
            else:
                self._set_inactive(idx)

        self._sync_y_selector()
        self._set_status_message("")
        self._refresh_all_lines()

    def add(
        self,
        record: Mapping[str, Any] | pd.Series | None = None,
        seg: pd.DataFrame | Mapping[str, Any] | Sequence[Mapping[str, Any]] | None = None,
    ) -> None:
        """Append one record and/or a segment of rows to the plot.

        Args:
            record: a single row as a mapping or pandas Series.
            seg: multiple rows; anything ``pd.DataFrame`` accepts.

        Raises:
            TypeError: if *record* is neither a mapping nor a Series.
            ValueError: if *seg* cannot be converted to a DataFrame.
        """
        rows: list[Mapping[str, Any]] = []

        if record is not None:
            if isinstance(record, pd.Series):
                rows.append(record.to_dict())
            elif isinstance(record, Mapping):
                rows.append(dict(record))
            else:
                raise TypeError("'record' must be a mapping or pandas Series")

        if seg is not None:
            if isinstance(seg, pd.DataFrame):
                seg_df = seg
            else:
                try:
                    seg_df = pd.DataFrame(seg)
                except (TypeError, ValueError) as exc:  # pragma: no cover - defensive
                    raise ValueError("'seg' must be convertible to a pandas DataFrame") from exc
            rows.extend(seg_df.to_dict(orient="records"))

        if not rows:
            return

        for row in rows:
            self._ingest_row(dict(row))

    # ------------------------------------------------------------------
    # Internal helpers
    # ------------------------------------------------------------------
    def _build_ui(self) -> None:
        """Create the plot widget, y-axis selector, status bar and marker toggle."""
        central = QWidget(self)
        root = QVBoxLayout()
        root.setContentsMargins(0, 0, 0, 0)
        root.setSpacing(0)

        self.plot_widget = pg.PlotWidget(background="w", parent=self)
        self.plot_widget.showGrid(x=True, y=True, alpha=0.25)
        root.addWidget(self.plot_widget)

        controls = QHBoxLayout()
        controls.setContentsMargins(8, 4, 8, 4)
        controls.setSpacing(8)

        self.y_selector = QComboBox(self)
        self.y_selector.currentTextChanged.connect(self._on_y_changed)
        self.y_selector.setSizeAdjustPolicy(QComboBox.SizeAdjustPolicy.AdjustToContents)
        controls.addWidget(self.y_selector)

        self._status_bar = QStatusBar(self)
        self._status_bar.setSizeGripEnabled(False)
        self._status_bar.setContentsMargins(0, 0, 0, 0)
        self._status_bar.setStyleSheet("QStatusBar::item { border: none; }")
        self._status_label = QLabel("", self._status_bar)
        self._status_label.setAlignment(Qt.AlignCenter)
        self._status_bar.addWidget(self._status_label, 1)
        controls.addWidget(self._status_bar, 1)

        self.marker_button = QPushButton("Markers Off", self)
        self.marker_button.setCheckable(True)
        self.marker_button.setChecked(self._show_markers)
        self.marker_button.toggled.connect(self._on_marker_toggled)
        controls.addWidget(self.marker_button)

        root.addLayout(controls)

        central.setLayout(root)
        self.setCentralWidget(central)

        # One PlotDataItem per trace; the active trace is drawn on top (higher Z).
        self._line_items: list[pg.PlotDataItem] = []
        for idx in range(self.line_count):
            pen = self._active_pen if idx == self._active_index else self._inactive_pen
            item = self.plot_widget.plot([], [], pen=pen)
            item.setZValue(10 if idx == self._active_index else 1)
            self._line_items.append(item)

    def _configure_plot(self) -> None:
        """Initialise empty axis labels and the legend."""
        self.plot_widget.setLabel("left", "")
        self.plot_widget.setLabel("bottom", "")
        self.plot_widget.addLegend()

    def _on_marker_toggled(self, checked: bool) -> None:
        """Toggle point markers on all traces and update the button caption."""
        self._show_markers = checked
        self.marker_button.setText("Markers On" if checked else "Markers Off")
        self._refresh_all_lines()

    def _sync_y_selector(self) -> tuple[bool, str | None]:
        """Rebuild the y-axis combo box from the known dependent keys.

        Keeps the previous selection when still available, otherwise falls
        back to the first key (or none).

        Returns:
            ``(changed, current_key)`` where *changed* indicates the
            selection moved to a different key.
        """
        previous = self._current_y_key
        keys = sorted(self._dependent_keys)

        # Block signals while repopulating so _on_y_changed fires at most
        # once (explicitly, below) with the final selection.
        self.y_selector.blockSignals(True)
        self.y_selector.clear()
        for key in keys:
            self.y_selector.addItem(key)

        changed = False
        if not keys:
            self._current_y_key = None
            changed = previous is not None
        else:
            if previous in keys:
                index = keys.index(previous)
                self.y_selector.setCurrentIndex(index)
                self._current_y_key = previous
            else:
                self.y_selector.setCurrentIndex(0)
                self._current_y_key = keys[0]
                changed = previous != self._current_y_key
        self.y_selector.blockSignals(False)

        if changed:
            self._on_y_changed(self._current_y_key or "")
        return changed, self._current_y_key

    def _on_y_changed(self, text: str) -> None:
        """React to a new y-axis selection; clear all traces when *text* is empty."""
        if not text:
            self._current_y_key = None
            self.plot_widget.setLabel("left", "")
            for item in self._line_items:
                item.setData([], [])
            return

        self._current_y_key = text
        self.plot_widget.setLabel("left", text)
        self._refresh_all_lines()

    def _refresh_all_lines(self) -> None:
        """Redraw every trace from its stored points."""
        for idx in range(self.line_count):
            self._refresh_line(idx)

    def _refresh_line(self, index: int) -> None:
        """Redraw one trace, silently skipping points that are not numeric."""
        item = self._line_items[index]
        if not self._current_y_key:
            item.setData([], [])
            item.setSymbol(None)
            return

        storage = self._line_storage[index]
        if not storage:
            item.setData([], [])
            item.setSymbol(None)
            return

        x_values: list[float] = []
        y_values: list[float] = []
        for point in storage:
            y_raw = point["values"].get(self._current_y_key)
            y_value = _safe_float(y_raw)
            if y_value is None:
                continue
            x_value = _safe_float(point["x"])
            if x_value is None:
                continue
            x_values.append(x_value)
            y_values.append(y_value)

        item.setData(x_values, y_values)
        if self._show_markers and x_values:
            active = index == self._active_index
            item.setSymbol("o")
            item.setSymbolSize(self._marker_size)
            item.setSymbolBrush(self._active_symbol_brush if active else self._inactive_symbol_brush)
            item.setSymbolPen(self._active_symbol_pen if active else self._inactive_symbol_pen)
        else:
            item.setSymbol(None)

    def _roll_lines(self) -> None:
        """Retire the active trace and recycle the oldest inactive one."""
        if self.line_count == 1:
            # Single trace: nothing to rotate, just restart it.
            self._line_storage[0].clear()
            self._refresh_line(0)
            return

        current = self._active_index
        self._set_inactive(current)
        self._inactive_indices.appendleft(current)

        # The oldest retired trace (right end of the deque) becomes active again.
        new_active = self._inactive_indices.pop()
        self._active_index = new_active
        self._line_storage[new_active].clear()
        self._set_active(new_active)

        self._refresh_line(current)
        self._refresh_line(new_active)

    def _set_active(self, index: int) -> None:
        """Style trace *index* as the active (highlighted, topmost) trace."""
        item = self._line_items[index]
        item.setPen(self._active_pen)
        item.setZValue(10)

    def _set_inactive(self, index: int) -> None:
        """Style trace *index* as a dimmed background trace."""
        item = self._line_items[index]
        item.setPen(self._inactive_pen)
        item.setZValue(1)

    def _update_stepper_display(self, stepper: tuple[Any, ...]) -> None:
        """Show the current stepper values as "key=value | ..." in the status bar."""
        if not self._stepper_keys:
            self._set_status_message("")
            return
        parts = [f"{key}={value}" for key, value in zip(self._stepper_keys, stepper)]
        self._set_status_message(" | ".join(parts))

    def _set_status_message(self, message: str) -> None:
        # Guarded with hasattr because this can be reached before _build_ui
        # has created the status label.
        if hasattr(self, "_status_label"):
            self._status_label.setText(message)

    def _ingest_row(self, row: Mapping[str, Any]) -> None:
        """Store one data row on the active trace and update the display.

        Rows are ignored until ``set_indeps`` has been called, and when the
        x key is missing or non-numeric, or the row carries no dependent
        values. A change in the stepper values rolls the traces before the
        point is stored.
        """
        if not self._indeps:
            return

        x_key = self._indeps[0]
        if x_key not in row:
            return

        x_value = _safe_float(row.get(x_key))
        if x_value is None:
            return

        stepper = tuple(row.get(key) for key in self._stepper_keys)
        dependent_values = {
            key: value
            for key, value in row.items()
            if key not in self._indeps
        }
        if not dependent_values:
            return

        if self._last_stepper_values is None:
            self._last_stepper_values = stepper
        elif stepper != self._last_stepper_values:
            self._roll_lines()
            self._last_stepper_values = stepper

        line_idx = self._active_index
        self._line_storage[line_idx].append({
            "x": x_value,
            "values": dict(dependent_values),
        })

        self._dependent_keys.update(dependent_values.keys())
        selection_changed, _ = self._sync_y_selector()
        if not selection_changed:
            # A changed selection already triggered a full refresh inside
            # _sync_y_selector; otherwise only the touched trace is redrawn.
            self._refresh_line(line_idx)

        self._update_stepper_display(stepper)
|
|
362
|
+
|
|
363
|
+
|
|
364
|
+
class PlotterCommandServer(QObject):
    """Local-socket server accepting IPC clients that drive a LivePlotterWindow.

    Each accepted socket is wrapped in a ``PlotterConnection``; a reference
    is held here until the connection signals it is finished, so the
    connection objects are not garbage collected while alive.
    """

    def __init__(self, window: LivePlotterWindow, socket_name: str) -> None:
        super().__init__(window)
        self._window = window
        self._socket_name = socket_name
        self._server = QLocalServer(self)
        self._connections: set[PlotterConnection] = set()

        # Remove a stale socket/pipe possibly left behind by a crashed instance.
        QLocalServer.removeServer(self._socket_name)

        if not self._server.listen(self._socket_name):
            # Non-fatal: the window still works locally, it just cannot be
            # driven remotely.
            warnings.warn(
                f"LivePlotter IPC listen failed on '{self._socket_name}': {self._server.errorString()}",
                stacklevel=2,
            )
            return

        self._server.newConnection.connect(self._on_new_connection)

    def _on_new_connection(self) -> None:
        """Wrap every pending socket in a PlotterConnection and retain it."""
        while self._server.hasPendingConnections():
            socket = self._server.nextPendingConnection()
            connection = PlotterConnection(socket, self._window, self)
            connection.finished.connect(self._on_connection_finished)
            self._connections.add(connection)

    def _on_connection_finished(self, connection: "PlotterConnection") -> None:
        """Drop the bookkeeping reference once a connection has closed."""
        self._connections.discard(connection)
        connection.deleteLater()
|
|
393
|
+
|
|
394
|
+
|
|
395
|
+
class PlotterConnection(QObject):
    """One IPC client connection speaking length-prefixed pickled messages.

    Wire format: a ``HEAD_LENGTH``-byte big-endian unsigned body size,
    followed by a pickled dict with a ``"cmd"`` key (``"set_indeps"`` or
    ``"add"``). Replies use the same framing and carry
    ``{"status": "ok"|"error", ...}``.

    NOTE(review): payloads are deserialised with ``pickle.loads``, which can
    execute arbitrary code. This presumably relies on the local socket being
    reachable only by trusted same-machine processes — confirm, and never
    expose this endpoint to untrusted peers.
    """

    # Emitted with this connection object once the socket disconnected or errored.
    finished = Signal(object)

    def __init__(
        self,
        socket: QLocalSocket,
        window: LivePlotterWindow,
        parent: QObject | None = None,
    ) -> None:
        super().__init__(parent)
        self._socket = socket
        self._window = window
        self._buffer = bytearray()  # unconsumed bytes read from the socket
        self._expected_size: int | None = None  # body size from the current header

        self._socket.readyRead.connect(self._on_ready_read)
        self._socket.disconnected.connect(self._on_disconnected)
        self._socket.errorOccurred.connect(self._on_error)

    def _on_ready_read(self) -> None:
        """Drain the socket into the buffer, then parse complete packets."""
        while self._socket.bytesAvailable():
            data = self._socket.readAll()
            if not data:
                break
            self._buffer.extend(bytes(data))
        self._process_buffer()

    def _process_buffer(self) -> None:
        """Consume as many complete length-prefixed packets as are buffered."""
        while True:
            if self._expected_size is None:
                if len(self._buffer) < HEAD_LENGTH:
                    return  # header not complete yet
                header = bytes(self._buffer[:HEAD_LENGTH])
                self._expected_size = int.from_bytes(header, byteorder="big", signed=False)
                del self._buffer[:HEAD_LENGTH]

            if len(self._buffer) < (self._expected_size or 0):
                return  # body not complete yet

            payload = bytes(self._buffer[: self._expected_size]) if self._expected_size else b""
            del self._buffer[: self._expected_size or 0]
            self._expected_size = None
            if payload:
                self._process_payload(payload)

    def _process_payload(self, payload: bytes) -> None:
        """Unpickle one message and dispatch it to the matching handler."""
        try:
            message = pickle.loads(payload)
        except Exception as exc:  # pragma: no cover - defensive
            self._send_error("invalid_payload", f"Failed to unpickle payload: {exc}")
            return

        if not isinstance(message, dict):
            self._send_error("invalid_message", "Payload must be a dict")
            return

        command = message.get("cmd")
        if command == "set_indeps":
            self._handle_set_indeps(message)
        elif command == "add":
            self._handle_add(message)
        else:
            self._send_error("unknown_command", f"Command '{command}' is not supported")

    def _handle_set_indeps(self, payload: dict[str, Any]) -> None:
        """Validate and forward a 'set_indeps' command, then acknowledge it."""
        indeps = payload.get("indeps")
        if not isinstance(indeps, Sequence):
            self._send_error("invalid_arguments", "'indeps' must be a sequence of strings")
            return
        try:
            self._window.set_indeps(list(indeps))
        except Exception as exc:  # pragma: no cover - GUI validation
            self._send_error("execution_error", f"{type(exc).__name__}: {exc}")
            return
        self._send_packet({"status": "ok", "cmd": "set_indeps"})

    def _handle_add(self, payload: dict[str, Any]) -> None:
        """Validate and forward an 'add' command, then acknowledge it."""
        record = payload.get("record")
        if record is not None and not isinstance(record, (Mapping, pd.Series)):
            self._send_error("invalid_arguments", "'record' must be a mapping or pandas Series")
            return

        seg_payload = payload.get("seg")
        seg_df = None
        if seg_payload is not None:
            if isinstance(seg_payload, pd.DataFrame):
                seg_df = seg_payload
            else:
                try:
                    seg_df = pd.DataFrame(seg_payload)
                except (TypeError, ValueError) as exc:
                    self._send_error("invalid_arguments", f"'seg' conversion failed: {exc}")
                    return

        try:
            self._window.add(record=record, seg=seg_df)
        except Exception as exc:  # pragma: no cover - GUI validation
            self._send_error("execution_error", f"{type(exc).__name__}: {exc}")
            return

        self._send_packet({"status": "ok", "cmd": "add"})

    def _send_packet(self, payload: dict[str, Any]) -> None:
        """Pickle *payload* and write it to the socket with length-prefix framing."""
        try:
            body = pickle.dumps(payload, protocol=pickle.HIGHEST_PROTOCOL)
        except Exception as exc:  # pragma: no cover - defensive
            # Fall back to a guaranteed-picklable error reply so the client
            # is never left waiting for a response.
            body = pickle.dumps(
                {"status": "error", "code": "encoding_error", "message": str(exc)},
                protocol=pickle.HIGHEST_PROTOCOL,
            )
        header = len(body).to_bytes(HEAD_LENGTH, byteorder="big", signed=False)
        self._socket.write(header)
        if body:
            self._socket.write(body)
        self._socket.flush()

    def _send_error(self, code: str, message: str) -> None:
        """Send an error reply with a machine-readable *code* and human *message*."""
        self._send_packet({"status": "error", "code": code, "message": message})

    def _on_disconnected(self) -> None:
        # Release both the socket and this wrapper once the peer goes away.
        self.finished.emit(self)
        self._socket.deleteLater()
        self.deleteLater()

    def _on_error(self, _error: QLocalSocket.LocalSocketError) -> None:  # pragma: no cover - best effort
        self.finished.emit(self)
        self._socket.deleteLater()
        self.deleteLater()
|
|
523
|
+
|
|
524
|
+
|
|
525
|
+
class LivePlotterClient:
    """Blocking client for the live plotter's local-socket IPC protocol.

    Messages are pickled dicts with the same length-prefix framing the
    server uses; every command waits for the reply and raises on a
    non-"ok" status.
    """

    def __init__(
        self,
        socket_name: str = LIVE_PLOTTER_PIPE_NAME,
        *,
        timeout_ms: int = 5000,
    ) -> None:
        self._socket_name = socket_name
        self._timeout_ms = timeout_ms
        self._socket: QLocalSocket | None = None
        self._owns_app = False  # True when this client created the QCoreApplication
        self._app: QCoreApplication | None = None

    def connect(self) -> None:
        """Connect to the server; a no-op when already connected.

        Raises:
            ConnectionError: if the server cannot be reached within the timeout.
        """
        self._ensure_app()
        if self._socket is not None and self._socket.state() == QLocalSocket.ConnectedState:
            return

        socket = QLocalSocket()
        socket.connectToServer(self._socket_name)
        if not socket.waitForConnected(self._timeout_ms):
            error = socket.errorString()
            socket.deleteLater()
            raise ConnectionError(f"Could not connect to LivePlotter server '{self._socket_name}': {error}")

        self._socket = socket

    def close(self) -> None:
        """Disconnect and release the socket; safe to call repeatedly."""
        if self._socket is None:
            return
        self._socket.disconnectFromServer()
        self._socket.waitForDisconnected(100)
        self._socket.deleteLater()
        self._socket = None

    # Context manager helpers -------------------------------------------------
    def __enter__(self) -> "LivePlotterClient":
        self.connect()
        return self

    def __exit__(self, exc_type, exc, tb) -> None:
        self.close()

    # IPC commands ------------------------------------------------------------
    def set_indeps(self, indeps: Sequence[str]) -> None:
        """Tell the plotter which keys are independent variables."""
        payload = {"cmd": "set_indeps", "indeps": list(indeps)}
        self._invoke(payload)

    def add(
        self,
        *,
        record: Mapping[str, Any] | pd.Series | None = None,
        seg: pd.DataFrame | Mapping[str, Any] | Sequence[Mapping[str, Any]] | None = None,
    ) -> None:
        """Send one record and/or a segment of rows to the plotter.

        Raises:
            ValueError: if *seg* cannot be converted to a DataFrame.
            TypeError: if *record* is neither a mapping nor a Series.
        """
        seg_payload: Any = None
        if seg is not None and not isinstance(seg, pd.DataFrame):
            # Normalise to a DataFrame locally so conversion errors surface
            # on the client side instead of as a remote error reply.
            try:
                seg_payload = pd.DataFrame(seg)
            except (TypeError, ValueError) as exc:
                raise ValueError("'seg' must be convertible to a pandas DataFrame") from exc
        else:
            seg_payload = seg

        if record is not None and not isinstance(record, (Mapping, pd.Series)):
            raise TypeError("'record' must be a mapping or pandas Series")

        payload = {"cmd": "add", "record": record, "seg": seg_payload}
        self._invoke(payload)

    # Internal client helpers -------------------------------------------------
    def _ensure_app(self) -> None:
        # QLocalSocket requires a Qt application object to exist; create a
        # minimal QCoreApplication when the host process has none.
        app = QCoreApplication.instance()
        if app is None:
            self._app = QCoreApplication([])
            self._owns_app = True
        else:
            self._app = app

    def _invoke(self, payload: dict[str, Any]) -> dict[str, Any]:
        """Send one framed request and return the server's reply dict.

        Raises:
            RuntimeError: when not connected, or when the server replies
                with a non-"ok" status.
            TimeoutError: when sending times out.
        """
        if self._socket is None:
            raise RuntimeError("LivePlotterClient is not connected")

        body = pickle.dumps(payload, protocol=pickle.HIGHEST_PROTOCOL)
        header = len(body).to_bytes(HEAD_LENGTH, byteorder="big", signed=False)
        self._socket.write(header)
        if body:
            self._socket.write(body)
        if not self._socket.waitForBytesWritten(self._timeout_ms):
            raise TimeoutError("Timed out sending data to LivePlotter server")

        response = self._read_packet()
        status = response.get("status")
        if status != "ok":
            code = response.get("code", "unknown_error")
            message = response.get("message", "Request failed")
            raise RuntimeError(f"LivePlotter error ({code}): {message}")
        return response

    def _read_packet(self) -> dict[str, Any]:
        """Read one length-prefixed reply; returns ``{}`` for an empty body."""
        if self._socket is None:
            raise RuntimeError("LivePlotterClient is not connected")

        header = self._read_exact(HEAD_LENGTH)
        size = int.from_bytes(header, byteorder="big", signed=False)
        body = self._read_exact(size) if size else b""
        if not body:
            return {}
        return pickle.loads(body)

    def _read_exact(self, size: int) -> bytes:
        """Block until exactly *size* bytes have been read from the socket.

        Raises:
            TimeoutError: when no data arrives within the timeout.
            ConnectionError: when the server disconnects mid-read.
        """
        if self._socket is None:
            raise RuntimeError("LivePlotterClient is not connected")

        data = bytearray()
        while len(data) < size:
            chunk = self._socket.read(size - len(data))
            if chunk:
                data.extend(bytes(chunk))
                continue
            if not self._socket.waitForReadyRead(self._timeout_ms):
                raise TimeoutError("Timed out waiting for LivePlotter response")
            if self._socket.state() != QLocalSocket.ConnectedState and not self._socket.bytesAvailable():
                raise ConnectionError("LivePlotter server disconnected")
        return bytes(data)
|
|
649
|
+
|
|
650
|
+
|
|
651
|
+
def main() -> None:
    """Entry point: show a LivePlotterWindow inside a (possibly new) QApplication."""
    existing = QApplication.instance()
    app = existing if existing is not None else QApplication(sys.argv)

    window = LivePlotterWindow()
    window.show()

    # Only run the event loop when this call created the application;
    # otherwise the embedding host owns it.
    if existing is None:
        sys.exit(app.exec())
|
|
663
|
+
|
|
664
|
+
|
|
665
|
+
__all__ = ["LivePlotterWindow", "LivePlotterClient", "main"]
|
logqbit/logfolder.py
ADDED
|
@@ -0,0 +1,256 @@
|
|
|
1
|
+
import inspect
|
|
2
|
+
import itertools
|
|
3
|
+
import os
|
|
4
|
+
import socket
|
|
5
|
+
import threading
|
|
6
|
+
import time
|
|
7
|
+
import weakref
|
|
8
|
+
from datetime import datetime
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Callable
|
|
11
|
+
|
|
12
|
+
import numpy as np
|
|
13
|
+
import pandas as pd
|
|
14
|
+
from tqdm import tqdm
|
|
15
|
+
from tqdm.contrib.logging import logging_redirect_tqdm
|
|
16
|
+
|
|
17
|
+
from .registry import Registry, get_parser
|
|
18
|
+
|
|
19
|
+
yaml = get_parser()
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class LogFolder:
|
|
23
|
+
create_machine = socket.gethostname() # Can be overridden.
|
|
24
|
+
|
|
25
|
+
    def __init__(
        self,
        path: str | Path,
        create: bool = True,
        save_delay_secs: float = 1.0,
    ):
        """Open (or create) a log folder holding ``meta.yaml`` and ``data.parquet``.

        Args:
            path: folder location.
            create: when True, create the folder (and an initial meta.yaml
                with creation time/machine) if it does not exist yet.
            save_delay_secs: save-debounce delay handed to the data handler.

        Raises:
            FileNotFoundError: when the folder is missing and ``create`` is False.
        """
        path = Path(path)
        meta_path = path / "meta.yaml"
        data_path = path / "data.parquet"
        if path.exists() and path.is_dir():
            pass  # existing folder: reuse as-is
        elif create:
            path.mkdir(parents=True, exist_ok=True)
            with open(meta_path, "w", encoding="utf-8") as f:
                yaml.dump(
                    {
                        "create_time": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                        "create_machine": self.create_machine,
                    },
                    f,
                )
        else:
            raise FileNotFoundError(f"LogFolder at '{path}' does not exist.")

        self.path = path

        # reg stays None for a pre-existing folder that lacks a meta.yaml.
        if meta_path.exists():
            self.reg = Registry(meta_path, auto_reload=False)
        else:
            self.reg = None

        self._handler = _DataHandler(data_path, save_delay_secs, self)
|
|
57
|
+
|
|
58
|
+
    @property
    def df(self) -> pd.DataFrame:
        """Get the full dataframe, flushing all pending data rows first."""
        return self._handler.get_df()

    @property
    def meta_path(self) -> Path:
        """Path of the ``meta.yaml`` file (as held by the registry)."""
        return self.reg.path

    @property
    def data_path(self) -> Path:
        """Path of the ``data.parquet`` file (as held by the data handler)."""
        return self._handler.path
|
|
70
|
+
|
|
71
|
+
@classmethod
|
|
72
|
+
def new(cls, parent_path: Path) -> "LogFolder":
|
|
73
|
+
parent_path = Path(parent_path)
|
|
74
|
+
max_index = max(
|
|
75
|
+
(
|
|
76
|
+
int(entry.name)
|
|
77
|
+
for entry in os.scandir(parent_path)
|
|
78
|
+
if entry.is_dir() and entry.name.isdecimal()
|
|
79
|
+
),
|
|
80
|
+
default=-1,
|
|
81
|
+
)
|
|
82
|
+
new_index = max_index + 1
|
|
83
|
+
while (parent_path / str(new_index)).exists():
|
|
84
|
+
new_index += 1
|
|
85
|
+
new_folder = parent_path / str(new_index)
|
|
86
|
+
return cls(new_folder)
|
|
87
|
+
|
|
88
|
+
def add_row(self, **kwargs) -> None:
|
|
89
|
+
"""
|
|
90
|
+
Add a new row or multiple rows to the dataframe.
|
|
91
|
+
Supports both scalar and vector input.
|
|
92
|
+
For vector input, pandas will check length consistency.
|
|
93
|
+
"""
|
|
94
|
+
is_multi_row = [
|
|
95
|
+
k
|
|
96
|
+
for k, v in kwargs.items()
|
|
97
|
+
if hasattr(v, "__len__") and not isinstance(v, str)
|
|
98
|
+
]
|
|
99
|
+
if is_multi_row:
|
|
100
|
+
self._handler.add_multi_rows(pd.DataFrame(kwargs))
|
|
101
|
+
else:
|
|
102
|
+
self._handler.add_one_row(kwargs)
|
|
103
|
+
|
|
104
|
+
def capture(
    self,
    func: Callable[[float], dict[str, float | list[float]]],
    axes: list[float | list[float]] | dict[str, float | list[float]],
):
    """Sweep *func* over the cartesian product of the iterable axes.

    Scalar axes are treated as constants and passed to every call;
    iterable axes are swept. Each call's returned keyword results are
    appended as a row together with the current sweep coordinates.
    """
    if not isinstance(axes, dict):  # Assumes isinstance(axes, list)
        # Positional axes: pair them with func's parameter names, in order.
        fsig = inspect.signature(func)
        axes = dict(zip(fsig.parameters.keys(), axes))

    run_axs: dict[str, list[float]] = {}
    const_axs: dict[str, float] = {}
    for k, v in axes.items():
        if np.iterable(v):
            run_axs[k] = v
        else:
            const_axs[k] = v
    # Record the sweep layout at the top of the metadata: constants plus
    # [min, max, length] per swept axis.
    # NOTE(review): assumes each swept axis is a sized sequence
    # (min/max/len would fail or consume a generator) — confirm callers.
    self.add_meta_to_head(
        const=const_axs,
        dims={k: [min(a), max(a), len(a)] for k, a in run_axs.items()},
    )

    # Full cartesian product of the swept axes, materialized so tqdm
    # knows the total step count.
    step_table = list(itertools.product(*run_axs.values()))

    with logging_redirect_tqdm():
        for step in tqdm(step_table, ncols=80, desc=self.path.name):
            step_kws = dict(zip(run_axs.keys(), step))
            ret_kws = func(**step_kws, **const_axs)
            self.add_row(**step_kws, **ret_kws)
|
|
132
|
+
|
|
133
|
+
def add_meta(self, meta: dict = None, /, **kwargs):
|
|
134
|
+
if meta is None:
|
|
135
|
+
meta = {}
|
|
136
|
+
meta.update(kwargs)
|
|
137
|
+
self.reg.root.update(meta)
|
|
138
|
+
self.reg.save()
|
|
139
|
+
|
|
140
|
+
def add_meta_to_head(self, meta: dict = None, /, **kwargs):
|
|
141
|
+
if meta is None:
|
|
142
|
+
meta = {}
|
|
143
|
+
meta.update(kwargs)
|
|
144
|
+
for i, (k, v) in enumerate(meta.items()):
|
|
145
|
+
self.reg.root.insert(i, k, v)
|
|
146
|
+
self.reg.save()
|
|
147
|
+
|
|
148
|
+
@property
def indeps(self) -> list[str]:
    """Running (independent) axis names used for plotting."""
    return self.reg["indeps"]  # Let KeyError raise if not exists.
|
|
152
|
+
|
|
153
|
+
@indeps.setter
def indeps(self, value: list[str]) -> None:
    """Validate and store the independent-axis names in the registry."""
    # Short-circuit keeps the element check off non-list inputs;
    # same ValueError and message as before in both failure modes.
    if not isinstance(value, list) or not all(
        isinstance(item, str) for item in value
    ):
        raise ValueError("indeps must be a list of strings.")
    self.reg["indeps"] = value
|
|
161
|
+
|
|
162
|
+
def flush(self) -> None:
    """Flush the pending data immediately, block until done."""
    self._handler.flush()
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
class _DataHandler:
    """Buffers rows for a LogFolder and persists them to a parquet file.

    Rows accumulate in memory and a background daemon thread writes them
    after a debounce delay, so rapid add_row calls coalesce into one
    file write. Writes go to a ``.tmp`` file first and are swapped in
    with ``Path.replace``.
    """

    def __init__(self, path: str | Path, save_delay_secs: float, parent: LogFolder):
        self.path = Path(path)
        # Dataframe segments; consolidated lazily in get_df().
        self._segs: list[pd.DataFrame] = []
        if self.path.exists():
            self._segs.append(pd.read_parquet(self.path))
        # Scalar rows not yet converted into a dataframe segment.
        self._records: list[dict[str, float | int | str]] = []

        self.save_delay_secs = save_delay_secs
        self._should_stop = False
        # Set to skip the debounce delay and save immediately.
        self._skip_debounce = EventWithWaitingState()
        # Set whenever there is unsaved data.
        self._dirty = EventWithWaitingState()
        self._lock = threading.Lock()
        self._thread = threading.Thread(target=self._run, daemon=True)
        self._thread.start()
        # Stop the writer thread when the owning LogFolder is garbage
        # collected (finalize on the parent avoids a reference cycle).
        weakref.finalize(parent, self._cleanup)

    def get_df(self, _clear: bool = False) -> pd.DataFrame:
        """Consolidate all buffered rows/segments into one dataframe.

        ``_clear=True`` (used by the writer thread) also resets the
        dirty flag while still holding the lock.
        """
        with self._lock:
            if self._records:
                self._segs.append(pd.DataFrame.from_records(self._records))
                self._records = []

            if len(self._segs) == 0:
                df = pd.DataFrame({})
            elif len(self._segs) == 1:
                df = self._segs[0]
            else:
                df = pd.concat(self._segs)
                # Cache the concatenated result as the single segment.
                self._segs = [df]

            if _clear:
                self._dirty.clear()
            return df

    def add_one_row(self, kwargs: dict[str, float | int | str]):
        # Queue a single scalar row and wake the writer thread.
        with self._lock:
            self._records.append(kwargs)
            if not self._dirty.is_set():
                self._dirty.set()

    def add_multi_rows(self, df: pd.DataFrame):
        # Queue a dataframe of rows and wake the writer thread.
        with self._lock:
            if self._records:
                # Preserve row order: flush scalar rows before this batch.
                self._segs.append(pd.DataFrame.from_records(self._records))
                self._records = []
            self._segs.append(df)
            if not self._dirty.is_set():
                self._dirty.set()

    def _run(self):
        # Background writer loop: wait until dirty, debounce, then save.
        while not self._should_stop:
            self._dirty.wait()
            if self._should_stop:
                break
            # Debounce: wait up to save_delay_secs; returns True early
            # when flush()/_cleanup() sets _skip_debounce.
            if self._skip_debounce.wait(self.save_delay_secs):
                self._skip_debounce.clear()
            df = self.get_df(_clear=True)
            # Write to a temp file, then atomically swap it into place.
            tmp_path = self.path.with_suffix(".tmp")
            df.to_parquet(tmp_path, index=False)
            tmp_path.replace(self.path)

    def _cleanup(self):
        # Invoked via weakref.finalize at parent GC; must never raise.
        try:
            self._should_stop = True
            self._skip_debounce.set()  # Process all pending data.
            self._dirty.set()  # Just break the run loop.
            if self._thread.is_alive():
                self._thread.join(timeout=2)
        except Exception:
            pass

    def flush(self):
        """Flush the pending data immediately, block until done."""
        if self._skip_debounce.waiting:
            self._skip_debounce.set()
        # Busy-wait until the writer thread is idle again (parked on
        # _dirty.wait()), meaning all queued data has been written.
        while not self._dirty.waiting:
            time.sleep(0.01)
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
class EventWithWaitingState(threading.Event):
|
|
248
|
+
def __init__(self):
|
|
249
|
+
super().__init__()
|
|
250
|
+
self.waiting = False
|
|
251
|
+
|
|
252
|
+
def wait(self, timeout: float | None = None):
|
|
253
|
+
self.waiting = True
|
|
254
|
+
ret = super().wait(timeout)
|
|
255
|
+
self.waiting = False
|
|
256
|
+
return ret
|
logqbit/registry.py
ADDED
|
@@ -0,0 +1,229 @@
|
|
|
1
|
+
import re
|
|
2
|
+
import sys
|
|
3
|
+
import warnings
|
|
4
|
+
from collections.abc import Mapping, Sequence, Set
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import TYPE_CHECKING
|
|
7
|
+
|
|
8
|
+
import numpy as np
|
|
9
|
+
from ruamel.yaml import YAML
|
|
10
|
+
from ruamel.yaml.comments import CommentedMap
|
|
11
|
+
from typing_extensions import deprecated
|
|
12
|
+
|
|
13
|
+
if TYPE_CHECKING:
|
|
14
|
+
from ruamel.yaml.constructor import BaseConstructor
|
|
15
|
+
from ruamel.yaml.nodes import ScalarNode, SequenceNode
|
|
16
|
+
from ruamel.yaml.representer import BaseRepresenter
|
|
17
|
+
|
|
18
|
+
_sentinel = object()
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class FileSnap:
    """Lightweight change detector for a file, based on mtime and size."""

    __slots__ = ("path", "mtime", "size")

    def __init__(self, path: Path):
        self.path = Path(path)
        stat = self.path.stat()
        self.mtime = stat.st_mtime
        self.size = stat.st_size

    def changed(self) -> bool:
        """Return True if the file changed on disk; updates the snapshot."""
        stat = self.path.stat()
        current = (stat.st_mtime, stat.st_size)
        if current == (self.mtime, self.size):
            return False
        self.mtime, self.size = current
        return True
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class Registry:
    """A simple registry based on a YAML file.

    `get`/`set` values synchronized with the file unless explicitly calling `get_local`/`set_local`.
    ```python
    reg = Registry('config.yaml')
    reg['new_key/sub_key'] = 123  # synced with file
    ```

    Operations on `root` and subitems are **local** and need to be saved manually. e.g.
    ```python
    reg.reload()
    reg.root['another_key'] = 456  # local change, not synced until save.
    reg.save()
    ```
    Local changes will be discarded when `reload()`.

    NOTE: Local operations are useful for batch updates without frequent file I/O.
    """

    def __init__(self, path: str | Path, auto_reload: bool = True):
        self.path = Path(path)
        self.yaml = get_parser()
        self.root: CommentedMap = None
        self.load()
        self._snap = FileSnap(self.path)
        self.auto_reload = auto_reload

    def __getitem__(self, key: str):
        return self.get(key)

    def __setitem__(self, key: str, value):
        self.set(key, value, create_parents=True)

    def get(self, key: str, default=_sentinel):
        """Get value at slash-separated *key*, reloading from disk first if auto_reload."""
        if self.auto_reload:
            self.reload()
        return self.get_local(key, default)

    def set(self, key: str, value, create_parents: bool = True):
        """Set value at slash-separated *key* and write the file immediately."""
        if self.auto_reload:
            self.reload()
        self.set_local(key, value, create_parents)
        self.save()

    def get_local(self, key: str, default=_sentinel):
        """Get from the in-memory tree without touching the file.

        Re-raises the underlying KeyError/IndexError/TypeError when the
        key is missing and no *default* was given.
        """
        obj = self.root
        keys = key.split("/")
        for k in keys:
            try:
                obj = obj[k]
            except (KeyError, IndexError, TypeError):
                if default is _sentinel:
                    raise
                return default
        return obj

    def set_local(self, key: str, value, create_parents: bool = True):
        """Set in the in-memory tree without touching the file.

        Intermediate non-mapping values are replaced by fresh maps when
        *create_parents* is True; otherwise a KeyError is raised.
        """
        obj = self.root
        keys = key.split("/")
        for k in keys[:-1]:
            if not (k in obj and isinstance(obj[k], Mapping)):
                if create_parents:
                    obj[k] = CommentedMap()
                else:
                    raise KeyError(f"Parent key '{k}' does not exist.")
            obj = obj[k]
        obj[keys[-1]] = value

    def print_local(self):
        """Print the local content to stdout."""
        self.yaml.dump(self.root, sys.stdout)

    def reload(self):
        """Reloads the file if it has changed since the last load."""
        if self._snap.changed():
            self.load()
        return self.root

    def load(self):
        """Load YAML file to self.root."""
        # NOTE: `yaml.load` also returns `CommentedSeq`, `float`, `str`, `None`
        # or other built-in types depending on the top-level YAML content. But
        # only `CommentedMap` is legal for the use case of this class.
        with open(self.path, "r", encoding="utf-8") as f:
            self.root = self.yaml.load(f)
        return self.root

    def save(self, path: str | Path | None = None):
        """Save self.root to the YAML file atomically (via a temp file)."""
        # Coerce to Path: the signature accepts str, but a plain str has
        # no `.with_suffix` (previously raised AttributeError).
        path = self.path if path is None else Path(path)
        tmp_path = path.with_suffix('.tmp')
        with open(tmp_path, "w", encoding="utf-8") as f:
            self.yaml.dump(self.root, f)
        tmp_path.replace(path)

    @deprecated("For backward compatibility only.")
    def copy(self) -> dict:
        if self.auto_reload:
            self.reload()
        return _to_builtins(self.root)

    @deprecated("For backward compatibility only.")
    def cwd(self) -> str:
        return self["data_folder"]
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def get_parser() -> YAML:
    """Build a ruamel YAML parser configured for registry files.

    Round-trip mode preserves quotes; numpy arrays/scalars serialize as
    plain YAML sequences/builtins; labrad unit support is registered
    when pylabrad is importable.
    """
    yaml = YAML()
    yaml.preserve_quotes = True
    yaml.width = 100
    yaml.indent(mapping=2, sequence=4, offset=2)
    yaml.representer.add_representer(np.ndarray, _represent_numpy_array)
    # Multi-representer covers every numpy scalar subclass (np.float64, ...).
    yaml.representer.add_multi_representer(np.generic, _represent_numpy_scalar)
    _set_yaml_for_labrad_units(yaml)
    return yaml
|
|
152
|
+
|
|
153
|
+
def _represent_numpy_array(dumper: "BaseRepresenter", data: np.ndarray):
    """Dump ndarrays as plain (untagged) flow-style YAML sequences."""
    return dumper.represent_sequence("tag:yaml.org,2002:seq", data.tolist(), flow_style=True)
|
|
156
|
+
|
|
157
|
+
def _represent_numpy_scalar(dumper: "BaseRepresenter", data: np.generic):
    """Dump numpy scalars as their equivalent Python builtins via .item()."""
    return dumper.represent_data(data.item())
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
####### labrad.units support #########


def _set_yaml_for_labrad_units(yaml: YAML) -> YAML:
    # No-op fallback; overridden below when labrad imports successfully.
    return yaml  # placeholder if labrad is not available
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
try:
    import labrad.units as lab_units
    from labrad.units import Unit, Value, WithUnit

    # Overrides the no-op placeholder defined above.
    def _set_yaml_for_labrad_units(yaml: YAML) -> YAML:
        # Recognize bare "<number> <unit>" scalars (first char digit/sign)
        # and round-trip them as labrad quantities.
        # NOTE(review): annotated -> YAML but has no return statement;
        # get_parser ignores the result, so this is harmless.
        yaml.resolver.add_implicit_resolver(
            "!labrad_unit", _UNIT_PATTERN, list("+-0123456789")
        )
        yaml.constructor.add_constructor("!labrad_unit", _construct_labrad_value)
        yaml.representer.add_representer(WithUnit, _represent_labrad_value)
        yaml.representer.add_representer(Value, _represent_labrad_value)
except ImportError:
    # NOTE(review): ImportWarning is ignored by default warning filters;
    # run with -W default::ImportWarning to see this message.
    warnings.warn("labrad.units not found, unit support disabled.", ImportWarning)
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
# Matches "<number> <letters>" (e.g. "3.5 GHz"); group 1 is the magnitude
# (underscore digit separators allowed), group 2 the unit name (may be empty).
_UNIT_PATTERN = re.compile(r"^\s*([-+]?\d[\d_]*(?:\.\d[\d_]*)?)\s*([A-Za-z]*)\s*$")
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
def _construct_labrad_value(loader: "BaseConstructor", node: "ScalarNode"):
    """YAML constructor for !labrad_unit scalars.

    Returns the raw string unchanged when the scalar does not match the
    unit pattern or names a unit unknown to labrad.units.
    """
    raw: str = loader.construct_scalar(node)
    match = _UNIT_PATTERN.match(raw)
    if not match:
        return raw

    magnitude_raw, unit_name = match.groups()
    unit_obj: Unit | None = getattr(lab_units, unit_name, None)
    if unit_obj is None:
        return raw

    # Underscores act as digit separators (e.g. "1_000").
    magnitude = float(magnitude_raw.replace("_", ""))
    return magnitude * unit_obj
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
def _represent_labrad_value(dumper: "BaseRepresenter", data: WithUnit):
    """YAML representer emitting a labrad quantity as '!labrad_unit <mag> <unit>'."""
    unit_name = data.unit.name
    # NOTE(review): assumes data._value is a float (is_integer below
    # would fail on a plain int) — confirm against labrad.units.
    magnitude: float = data._value
    if magnitude.is_integer():
        # Drop the trailing ".0" so round-trips stay tidy.
        spaced = f"{int(magnitude)} {unit_name}"
    else:
        spaced = f"{magnitude} {unit_name}"
    return dumper.represent_scalar("!labrad_unit", spaced)
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
def _to_builtins(obj):
|
|
214
|
+
if isinstance(obj, Mapping):
|
|
215
|
+
return {_to_builtins(k): _to_builtins(v) for k, v in obj.items()}
|
|
216
|
+
if isinstance(obj, Sequence) and not isinstance(obj, (str, bytes)):
|
|
217
|
+
# BUG: labRAD dump tuple only, remove it!!
|
|
218
|
+
return tuple(_to_builtins(item) for item in obj)
|
|
219
|
+
if isinstance(obj, Set):
|
|
220
|
+
return {_to_builtins(item) for item in obj}
|
|
221
|
+
|
|
222
|
+
if isinstance(obj, int):
|
|
223
|
+
return int(obj)
|
|
224
|
+
if isinstance(obj, float):
|
|
225
|
+
return float(obj)
|
|
226
|
+
if isinstance(obj, str):
|
|
227
|
+
return str(obj)
|
|
228
|
+
|
|
229
|
+
return obj
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: logqbit
|
|
3
|
+
Version: 0.0.1
|
|
4
|
+
Summary: Simple data logger and live plotter for lab-scale experiments.
|
|
5
|
+
Requires-Dist: numpy>=2.3.3
|
|
6
|
+
Requires-Dist: pandas>=2.3.3
|
|
7
|
+
Requires-Dist: pyarrow>=21.0.0
|
|
8
|
+
Requires-Dist: pyqtgraph>=0.13.7
|
|
9
|
+
Requires-Dist: pyside6>=6.9.3
|
|
10
|
+
Requires-Dist: ruamel-yaml>=0.18.15
|
|
11
|
+
Requires-Dist: tqdm>=4.67.1
|
|
12
|
+
Requires-Dist: pylabrad>=0.98.3 ; extra == 'labrad'
|
|
13
|
+
Requires-Python: >=3.13
|
|
14
|
+
Project-URL: Homepage, https://github.com/Qiujv/logqbit
|
|
15
|
+
Provides-Extra: labrad
|
|
16
|
+
Description-Content-Type: text/markdown
|
|
17
|
+
|
|
18
|
+
# LogQbit
|
|
19
|
+
|
|
20
|
+
*LogQbit* 是一个轻量且可扩展的实验数据记录工具包。
|
|
21
|
+
它最初用于记录 量子比特(qubit)实验测量数据,但凭借灵活的数据格式和实时可视化功能,同样适用于 任意中小规模(≤MB级)实验数据 的采集与管理。
|
|
22
|
+
|
|
23
|
+
*LogQbit* is a lightweight and extensible data logging toolkit for lab-scale experiments.
|
|
24
|
+
It was originally developed for recording quantum qubit measurement data,
|
|
25
|
+
but its flexible format and real-time visualization tools make it suitable for any small to medium (≤MB-level) experimental dataset.
|
|
26
|
+
|
|
27
|
+
通过使用 `logqbit`,你可以:
|
|
28
|
+
|
|
29
|
+
- 以最少的样板代码记录结构化实验数据;
|
|
30
|
+
|
|
31
|
+
- 使用集成的实时绘图工具可视化数据流;
|
|
32
|
+
|
|
33
|
+
- 通过交互式日志浏览器查看与分析记录的数据。
|
|
34
|
+
|
|
35
|
+
With `logqbit`, you can:
|
|
36
|
+
|
|
37
|
+
- Record structured experimental data with minimal boilerplate.
|
|
38
|
+
|
|
39
|
+
- Visualize data streams in real time with an integrated live plotter.
|
|
40
|
+
|
|
41
|
+
- Browse and analyze logged results through an interactive log browser.
|
|
42
|
+
|
|
43
|
+
无论是量子比特读出、参数扫描,还是传感器输出记录,
|
|
44
|
+
`logqbit` 都能为你提供一个简洁而可靠的实验数据采集与回溯工作流。
|
|
45
|
+
|
|
46
|
+
Whether you are monitoring qubit readouts, scanning a parameter sweep, or simply logging sensor outputs,
|
|
47
|
+
`logqbit` provides a simple and robust workflow for capturing and revisiting your experimental data.
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
logqbit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
2
|
+
logqbit/assets/icon.svg,sha256=RJ-_0hYbS1YZMOXAAmkHrPfs9CcvEvHCguIQwfzs2mw,721
|
|
3
|
+
logqbit/browser.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
4
|
+
logqbit/constants.py,sha256=r0-qpPMaVOqwfdfY0oDBOU70DIHLTLhGiGUtd8pH_Fw,179
|
|
5
|
+
logqbit/live_plotter.py,sha256=I7Njn4aTAuEZSATrI637fJPpmSZw2FvR597eUWu850s,23581
|
|
6
|
+
logqbit/logfolder.py,sha256=LbAAW1UME61ENjsGu48ywXCS4ODtOqcD8KLa1LRbKU0,8073
|
|
7
|
+
logqbit/registry.py,sha256=8sTFkKY9T_oez6BptVWEBUcvppi4nLeziZaEahZTNLc,7475
|
|
8
|
+
logqbit-0.0.1.dist-info/WHEEL,sha256=eh7sammvW2TypMMMGKgsM83HyA_3qQ5Lgg3ynoecH3M,79
|
|
9
|
+
logqbit-0.0.1.dist-info/entry_points.txt,sha256=ePkNeOIr9vbB1rNUAUEXXCoOali6KolvradqAcnUKno,64
|
|
10
|
+
logqbit-0.0.1.dist-info/METADATA,sha256=i1b_3afutfferwqVZ9tKL77OckP50JRppbpAQky_Upw,1957
|
|
11
|
+
logqbit-0.0.1.dist-info/RECORD,,
|