arbok_inspector-1.3.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- arbok_inspector/__init__.py +1 -0
- arbok_inspector/analysis/analysis_base.py +29 -0
- arbok_inspector/analysis/prepare_data.py +118 -0
- arbok_inspector/classes/base_run.py +275 -0
- arbok_inspector/classes/dim.py +26 -0
- arbok_inspector/classes/native_run.py +172 -0
- arbok_inspector/classes/qcodes_run.py +65 -0
- arbok_inspector/cli.py +4 -0
- arbok_inspector/configurations/1d_plot.json +49 -0
- arbok_inspector/configurations/2d_plot.json +60 -0
- arbok_inspector/dev.py +19 -0
- arbok_inspector/helpers/string_formaters.py +37 -0
- arbok_inspector/helpers/unit_formater.py +29 -0
- arbok_inspector/main.py +15 -0
- arbok_inspector/pages/__init__.py +2 -0
- arbok_inspector/pages/database_browser.py +139 -0
- arbok_inspector/pages/greeter.py +93 -0
- arbok_inspector/pages/run_view.py +259 -0
- arbok_inspector/state.py +101 -0
- arbok_inspector/test.db +0 -0
- arbok_inspector/test_main.py +65 -0
- arbok_inspector/widgets/build_run_selecter.py +163 -0
- arbok_inspector/widgets/build_run_view_actions.py +104 -0
- arbok_inspector/widgets/build_xarray_grid.py +145 -0
- arbok_inspector/widgets/build_xarray_html.py +57 -0
- arbok_inspector/widgets/json_plot_settings_dialog.py +77 -0
- arbok_inspector/widgets/update_day_selecter.py +64 -0
- arbok_inspector-1.3.0.dist-info/METADATA +90 -0
- arbok_inspector-1.3.0.dist-info/RECORD +33 -0
- arbok_inspector-1.3.0.dist-info/WHEEL +5 -0
- arbok_inspector-1.3.0.dist-info/entry_points.txt +2 -0
- arbok_inspector-1.3.0.dist-info/licenses/LICENSE +21 -0
- arbok_inspector-1.3.0.dist-info/top_level.txt +1 -0

arbok_inspector/pages/run_view.py
ADDED

@@ -0,0 +1,259 @@
"""Run view page showing the data and plots for a specific run"""
from __future__ import annotations
from typing import TYPE_CHECKING
from datetime import datetime, timedelta
import json
import os
import importlib.resources as resources

from nicegui import ui, app

from arbok_inspector.state import inspector
from arbok_inspector.widgets.build_xarray_grid import build_xarray_grid
from arbok_inspector.widgets.build_xarray_html import build_xarray_html
from arbok_inspector.widgets.build_run_view_actions import build_run_view_actions
from arbok_inspector.helpers.unit_formater import unit_formatter
from arbok_inspector.classes.qcodes_run import QcodesRun
from arbok_inspector.classes.native_run import NativeRun

from arbok_inspector.classes.dim import Dim

RUN_TABLE_COLUMNS = [
    {'field': 'name', 'filter': 'agTextColumnFilter', 'floatingFilter': True},
    {'field': 'size'},
    {'field': 'x', 'checkboxSelection': True},
    {'field': 'y', 'checkboxSelection': True},
    {'field': 'average', 'checkboxSelection': True},
]

AXIS_OPTIONS = ['average', 'select_value', 'y-axis', 'x-axis']

EXPANSION_CLASSES = 'w-full p-0 gap-1 border border-gray-400 rounded-lg no-wrap items-start pt-0 mt-0'
TITLE_CLASSES = 'text-lg font-semibold'

@ui.page('/run/{run_id}')
async def run_page(run_id: str):
    """
    Page showing the details and plots for a specific run.

    Args:
        run_id (str): ID of the run to display
    """
    ui.page_title(f"{run_id}")
    _ = await ui.context.client.connected()
    if 'run' in app.storage.tab:
        print('run already exists!')
    if inspector.database_type == 'qcodes':
        run = QcodesRun(int(run_id))
    elif inspector.database_type == 'native_arbok':
        run = NativeRun(int(run_id))
    else:
        raise ValueError(
            "Database type must be 'qcodes' or 'native_arbok' but is: "
            f"{inspector.database_type}")

    app.storage.tab["placeholders"] = {'plots': None}
    app.storage.tab["run"] = run
    with resources.files("arbok_inspector.configurations").joinpath("1d_plot.json").open("r") as f:
        app.storage.tab["plot_dict_1D"] = json.load(f)
    with resources.files("arbok_inspector.configurations").joinpath("2d_plot.json").open("r") as f:
        app.storage.tab["plot_dict_2D"] = json.load(f)

    ui.label(f'Run-ID: {run_id}').classes('text-2xl font-bold')
    with ui.row().classes('w-full gap-4'):
        with ui.column().classes('flex-none'):
            with ui.card().classes('w-full gap-2'):
                ui.label("Coordinates:").classes('text-lg font-semibold pl-2')
                ui.separator().classes('w-full my-1')
                for i, _ in run.parallel_sweep_axes.items():
                    add_dim_dropdown(sweep_idx=i)
            with ui.card().classes('w-full gap-2'):
                ui.label("Results:").classes(TITLE_CLASSES)
                for i, result in enumerate(run.full_data_set):
                    value = False
                    if result in run.plot_selection:
                        value = True
                    ui.checkbox(
                        text=result.replace("__", "."),
                        value=value,
                        on_change=lambda e, r=result: run.update_plot_selection(e.value, r),
                    ).classes('text-sm h-4').props('color=purple')
            with ui.card().classes('w-full gap-2'):
                ui.label("Actions:").classes(TITLE_CLASSES)
                build_run_view_actions()
            with ui.expansion('Run info', icon='info').classes('w-full gap-2'):
                # ui.label("Run info:").classes(TITLE_CLASSES)
                for column_name, conf in run.database_columns.items():
                    value = str(conf['value'])
                    if conf['value'] is None or len(value) > 20:
                        continue
                    print(column_name, type(value))
                    if 'label' in conf:
                        label = ui.label(f"{conf['label']}: ")
                    else:
                        label = ui.label(f"{column_name.upper()}: ")
                    label.classes('font-semibold m-0 p-0')
                    ui.label(value).classes("m-0 p-0 ml-5")

        with ui.column().classes('flex-1 min-w-0'):
            with ui.expansion('Plots', icon='stacked_line_chart', value=True)\
                    .classes(EXPANSION_CLASSES):
                app.storage.tab["placeholders"]["plots"] = ui.row().\
                    classes('w-full min-h-[50vh] p-1 items-stretch')
                build_xarray_grid()

            #.style('line-height: 1rem; padding-top: 0; padding-bottom: 0;')
            with ui.expansion('xarray summary', icon='summarize', value=False)\
                    .classes(EXPANSION_CLASSES):
                build_xarray_html()
            with ui.expansion('analysis', icon='science', value=False)\
                    .classes(EXPANSION_CLASSES):
                with ui.row():
                    ui.label("Working on it! -Andi").classes(TITLE_CLASSES)
            with ui.expansion('metadata', icon='numbers', value=False)\
                    .classes(f"{EXPANSION_CLASSES} overflow-x-auto"):
                placeholder_metadata = {}

                placeholder_metadata['code'] = ui.code(
                    content='Placeholder for QUA program',
                    language='python')\
                    .classes('w-full overflow-x-auto whitespace-pre')
                ui.button(
                    icon='code',
                    text="load qua program",
                    on_click=lambda: load_qua_code(run, placeholder_metadata),
                )
                ui.button(
                    icon='download',
                    text="download serialized qua program",
                    on_click=lambda: download_qua_code(run),
                )

def add_dim_dropdown(sweep_idx: int):
    """
    Add a dropdown to select the dimension option for a given sweep index.

    Args:
        sweep_idx (int): Index of the sweep to add the dropdown for
    """
    run = app.storage.tab["run"]
    width = 'w-full'
    dim = run.sweep_dict[sweep_idx]
    local_placeholder = {"slider": None}
    #with ui.column().classes('w-full no-wrap items-center gap-1'):
    dims_names = run.parallel_sweep_axes[sweep_idx]
    #ui.separator().classes('w-full my-1')
    # with ui.card().classes('w-full gap-1 px-2 py-2'):
    ui.radio(
        options=dims_names,
        value=dim.name,
        on_change=lambda e: update_sweep_dim_name(dim, e.value)
    ).classes(f"{width} text-xs m-0 p-0").props('dense')
    ui_element = ui.select(
        options=AXIS_OPTIONS,
        value=str(dim.option),
        label=f'{dim.name.replace("__", ".")}',
        on_change=lambda e: update_dim_selection(
            dim, e.value, local_placeholder["slider"])
    ).classes(f"{width} text-sm m-0 p-0").props('dense')
    dim.ui_selector = ui_element
    local_placeholder["slider"] = ui.column().classes('w-full')
    if dim.option == 'select_value':
        build_dim_slider(run, dim, local_placeholder["slider"])

def update_dim_selection(dim: Dim, value: str, slider_placeholder):
    """
    Update the dimension/sweep selection and rebuild the plot grid.

    Args:
        dim (Dim): The dimension object to update
        value (str): The new selection value
        slider_placeholder: The UI placeholder to update
    """
    run = app.storage.tab["run"]
    if slider_placeholder is not None:
        slider_placeholder.clear()
    print(value)
    if value == 'average':
        run.update_subset_dims(dim, 'average')
        dim.option = 'average'
    elif value == 'select_value':
        with slider_placeholder:
            build_dim_slider(run, dim, slider_placeholder)
    else:
        run.update_subset_dims(dim, value)
        dim.option = value
    build_xarray_grid()

def build_dim_slider(run: Run, dim: Dim, slider_placeholder):
    """
    Build a slider for selecting the index of a dimension.

    Args:
        run (Run): The run whose data set is being sliced
        dim (Dim): The dimension object
        slider_placeholder: The UI placeholder to add the slider to
    """
    dim_size = run.full_data_set.sizes[dim.name]
    with ui.row().classes("w-full items-center"):
        with ui.column().classes('flex-grow'):
            slider = ui.slider(
                min=0, max=dim_size - 1, step=1, value=0,
                on_change=lambda e: run.update_subset_dims(dim, 'select_value', e.value),
            ).classes('flex-grow')\
                .props('color="purple" markers label-always')
        label = ui.html('').classes('shrink-0 text-right px-2 py-1 bg-purple text-white rounded-lg text-xs font-normal text-center')
        update_value_from_dim_slider(label, slider, dim, plot=False)
        slider.on(
            'update:model-value',
            lambda e: update_value_from_dim_slider(label, slider, dim),
            throttle=0.2, leading_events=False)

def update_value_from_dim_slider(label, slider, dim: Dim, plot=True):
    """
    Update the label next to the slider with the current value and unit.

    Args:
        label: The UI label to update
        slider: The UI slider to get the value from
        dim (Dim): The dimension object
    """
    run = app.storage.tab["run"]
    label_txt = f' {unit_formatter(run, dim, slider.value)} '
    label.set_content(label_txt)
    if plot:
        build_xarray_grid()

def update_sweep_dim_name(dim: Dim, new_name: str):
    """
    Update the name of the dimension in the sweep dict and the dim object.

    Args:
        dim (Dim): The dimension object to update
        new_name (str): The new name for the dimension
    """
    run = app.storage.tab["run"]
    dim.name = new_name
    dim.ui_selector.label = new_name.replace("__", ".")
    build_xarray_grid()

def load_qua_code(run: Run, placeholder: dict):
    """Load and display the QUA code for the given run."""
    try:
        qua_code = run.get_qua_code(as_string=True)
        qua_code = qua_code.split("config = {")[0]
        placeholder['code'].set_content(qua_code)
    except Exception as e:
        ui.notify(f'Error loading QUA code: {str(e)}', type='negative')
        raise e

def download_qua_code(run: Run) -> None:
    """Download the serialized QUA code for the given run."""
    try:
        qua_code_bytes = run.get_qua_code(as_string=False)
        ui.download(qua_code_bytes, 'test.py')
        #os.remove(file_name)
    except Exception as e:
        ui.notify(f'Error downloading QUA code: {str(e)}', type='negative')
        raise e

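Note: the AXIS_OPTIONS above ('average', 'select_value', 'y-axis', 'x-axis') decide how each sweep dimension is collapsed before plotting. The actual reduction happens in run.update_subset_dims and build_xarray_grid, which are not part of this hunk; the sketch below only illustrates the idea with plain xarray, and the names reduce_for_plot, options and selected_index are hypothetical:

import xarray as xr

def reduce_for_plot(ds: xr.Dataset, options: dict[str, str],
                    selected_index: dict[str, int]) -> xr.Dataset:
    """Collapse every dimension that is not kept as an x- or y-axis."""
    for dim, option in options.items():
        if option == 'average':
            ds = ds.mean(dim=dim)                      # average over the sweep
        elif option == 'select_value':
            ds = ds.isel({dim: selected_index[dim]})   # pick one slider index
        # dimensions marked 'x-axis' / 'y-axis' are kept for plotting
    return ds
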
arbok_inspector/state.py
ADDED
@@ -0,0 +1,101 @@
from nicegui import ui
from pathlib import Path
import asyncio
from typing import Optional

from qcodes.dataset import initialise_or_create_database_at

import fsspec
import sqlite3
from sqlalchemy import create_engine, select, func

class ArbokInspector:
    def __init__(self):
        self.qcodes_database_path: Optional[Path] = None
        self.initial_dialog = None
        self.database_type = None  # 'qcodes' or 'arbok'

        self.qcodes_database_path = None
        self.conn = None
        self.cursor = None

        self.database_engine = None
        self.minio_filesystem = None
        self.minio_bucket = None

    def connect_qcodes_database(self):
        self.conn = sqlite3.connect(self.qcodes_database_path)
        self.conn.row_factory = sqlite3.Row
        self.cursor = self.conn.cursor()
        initialise_or_create_database_at(self.qcodes_database_path)

    def connect_to_qcodes_database(self, path_input) -> None:
        """Connect to a QCoDeS database given a file path input widget."""
        self.database_engine = None
        self.minio_filesystem = None
        self.minio_bucket = None
        if path_input.value is None:
            ui.notify('Please enter a file path', type='warning')
            return
        try:
            file_path = Path(path_input.value)
            if file_path.exists():
                self.qcodes_database_path = file_path
                ui.notify(f'Database path set: {file_path.name}', type='positive')
                try:
                    self.connect_qcodes_database()
                    if self.initial_dialog:
                        self.initial_dialog.close()
                    self.database_type = 'qcodes'
                    ui.navigate.to('/browser')
                except sqlite3.Error as e:
                    ui.notify(f'Error connecting to database: {str(e)}', type='negative')
            else:
                ui.notify('File does not exist', type='negative')
        except Exception as ex:
            ui.notify(f'Error: {str(ex)}', type='negative')
        self.database_type = 'qcodes'

    def connect_to_arbok_database(
            self,
            database_url: str,
            minio_url: str,
            minio_user: str,
            minio_password: str,
            minio_bucket: str) -> None:
        """
        Connect to a native Arbok database given connection parameters.

        Args:
            database_url (str): The database connection URL.
            minio_url (str): The MinIO server URL.
            minio_user (str): The MinIO username.
            minio_password (str): The MinIO password.
            minio_bucket (str): The MinIO bucket name.
        """
        self.qcodes_database_path = None
        self.conn = None
        self.cursor = None
        try:
            self.database_engine = create_engine(database_url)
        except Exception as ex:
            ui.notify(f'Error creating database engine: {str(ex)}', type='negative')
            return

        try:
            self.minio_filesystem = fsspec.filesystem(
                protocol="s3",
                client_kwargs={"endpoint_url": minio_url},
                key=minio_user,
                secret=minio_password
            )
        except Exception as ex:
            ui.notify(f'Error connecting to MinIO: {str(ex)}', type='negative')
            return
        if self.initial_dialog:
            self.initial_dialog.close()
        self.database_type = 'native_arbok'
        self.minio_bucket = minio_bucket
        ui.navigate.to('/browser')

inspector = ArbokInspector()

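Note: `inspector` is a module-level singleton shared by all pages. A minimal sketch of how a page could hand credentials to connect_to_arbok_database is shown below; the greeter page presumably does something similar, and the route, URL, credentials and bucket name here are placeholders rather than values taken from the package:

from nicegui import ui
from arbok_inspector.state import inspector

@ui.page('/connect-example')
def connect_example():
    # clicking the button creates the SQLAlchemy engine and the MinIO
    # filesystem, then navigates to the /browser page on success
    ui.button(
        'connect',
        on_click=lambda: inspector.connect_to_arbok_database(
            database_url='postgresql+psycopg2://user:pass@localhost:5432/arbok',
            minio_url='http://localhost:9000',
            minio_user='minio_user',
            minio_password='minio_password',
            minio_bucket='arbok-data',
        ),
    )
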
arbok_inspector/test.db
ADDED
Binary file

arbok_inspector/test_main.py
ADDED

@@ -0,0 +1,65 @@
import numpy as np
import xarray as xr
import plotly.express as px
from nicegui import ui

# --- Create a sample 4D DataArray ---
data = np.random.rand(5, 10, 20, 30)  # shape: time, depth, y, x
coords = {
    'time': np.arange(5),
    'depth': np.linspace(0, 100, 10),
    'y': np.linspace(-5, 5, 20),
    'x': np.linspace(-5, 5, 30),
}
array = xr.DataArray(data, dims=('time', 'depth', 'y', 'x'), coords=coords)

# --- Define axes to plot ---
x_dim = 'x'
y_dim = 'y'
slider_dims = [dim for dim in array.dims if dim not in (x_dim, y_dim)]

# --- UI State ---
slider_values = {dim: 0 for dim in slider_dims}
plot_container = ui.row().classes('w-full justify-center')

# --- Heatmap update function ---
def update_plot():
    # Index the array using current slider values
    sel = {dim: slider_values[dim] for dim in slider_dims}
    slice_2d = array.isel(**sel)

    # Convert to plotly figure
    fig = px.imshow(
        slice_2d.values,
        labels={'x': x_dim, 'y': y_dim},
        x=array.coords[x_dim].values,
        y=array.coords[y_dim].values,
        color_continuous_scale='Viridis',
    )
    fig.update_layout(title=f'{x_dim} vs {y_dim} | ' + ', '.join([f'{dim}={slider_values[dim]}' for dim in slider_dims]))

    plot_container.clear()
    with plot_container:
        ui.plotly(fig).classes('max-w-3xl max-h-96')

# --- Create sliders for all non-plotted dimensions ---
for dim in slider_dims:
    max_index = len(array.coords[dim]) - 1
    def make_slider(d=dim):
        def on_change(event):
            # NiceGUI passes a ValueChangeEventArguments object; read .value
            slider_values[d] = int(event.value)
            update_plot()
        ui.slider(
            min=0,
            max=max_index,
            value=0,
            step=1,
            on_change=on_change,
            # label=f'{d} ({array.coords[d].values[0]})'
        ).props('label-always').classes('w-full')
    make_slider()

# --- Initial plot ---
update_plot()

ui.run()

arbok_inspector/widgets/build_run_selecter.py
ADDED

@@ -0,0 +1,163 @@
from datetime import datetime, timedelta

from nicegui import ui, app
from sqlalchemy import text

from arbok_inspector.state import inspector

small_col_width = 50
med_col_width = 60

QCODES_RUN_GRID_COLUMN_DEFS = [
    {'headerName': 'Run ID', 'field': 'run_id', "width": small_col_width},
    {'headerName': 'Name', 'field': 'name'},
    {'headerName': 'Experiment', 'field': 'experiment_name'},
    {'headerName': '# Results', 'field': 'result_counter', "width": small_col_width},
    {'headerName': 'Started', 'field': 'run_timestamp', "width": small_col_width},
    {'headerName': 'Finish', 'field': 'completed_timestamp', "width": small_col_width},
]
NATIVE_RUN_GRID_COLUMN_DEFS = [
    {'headerName': 'Run ID', 'field': 'run_id', "width": small_col_width},
    {'headerName': 'Name', 'field': 'name'},
    {'headerName': 'Experiment', 'field': 'experiment'},
    {'headerName': '# results', 'field': 'result_count', "width": med_col_width},
    {'headerName': '# batches', 'field': 'batch_count', "width": med_col_width},
    {'headerName': 'started', 'field': 'start_time', "width": med_col_width},
    {'headerName': 'last result', 'field': 'completed_time', "width": med_col_width},
]
AGGRID_STYLE = 'height: 95%; min-height: 0;'

def build_run_selecter(target_day):
    """(Re)build the run selection grid for the given day inside the stored container."""
    container = app.storage.tab['run_selecter']
    container.clear()

    offset_hours = app.storage.general["timezone"]
    print(f"Showing runs from {target_day}")
    if inspector.database_type == 'qcodes':
        rows = get_qcodes_runs_for_day(inspector.cursor, target_day, offset_hours)
        column_defs = QCODES_RUN_GRID_COLUMN_DEFS
    else:
        rows = get_native_arbok_runs_for_day(inspector.database_engine, target_day, offset_hours)
        column_defs = NATIVE_RUN_GRID_COLUMN_DEFS
    run_grid_rows = []
    columns = [x['field'] for x in column_defs]
    for run in rows:
        run_dict = {}
        for key in columns:
            if key in run:
                value = run[key]
                if 'time' in key:
                    if value is not None:
                        local_dt = datetime.utcfromtimestamp(value)
                        local_dt += timedelta(hours=offset_hours)
                        value = local_dt.strftime('%H:%M:%S')
                    else:
                        value = 'N/A'
                run_dict[key] = value
        run_grid_rows.insert(0, run_dict)
    with container:
        ui.aggrid(
            {
                'defaultColDef': {'flex': 1},
                'columnDefs': column_defs,
                'rowData': run_grid_rows,
                'rowSelection': 'multiple',
            },
        ).classes('ag-theme-balham-dark').style(
            AGGRID_STYLE
        ).on(
            'cellClicked',
            lambda event: open_run_page(event.args['data']['run_id'])
        )
    ui.notify(
        'Run selector updated: \n'
        f'found {len(run_grid_rows)} run(s)',
        type='positive',
        multi_line=True,
        classes='multi-line-notification',
        position='top-right'
    )

def get_qcodes_runs_for_day(
        cursor, target_day: str, offset_hours: float
    ) -> list[dict]:
    """
    Fetch runs from a QCoDeS (SQLite) database, joined with experiments,
    excluding the 'qua_program' and 'snapshot' columns entirely.
    """
    hours = int(offset_hours)
    minutes = int((offset_hours - hours) * 60)
    offset_str = f"{'+' if offset_hours >= 0 else '-'}{abs(hours):02d}:{abs(minutes):02d}"

    # get all columns except the ones we want to exclude
    exclude_columns = {'qua_program', 'snapshot'}
    cursor.execute("PRAGMA table_info(runs)")
    all_columns = [col['name'] for col in cursor.fetchall() if col['name'] not in exclude_columns]

    # construct SELECT statement
    columns_str = ", ".join(f"r.{col}" for col in all_columns)

    query = f"""
        SELECT {columns_str}, e.name AS experiment_name
        FROM runs r
        JOIN experiments e ON r.exp_id = e.exp_id
        WHERE DATE(datetime(r.run_timestamp, 'unixepoch', '{offset_str}')) = ?
        ORDER BY r.run_timestamp;
    """

    cursor.execute(query, (target_day,))
    rows = cursor.fetchall()
    row_dicts = [dict(row) for row in rows]
    return row_dicts

NATIVE_COLUMNS = {
    'run_id': 'run ID',
    'name': 'name',
    'result_count': '# results',
    'batch_count': '# batches',
    'start_time': 'started',
    'completed_time': 'last result',
    'is_completed': 'completed'
}

def get_native_arbok_runs_for_day(
        engine,
        target_day: str,
        offset_hours: float) -> list[dict]:
    """
    Fetch runs from a native Arbok database.

    Args:
        engine: SQLAlchemy engine connected to the database
        target_day (str): The target day in 'YYYY-MM-DD'
        offset_hours (float): The timezone offset in hours
    Returns:
        list[dict]: List of runs as dictionaries
    """
    query = text("""
        SELECT r.*, e.name AS experiment_name
        FROM runs r
        JOIN experiments e ON r.exp_id = e.exp_id
        WHERE (to_timestamp(r.start_time) + (:offset_hours || ' hours')::interval)::date = :target_day
        ORDER BY r.start_time;
    """)

    with engine.connect() as conn:
        result = conn.execute(
            query, {"offset_hours": offset_hours, "target_day": target_day}
        )
        runs_filtered = [
            {**{col: row[col] for col in NATIVE_COLUMNS.keys()},
             "experiment": row["experiment_name"]}
            for row in result.mappings()
        ]

    return runs_filtered

def open_run_page(run_id: int):
    """Persist the grid inputs and open the run page in a new tab."""
    app.storage.general["avg_axis"] = app.storage.tab["avg_axis_input"].value
    app.storage.general["result_keywords"] = app.storage.tab["result_keyword_input"].value
    print("Result Keywords:")
    print(app.storage.general['result_keywords'])
    ui.navigate.to(f'/run/{run_id}', new_tab=True)

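Note: the offset_str built in get_qcodes_runs_for_day converts a (possibly fractional) timezone offset into the '±HH:MM' modifier accepted by SQLite's datetime(). A small self-contained check of that arithmetic, using the same formula under a hypothetical helper name offset_string:

def offset_string(offset_hours: float) -> str:
    # same arithmetic as in get_qcodes_runs_for_day above
    hours = int(offset_hours)
    minutes = int((offset_hours - hours) * 60)
    return f"{'+' if offset_hours >= 0 else '-'}{abs(hours):02d}:{abs(minutes):02d}"

assert offset_string(9.5) == '+09:30'   # e.g. Australian Central time
assert offset_string(-9.5) == '-09:30'
assert offset_string(0) == '+00:00'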