sqil-core 0.1.0__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqil_core/__init__.py +1 -0
- sqil_core/config_log.py +42 -0
- sqil_core/experiment/__init__.py +11 -0
- sqil_core/experiment/_analysis.py +125 -0
- sqil_core/experiment/_events.py +25 -0
- sqil_core/experiment/_experiment.py +553 -0
- sqil_core/experiment/data/plottr.py +778 -0
- sqil_core/experiment/helpers/_function_override_handler.py +111 -0
- sqil_core/experiment/helpers/_labone_wrappers.py +12 -0
- sqil_core/experiment/instruments/__init__.py +2 -0
- sqil_core/experiment/instruments/_instrument.py +190 -0
- sqil_core/experiment/instruments/drivers/SignalCore_SC5511A.py +515 -0
- sqil_core/experiment/instruments/local_oscillator.py +205 -0
- sqil_core/experiment/instruments/server.py +175 -0
- sqil_core/experiment/instruments/setup.yaml +21 -0
- sqil_core/experiment/instruments/zurich_instruments.py +55 -0
- sqil_core/fit/__init__.py +23 -0
- sqil_core/fit/_core.py +179 -31
- sqil_core/fit/_fit.py +544 -94
- sqil_core/fit/_guess.py +304 -0
- sqil_core/fit/_models.py +50 -1
- sqil_core/fit/_quality.py +266 -0
- sqil_core/resonator/__init__.py +2 -0
- sqil_core/resonator/_resonator.py +256 -74
- sqil_core/utils/__init__.py +40 -13
- sqil_core/utils/_analysis.py +226 -0
- sqil_core/utils/_const.py +83 -18
- sqil_core/utils/_formatter.py +127 -55
- sqil_core/utils/_plot.py +272 -6
- sqil_core/utils/_read.py +178 -95
- sqil_core/utils/_utils.py +147 -0
- {sqil_core-0.1.0.dist-info → sqil_core-1.1.0.dist-info}/METADATA +9 -1
- sqil_core-1.1.0.dist-info/RECORD +36 -0
- {sqil_core-0.1.0.dist-info → sqil_core-1.1.0.dist-info}/WHEEL +1 -1
- sqil_core-0.1.0.dist-info/RECORD +0 -19
- {sqil_core-0.1.0.dist-info → sqil_core-1.1.0.dist-info}/entry_points.txt +0 -0
sqil_core/utils/_plot.py
CHANGED
@@ -1,8 +1,26 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import matplotlib.pyplot as plt
 import numpy as np
+from matplotlib.gridspec import GridSpec
+
+from sqil_core.fit import transform_data
+
+from ._analysis import remove_linear_background, remove_offset, soft_normalize
+from ._const import PARAM_METADATA
+from ._formatter import (
+    ParamInfo,
+    format_number,
+    get_relevant_exp_parameters,
+    param_info_from_schema,
+)
+from ._read import extract_h5_data, get_data_and_info, map_data_dict, read_json

-
-from .
-from .
+if TYPE_CHECKING:
+    from sqil_core.fit._core import FitResult
+    from sqil_core.utils import ParamDict


 def set_plot_style(plt):
@@ -12,7 +30,7 @@ def set_plot_style(plt):
         "xtick.labelsize": 18,  # X-axis tick labels
         "ytick.labelsize": 18,  # Y-axis tick labels
         "lines.linewidth": 2.5,  # Line width
-        "lines.marker": "o",
+        # "lines.marker": "o",
         "lines.markersize": 7,  # Marker size
         "lines.markeredgewidth": 1.5,  # Marker line width
         "lines.markerfacecolor": "none",
@@ -24,6 +42,7 @@ def set_plot_style(plt):
         "ytick.major.width": 1.5,
         "figure.figsize": (20, 7),
     }
+    reset_plot_style(plt)
     return plt.rcParams.update(style)


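For orientation, a minimal usage sketch of the style helper touched in the hunks above (not part of the diff). It assumes `set_plot_style` is re-exported from `sqil_core.utils`; otherwise import it from `sqil_core.utils._plot`. The new `reset_plot_style(plt)` call restores matplotlib defaults before the style dictionary is applied, and since the default `"o"` marker is now commented out, markers must be requested explicitly:

# Hedged usage sketch, not part of the diff.
import matplotlib.pyplot as plt
import numpy as np

from sqil_core.utils import set_plot_style  # assumed re-export

set_plot_style(plt)  # resets rcParams, then applies the SQIL style shown above

x = np.linspace(0, 1, 101)
plt.plot(x, np.sin(2 * np.pi * x), marker="o")
plt.show()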
@@ -67,10 +86,10 @@ def build_title(title: str, path: str, params: list[str]) -> str:
     dic = read_json(f"{path}/param_dict.json")
     title += " with "
     for idx, param in enumerate(params):
-        if not (param in
+        if not (param in PARAM_METADATA.keys()) or not (param in dic):
             title += f"{param} = ? & "
             continue
-        meta =
+        meta = PARAM_METADATA[param]
         value = format_number(dic[param], 3, meta["unit"])
         title += f"${meta['symbol']} =${value} & "
         if idx % 2 == 0 and idx != 0:
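A hedged usage sketch of the updated `build_title` (the measurement path and the parameter ids `ro_freq` and `ro_power` are hypothetical, chosen only to illustrate the `PARAM_METADATA` lookup introduced in the hunk above):

# Hedged usage sketch, not part of the diff.
from sqil_core.utils._plot import build_title

title = build_title(
    "Resonator spectroscopy",
    path="/data/00042_res_spec",     # hypothetical folder containing param_dict.json
    params=["ro_freq", "ro_power"],  # hypothetical parameter ids
)
# Parameters found in both PARAM_METADATA and param_dict.json are rendered as
# "$symbol =$ value unit"; anything missing falls back to "name = ?".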
@@ -105,3 +124,250 @@ def guess_plot_dimension(
         return "1.5"
     else:
         return "1"
+
+
+def finalize_plot(
+    fig,
+    title,
+    fit_res: FitResult = None,
+    qubit_params: ParamDict = {},
+    updated_params: dict = {},
+    sweep_info={},
+    relevant_params=[],
+):
+    """
+    Annotates a matplotlib figure with experiment parameters, fit quality, and title.
+
+    Parameters
+    ----------
+    fig : matplotlib.figure.Figure
+        The figure object to annotate.
+    title : str
+        Title text to use for the plot.
+    fit_res : FitResult, optional
+        Fit result object containing model name and quality summary.
+    qubit_params : ParamDict, optional
+        Dictionary of experimental qubit parameters, indexed by parameter ID.
+    updated_params : dict, optional
+        Dictionary of updated parameters (e.g., from fitting), where keys are param IDs
+        and values are numeric or symbolic parameter values.
+    sweep_info : dict, optional
+        Information about sweep parameters (e.g., their IDs and labels).
+    relevant_params : list, optional
+        List of parameter IDs considered relevant for display under "Experiment".
+    """
+    # Make a summary of relevant experimental parameters
+    exp_params_keys = get_relevant_exp_parameters(
+        qubit_params, relevant_params, [info.id for info in sweep_info]
+    )
+    params_str = ", ".join(
+        [qubit_params[id].symbol_and_value for id in exp_params_keys]
+    )
+    # Make a summary of the updated qubit parameters
+    updated_params_info = {k: ParamInfo(k, v) for k, v in updated_params.items()}
+    update_params_str = ", ".join(
+        [updated_params_info[id].symbol_and_value for id in updated_params_info.keys()]
+    )
+
+    # Find appropriate y_position to print text
+    bbox = fig.get_window_extent().transformed(fig.dpi_scale_trans.inverted())
+    fig_height_inches = bbox.height
+    if fig_height_inches < 8:
+        y_pos = -0.05
+    elif fig_height_inches < 10:
+        y_pos = -0.03
+    elif fig_height_inches < 13:
+        y_pos = -0.02
+    else:
+        y_pos = -0.01
+
+    # Add text to the plot
+    fig.suptitle(f"{title}\n" + update_params_str)
+    if fit_res:
+        fig.text(0.02, y_pos, f"Model: {fit_res.model_name} - {fit_res.quality()}")
+    if params_str:
+        fig.text(0.4, y_pos, "Experiment: " + params_str, ha="left")
+
+
+def plot_mag_phase(path=None, datadict=None, raw=False):
+    """
+    Plot the magnitude and phase of complex measurement data from a db path or in-memory dictionary.
+
+    This function generates either a 1D or 2D plot of the magnitude and phase of complex data,
+    depending on the presence of sweep parameters. It supports normalization and background
+    subtraction.
+
+    Parameters
+    ----------
+    path : str or None, optional
+        Path to the folder containing measurement data. Required if `datadict` is not provided.
+    datadict : dict or None, optional
+        Pre-loaded data dictionary with schema, typically extracted using `extract_h5_data`.
+        Required if `path` is not provided.
+    raw : bool, default False
+        If True, skip normalization and background subtraction for 2D plots. Useful for viewing raw data.
+
+    Returns
+    -------
+    fig : matplotlib.figure.Figure
+        The matplotlib Figure object containing the plot.
+    axs : matplotlib.axes.Axes or ndarray of Axes
+        The Axes object(s) used for the subplot(s).
+
+    Raises
+    ------
+    Exception
+        If neither `path` nor `datadict` is provided.
+
+    Notes
+    -----
+    - Axes and units are automatically inferred from the schema in the dataset.
+    """
+
+    all_data, all_info, _ = get_data_and_info(path=path, datadict=datadict)
+    x_data, y_data, sweeps = all_data
+    x_info, y_info, sweep_info = all_info
+
+    # Rescale data
+    x_data_scaled = x_data * x_info.scale
+    y_data_scaled = y_data * y_info.scale
+    y_unit = f" [{y_info.rescaled_unit}]" if y_info.unit else ""
+
+    set_plot_style(plt)
+
+    if len(sweeps) == 0:  # 1D plot
+        fig, axs = plt.subplots(2, 1, figsize=(20, 12), sharex=True)
+
+        axs[0].plot(x_data_scaled, np.abs(y_data_scaled), "o")
+        axs[0].set_ylabel("Magnitude" + y_unit)
+        axs[0].tick_params(labelbottom=True)
+        axs[0].xaxis.set_tick_params(
+            which="both", labelbottom=True
+        )  # Redundant for safety
+
+        axs[1].plot(x_data_scaled, np.unwrap(np.angle(y_data_scaled)), "o")
+        axs[1].set_xlabel(x_info.name_and_unit)
+        axs[1].set_ylabel("Phase [rad]")
+    else:  # 2D plot
+        fig, axs = plt.subplots(1, 2, figsize=(24, 12), sharex=True, sharey=True)
+
+        # Process mag and phase
+        mag, phase = np.abs(y_data), np.unwrap(np.angle(y_data))
+        if not raw:
+            mag = soft_normalize(remove_offset(mag))
+            flat_phase = remove_linear_background(x_data, phase, points_cut=1)
+            phase = soft_normalize(flat_phase)
+        # Load sweep parameter
+        sweep0_info = sweep_info[0]
+        sweep0_scaled = sweeps[0] * sweep0_info.scale
+
+        c0 = axs[0].pcolormesh(
+            x_data_scaled,
+            sweep0_scaled,
+            mag,
+            shading="auto",
+            cmap="PuBu",
+        )
+        if raw:
+            fig.colorbar(c0, ax=axs[0])
+            axs[0].set_title("Magnitude" + y_unit)
+        else:
+            axs[0].set_title("Magnitude (normalized)")
+        axs[0].set_xlabel(x_info.name_and_unit)
+        axs[0].set_ylabel(sweep0_info.name_and_unit)
+
+        c1 = axs[1].pcolormesh(
+            x_data_scaled,
+            sweep0_scaled,
+            phase,
+            shading="auto",
+            cmap="PuBu",
+        )
+        if raw:
+            fig.colorbar(c1, ax=axs[1])
+            axs[1].set_title("Phase [rad]")
+        else:
+            axs[1].set_title("Phase (normalized)")
+        axs[1].set_xlabel(x_info.name_and_unit)
+        axs[1].tick_params(labelleft=True)
+        axs[1].xaxis.set_tick_params(
+            which="both", labelleft=True
+        )  # Redundant for safety
+
+    fig.tight_layout()
+    return fig, axs
+
+
+def plot_projection_IQ(path=None, datadict=None, proj_data=None, full_output=False):
+    """
+    Plots the real projection of complex I/Q data versus the x-axis and the full IQ plane.
+
+    Parameters
+    ----------
+    path : str, optional
+        Path to the HDF5 file containing the data. Required if `datadict` is not provided.
+    datadict : dict, optional
+        Pre-loaded data dictionary with schema, typically extracted using `extract_h5_data`.
+        Required if `path` is not provided.
+    proj_data : np.ndarray, optional
+        Precomputed projected data (real part of transformed complex values).
+        If not provided, it will be computed using `transform_data`.
+    full_output : bool, default False
+        Whether to return projected data and the inverse transformation function.
+
+    Returns
+    -------
+    res : tuple
+        If `full_output` is False:
+            (fig, [ax_proj, ax_iq])
+        If `full_output` is True:
+            (fig, [ax_proj, ax_iq], proj_data, inv)
+        - `fig`: matplotlib Figure object.
+        - `ax_proj`: Axis for projection vs x-axis.
+        - `ax_iq`: Axis for I/Q scatter plot.
+        - `proj_data`: The real projection of the complex I/Q data.
+        - `inv`: The inverse transformation function used during projection.
+
+    Notes
+    -----
+    This function supports only 1D datasets. If sweep dimensions are detected, no plot is created.
+    The projection is performed using a data transformation routine (e.g., PCA or rotation).
+    """
+
+    all_data, all_info, _ = get_data_and_info(path=path, datadict=datadict)
+    x_data, y_data, sweeps = all_data
+    x_info, y_info, sweep_info = all_info
+
+    # Get y_unit
+    y_unit = f" [{y_info.rescaled_unit}]" if y_info.unit else ""
+
+    set_plot_style(plt)
+
+    if len(sweeps) == 0:
+        # Project data
+        if proj_data is None:
+            proj_data, inv = transform_data(y_data, inv_transform=True)
+
+        set_plot_style(plt)
+        fig = plt.figure(figsize=(20, 7), constrained_layout=True)
+        gs = GridSpec(nrows=1, ncols=10, figure=fig, wspace=0.2)
+
+        # Plot the projection
+        ax_proj = fig.add_subplot(gs[:, :6])  # 6/10 width
+        ax_proj.plot(x_data * x_info.scale, proj_data.real * y_info.scale, "o")
+        ax_proj.set_xlabel(x_info.name_and_unit)
+        ax_proj.set_ylabel("Projected" + y_unit)
+
+        # Plot IQ data
+        ax_iq = fig.add_subplot(gs[:, 6:])  # 4/10 width
+        ax_iq.scatter(0, 0, marker="+", color="black", s=150)
+        ax_iq.plot(y_data.real * y_info.scale, y_data.imag * y_info.scale, "o")
+        ax_iq.set_xlabel("In-Phase" + y_unit)
+        ax_iq.set_ylabel("Quadrature" + y_unit)
+        ax_iq.set_aspect(aspect="equal", adjustable="datalim")
+
+        if full_output:
+            res = (fig, [ax_proj, ax_iq], proj_data, inv)
+        else:
+            res = (fig, [ax_proj, ax_iq])
+        return res
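Taken together, the new helpers are driven either by a measurement path or a pre-loaded data dictionary. A hedged end-to-end sketch (the path is hypothetical, and whether it points to a folder or directly to the HDF5 file depends on how `extract_h5_data` resolves it):

# Hedged usage sketch, not part of the diff.
import matplotlib.pyplot as plt

from sqil_core.utils._plot import finalize_plot, plot_mag_phase, plot_projection_IQ

path = "/data/00042_qubit_spectroscopy"  # hypothetical measurement location

# Magnitude/phase overview; 1D vs 2D layout is inferred from the sweep axes in the schema
fig, axs = plot_mag_phase(path=path)

# Projection of the IQ data plus the IQ plane (1D datasets only)
fig2, (ax_proj, ax_iq), proj_data, inv = plot_projection_IQ(path=path, full_output=True)

# Annotate with a title; fit results and qubit parameters can be passed when available
finalize_plot(fig2, "Qubit spectroscopy")
plt.show()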
sqil_core/utils/_read.py
CHANGED
@@ -1,14 +1,26 @@
+from __future__ import annotations
+
 import json
 import os
+import shutil
+from typing import TYPE_CHECKING

 import h5py
 import numpy as np
+import yaml
+from laboneq import serializers
+
+from sqil_core.utils._formatter import param_info_from_schema
+
+from ._const import _EXP_UNIT_MAP, PARAM_METADATA

-
+if TYPE_CHECKING:
+    from laboneq.dsl.quantum.qpu import QPU


+# TODO: add tests for schema
 def extract_h5_data(
-    path: str, keys: list[str] | None = None
+    path: str, keys: list[str] | None = None, schema=False
 ) -> dict | tuple[np.ndarray, ...]:
     """Extract data at the given keys from an HDF5 file. If no keys are
     given (None) returns the data field of the object.
@@ -42,6 +54,11 @@ def extract_h5_data(
     with h5py.File(path, "r") as h5file:
         data = h5file["data"]
         data_keys = data.keys()
+
+        db_schema = None
+        if schema:
+            db_schema = json.loads(data.attrs.get("__schema__"))
+
         # Extract only the requested keys
         if bool(keys) and (len(keys) > 0):
             res = []
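For context, `schema=True` expects a JSON-encoded `__schema__` attribute on the `data` group. A minimal sketch of a file layout this reader would accept, inferred from the reader code rather than any documented format (the dataset names, the `.ddh5` extension, and the per-field `"unit"` entry are assumptions; the roles mirror those handled by `map_data_dict` further below):

# Hedged sketch, not part of the diff: write an HDF5 file compatible with schema=True.
import json

import h5py
import numpy as np

with h5py.File("example.ddh5", "w") as f:
    data = f.create_group("data")
    data["frequency"] = np.linspace(4e9, 5e9, 201)
    data["s21"] = np.ones(201, dtype=complex)
    data.attrs["__schema__"] = json.dumps(
        {
            "frequency": {"role": "x-axis", "unit": "Hz"},
            "s21": {"role": "data", "unit": ""},
        }
    )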
@@ -51,9 +68,13 @@ def extract_h5_data(
                     res.append([])
                     continue
                 res.append(np.array(data[key][:]))
-
+            if not schema and len(res) == 1:
+                return res[0]
+            return tuple(res) if not schema else (*tuple(res), db_schema)
         # Extract the whole data dictionary
-
+        h5_dict = _h5_to_dict(data)
+        return h5_dict if not schema else {**h5_dict, "schema": db_schema}
+        #


 def _h5_to_dict(obj) -> dict:
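The return shapes implied by the hunk above, shown as a hedged example (the file and dataset names are hypothetical):

# Hedged usage sketch, not part of the diff.
from sqil_core.utils._read import extract_h5_data

# One key, no schema: the bare array is returned
freq = extract_h5_data("example.ddh5", keys=["frequency"])

# Several keys with schema=True: a tuple of arrays followed by the schema dict
freq, s21, schema = extract_h5_data("example.ddh5", keys=["frequency", "s21"], schema=True)

# No keys: the whole "data" group as a dict, plus a "schema" entry when requested
datadict = extract_h5_data("example.ddh5", schema=True)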
@@ -68,112 +89,174 @@ def _h5_to_dict(obj) -> dict:
     return data_dict


-def
-    """
-
-        dictionary = json.load(f)
-    return dictionary
+def map_data_dict(data_dict: dict):
+    """
+    Maps experimental data to standardized arrays using a provided schema.

+    This function interprets the structure of a measurement data dictionary
+    (obtained using extract_h5_data) by extracting relevant data fields according
+    to roles specified in the database schema. It returns the x-axis values, y-axis data,
+    any additional sweep parameters, and a mapping of keys used for each role.

-
-
+    Parameters
+    ----------
+    data_dict : dict
+        Dictionary containing measurement data and an associated 'schema' key
+        that defines the role of each field (e.g., "x-axis", "data", "axis").

-
-
-
-
-
-
-
+    Returns
+    -------
+    x_data : np.ndarray
+        Array containing the x-axis values.
+    y_data : np.ndarray
+        Array containing the y-axis (measured) data.
+    sweeps : list[np.ndarray]
+        List of additional swept parameter arrays (if any).
+    key_map : dict
+        Dictionary with keys `"x_data"`, `"y_data"`, and `"sweeps"` indicating
+        the corresponding keys used in the original `data_dict`.
+
+    Notes
+    -----
+    - If the schema is missing, the function prints a warning and returns empty arrays.
+    - Each item in the schema must be a dictionary with a `"role"` key.
+
+    Examples
+    --------
+    >>> x, y, sweeps, mapping = map_data_dict(experiment_data)
+    >>> print(f"x-axis data from key: {mapping['x_data']}")
     """

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    def __str__(self):
-        """Return a JSON-formatted string of the object."""
-        return json.dumps(self.to_dict())
-
-    def __eq__(self, other):
-        if isinstance(other, ParamInfo):
-            return (self.id == other.id) & (self.value == other.value)
-        if isinstance(other, (int, float, complex, str)):
-            return self.value == other
-        return False
-
-    def __bool__(self):
-        return bool(self.id)
-
-
-ParamDict = dict[str, ParamInfo | dict[str, ParamInfo]]
-
-
-def _enrich_param_dict(param_dict: dict) -> ParamDict:
-    """Add metadata to param_dict entries."""
-    res = {}
-    for key, value in param_dict.items():
-        if isinstance(value, dict):
-            # Recursive step for nested dictionaries
-            res[key] = _enrich_param_dict(value)
-        else:
-            res[key] = ParamInfo(key, value)
-    return res
-
-
-def read_param_dict(path: str) -> ParamDict:
-    """Read param_dict and include additional information for each entry.
+    schema = data_dict.get("schema", None)
+    if schema is None:
+        print(
+            "Cannot automatically read data: no database schema was provided by the experiment."
+        )
+
+    x_data, y_data, sweeps = np.array([]), np.array([]), []
+    key_map = {"x_data": "", "y_data": "", "sweeps": []}
+
+    for key, value in schema.items():
+        if type(value) is not dict:
+            continue
+        role = value.get("role", None)
+        if role == "data":
+            key_map["y_data"] = key
+            y_data = data_dict[key]
+        elif role == "x-axis":
+            key_map["x_data"] = key
+            x_data = data_dict[key]
+        elif role == "axis":
+            key_map["sweeps"].append(key)
+            sweeps.append(data_dict[key])
+
+    return x_data, y_data, sweeps, key_map
+
+
+def extract_mapped_data(path: str):
+    """
+    Loads measurement data from an HDF5 file and maps it into x_data, y_data and sweeps.
+    The map and the database schema on which it relies are also returned.

     Parameters
     ----------
-    path : str
-        Path to the file
+    path : str or Path
+        Path to the HDF5 file containing experimental data and schema definitions.

     Returns
     -------
-
-
+    x_data : np.ndarray
+        Array of x-axis values extracted according to the schema.
+    y_data : np.ndarray
+        Array of measured data values (y-axis).
+    sweeps : list[np.ndarray]
+        List of arrays for any additional swept parameters defined in the schema.
+    datadict_map : dict
+        Mapping of keys used for `"x_data"`, `"y_data"`, and `"sweeps"` in the original file.
+    schema : dict
+        The schema used to interpret the data structure and field roles.
+
+    Notes
+    -----
+    - This function expects the file to contain a top-level "schema" key that defines the
+      role of each dataset (e.g., "data", "x-axis", "axis").
+    - Uses `extract_h5_data` and `map_data_dict` internally for loading and interpretation.
+
+    Examples
+    --------
+    >>> x, y, sweeps, datadict_map, schema = extract_mapped_data(path)
     """
-
-
-
-
+
+    datadict = extract_h5_data(path, schema=True)
+    schema = datadict.get("schema")
+    x_data, y_data, sweeps, datadict_map = map_data_dict(datadict)
+    return x_data, y_data, sweeps, datadict_map, schema


-def
-
-
-
-
-
+def get_data_and_info(path=None, datadict=None):
+    if path is None and datadict is None:
+        raise Exception("At least one of `path` and `datadict` must be specified.")
+
+    if path is not None:
+        datadict = extract_h5_data(path, schema=True)
+
+    # Get schema and map data
+    schema = datadict.get("schema")
+    x_data, y_data, sweeps, datadict_map = map_data_dict(datadict)
+
+    # Get metadata on x_data and y_data
+    x_info = param_info_from_schema(
+        datadict_map["x_data"], schema[datadict_map["x_data"]]
+    )
+    y_info = param_info_from_schema(
+        datadict_map["y_data"], schema[datadict_map["y_data"]]
+    )
+
+    sweep_info = []
+    for sweep_key in datadict_map["sweeps"]:
+        sweep_info.append(param_info_from_schema(sweep_key, schema[sweep_key]))
+
+    return (x_data, y_data, sweeps), (x_info, y_info, sweep_info), datadict
+
+
+def read_json(path: str) -> dict:
+    """Reads a json file and returns the data as a dictionary."""
+    with open(path) as f:
+        dictionary = json.load(f)
+    return dictionary
+
+
+def read_yaml(path: str) -> dict:
+    with open(path) as stream:
+        try:
+            return yaml.safe_load(stream)
+        except yaml.YAMLError as exc:
+            print(exc)
+
+
+def read_qpu(dir_path: str, filename: str) -> QPU:
+    """Reads QPU file stored in dir_path/filename using laboneq serializers."""
+    qpu = serializers.load(os.path.join(dir_path, filename))
+    return qpu


 def get_measurement_id(path):
     return os.path.basename(path)[0:5]
+
+
+def copy_folder(src: str, dst: str):
+    # Ensure destination exists
+    os.makedirs(dst, exist_ok=True)
+
+    # Copy files recursively
+    for root, dirs, files in os.walk(src):
+        for dir_name in dirs:
+            os.makedirs(
+                os.path.join(dst, os.path.relpath(os.path.join(root, dir_name), src)),
+                exist_ok=True,
+            )
+        for file_name in files:
+            shutil.copy2(
+                os.path.join(root, file_name),
+                os.path.join(dst, os.path.relpath(os.path.join(root, file_name), src)),
+            )