sqil-core 0.1.0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. sqil_core/__init__.py +1 -0
  2. sqil_core/config_log.py +42 -0
  3. sqil_core/experiment/__init__.py +11 -0
  4. sqil_core/experiment/_analysis.py +95 -0
  5. sqil_core/experiment/_events.py +25 -0
  6. sqil_core/experiment/_experiment.py +553 -0
  7. sqil_core/experiment/data/plottr.py +778 -0
  8. sqil_core/experiment/helpers/_function_override_handler.py +111 -0
  9. sqil_core/experiment/helpers/_labone_wrappers.py +12 -0
  10. sqil_core/experiment/instruments/__init__.py +2 -0
  11. sqil_core/experiment/instruments/_instrument.py +190 -0
  12. sqil_core/experiment/instruments/drivers/SignalCore_SC5511A.py +515 -0
  13. sqil_core/experiment/instruments/local_oscillator.py +205 -0
  14. sqil_core/experiment/instruments/server.py +175 -0
  15. sqil_core/experiment/instruments/setup.yaml +21 -0
  16. sqil_core/experiment/instruments/zurich_instruments.py +55 -0
  17. sqil_core/fit/__init__.py +22 -0
  18. sqil_core/fit/_core.py +179 -31
  19. sqil_core/fit/_fit.py +490 -81
  20. sqil_core/fit/_guess.py +232 -0
  21. sqil_core/fit/_models.py +32 -1
  22. sqil_core/fit/_quality.py +266 -0
  23. sqil_core/resonator/__init__.py +2 -0
  24. sqil_core/resonator/_resonator.py +256 -74
  25. sqil_core/utils/__init__.py +36 -13
  26. sqil_core/utils/_analysis.py +123 -0
  27. sqil_core/utils/_const.py +74 -18
  28. sqil_core/utils/_formatter.py +126 -55
  29. sqil_core/utils/_plot.py +272 -6
  30. sqil_core/utils/_read.py +178 -95
  31. sqil_core/utils/_utils.py +147 -0
  32. {sqil_core-0.1.0.dist-info → sqil_core-1.0.0.dist-info}/METADATA +9 -1
  33. sqil_core-1.0.0.dist-info/RECORD +36 -0
  34. {sqil_core-0.1.0.dist-info → sqil_core-1.0.0.dist-info}/WHEEL +1 -1
  35. sqil_core-0.1.0.dist-info/RECORD +0 -19
  36. {sqil_core-0.1.0.dist-info → sqil_core-1.0.0.dist-info}/entry_points.txt +0 -0
sqil_core/utils/_analysis.py CHANGED
@@ -1,4 +1,5 @@
  import numpy as np
+ from scipy.signal import argrelextrema
 
 
  def remove_offset(data: np.ndarray, avg: int = 3) -> np.ndarray:
@@ -211,6 +212,90 @@ def line_between_2_points(
      return slope, intercept
 
 
+ def soft_normalize(data: np.ndarray) -> np.ndarray:
+     """
+     Apply soft normalization to a 1D or 2D array with optional NaNs.
+
+     This function performs z-score normalization followed by a smooth
+     non-linear compression using a hyperbolic tangent (tanh) function.
+     It is designed to reduce the effect of outliers while preserving
+     the dynamic range of typical data values. The result is rescaled to [0, 1].
+
+     For 2D arrays, normalization is done row-wise, but compression is
+     based on a global threshold across all non-NaN entries.
+
+     Parameters
+     ----------
+     data : np.ndarray
+         Input data, must be a 1D or 2D NumPy array. Can contain NaNs.
+
+     Returns
+     -------
+     np.ndarray
+         Normalized data, same shape as input, with values scaled to [0, 1].
+         NaNs are preserved.
+
+     Raises
+     ------
+     ValueError
+         If `data` is not 1D or 2D.
+
+     Notes
+     -----
+     - Z-score normalization is done using nanmean and nanstd.
+     - Outliers are compressed using a tanh centered at a scaled threshold.
+     - Output values are guaranteed to be in [0, 1] range, except NaNs.
+     - Rows with zero standard deviation are flattened to 0.5.
+     """
+
+     if data.ndim not in [1, 2]:
+         raise ValueError("Input must be 1D or 2D")
+
+     data = np.array(data, dtype=np.float64)
+     nan_mask = np.isnan(data)
+
+     if data.ndim == 1:
+         mean = np.nanmean(data)
+         std = np.nanstd(data)
+         std = 1.0 if std == 0 else std
+         abs_z = np.abs((data - mean) / std)
+     else:
+         mean = np.nanmean(data, axis=1, keepdims=True)
+         std = np.nanstd(data, axis=1, keepdims=True)
+         std = np.where(std == 0, 1.0, std)
+         abs_z = np.abs((data - mean) / std)
+
+     # Flatten over all values for global thresholding
+     flat_abs_z = abs_z[~nan_mask]
+     if flat_abs_z.size == 0:
+         return np.full_like(data, 0.5)
+
+     threshold = 4.0 * np.mean(flat_abs_z)
+     alpha = 1.0 / (4.0 * np.std(flat_abs_z)) if np.std(flat_abs_z) != 0 else 1.0
+
+     compressed = np.tanh(alpha * (abs_z - threshold))
+
+     # Rescale to [0, 1]
+     compressed[nan_mask] = np.nan
+     min_val = np.nanmin(compressed)
+     max_val = np.nanmax(compressed)
+     if max_val == min_val:
+         rescaled = np.full_like(compressed, 0.5)
+     else:
+         rescaled = (compressed - min_val) / (max_val - min_val)
+
+     rescaled[nan_mask] = np.nan
+     return rescaled
+
+
+ def find_closest_index(arr, target):
+     """
+     Find the index of the element in `arr` closest to the `target` value.
+     """
+
+     return np.abs(arr - target).argmin()
+
+
  def compute_snr_peaked(
      x_data: np.ndarray,
      y_data: np.ndarray,
@@ -290,3 +375,41 @@ def compute_snr_peaked(
      snr = signal / noise_std if noise_std > 0 else np.inf  # Avoid division by zero
 
      return snr
+
+
+ def find_first_minima_idx(data):
+     """
+     Find the index of the first local minimum in a 1D array.
+
+     Parameters
+     ----------
+     data : array-like
+         1D sequence of numerical values.
+
+     Returns
+     -------
+     int or None
+         Index of the first local minimum, or None if no local minimum is found.
+
+     Notes
+     -----
+     A local minimum is defined as a point that is smaller than its immediate neighbors.
+     Uses `scipy.signal.argrelextrema` to detect local minima.
+
+     Examples
+     --------
+     >>> data = [3, 2, 4, 1, 5]
+     >>> find_first_minima_idx(data)
+     1
+     """
+     data = np.array(data)
+     minima_indices = argrelextrema(data, np.less)[0]
+
+     # Check boundaries for minima (optional)
+     if data.size < 2:
+         return None
+
+     if len(minima_indices) > 0:
+         return minima_indices[0]
+
+     return None
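For orientation, here is a minimal usage sketch of the helpers added above. It is illustrative only and not part of the package diff; the import targets sqil_core/utils/_analysis.py as listed in the file table, though the functions may also be re-exported from sqil_core.utils.

import numpy as np

from sqil_core.utils._analysis import (
    find_closest_index,
    find_first_minima_idx,
    soft_normalize,
)

# 2D sweep with an outlier and a NaN: z-score per row, tanh compression
# against a global threshold, then rescaling to [0, 1] with NaNs preserved.
data = np.array([[1.0, 2.0, 3.0, 100.0], [4.0, np.nan, 6.0, 5.0]])
normalized = soft_normalize(data)
print(np.nanmin(normalized), np.nanmax(normalized))  # 0.0 1.0

# Index of the frequency point closest to 5.2 GHz (made-up sweep values).
freqs = np.linspace(5.0e9, 5.5e9, 501)
idx = find_closest_index(freqs, 5.2e9)
print(freqs[idx])  # ~5.2e9

# First local minimum of a trace; returns None if there is none.
print(find_first_minima_idx([3, 2, 4, 1, 5]))  # 1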
sqil_core/utils/_const.py CHANGED
@@ -4,7 +4,7 @@ _EXP_UNIT_MAP = {
      -15: "p",
      -12: "f",
      -9: "n",
-     -6: r"\mu",
+     -6: r"\mu ",
      -3: "m",
      0: "",
      3: "k",
@@ -14,36 +14,92 @@ _EXP_UNIT_MAP = {
      15: "P",
  }
 
- _PARAM_METADATA = {
-     "current": {"name": "Current", "symbol": "I", "unit": "A", "scale": 1e3},
-     "ro_freq": {
+ PARAM_METADATA = {
+     "readout_resonator_frequency": {
          "name": "Readout frequency",
          "symbol": "f_{RO}",
          "unit": "Hz",
          "scale": 1e-9,
+         "precision": 5,
      },
-     "ro_power": {
-         "name": "Readout power",
-         "symbol": "P_{RO}",
+     "readout_range_out": {
+         "name": "Readout power offset",
+         "symbol": "P_0^{RO}",
          "unit": "dBm",
          "scale": 1,
      },
-     "qu_freq": {
-         "name": "Qubit frequency",
-         "symbol": "f_q",
+     "readout_amplitude": {
+         "name": "Readout amplitude",
+         "symbol": "A_{RO}",
+         "unit": "",
+         "scale": 1,
+     },
+     "readout_length": {
+         "name": "Readout length",
+         "symbol": "T_{RO}",
+         "unit": "s",
+         "scale": 1e6,
+     },
+     "readout_lo_frequency": {
+         "name": "Internal readout LO frequency",
+         "symbol": "f_{LO-int}^{RO}",
+         "unit": "Hz",
+         "scale": 1e-9,
+     },
+     "readout_external_lo_frequency": {
+         "name": "External LO frequency",
+         "symbol": "f_{LO}^{Ext}",
          "unit": "Hz",
          "scale": 1e-9,
      },
-     "qu_power": {"name": "Qubit power", "symbol": "P_q", "unit": "dBm", "scale": 1},
-     "vna_bw": {"name": "VNA bandwidth", "symbol": "BW_{VNA}", "unit": "Hz", "scale": 1},
-     "vna_avg": {"name": "VNA averages", "symbol": "avg_{VNA}", "unit": "", "scale": 1},
-     "index": {"name": "Index", "symbol": "idx", "unit": "", "scale": 1},
+     "readout_external_lo_power": {
+         "name": "External LO power",
+         "symbol": "P_{LO}^{Ext}",
+         "unit": "dBm",
+         "scale": 1,
+     },
+     "readout_kappa_tot": {"symbol": r"\kappa_{tot}", "unit": "Hz", "scale": "MHz"},
+     "resonance_frequency_ge": {
+         "name": "Readout frequency",
+         "symbol": "f_{ge}",
+         "unit": "Hz",
+         "scale": 1e-9,
+         "precision": 5,
+     },
+     "resonance_frequency_ef": {
+         "name": "Readout frequency",
+         "symbol": "f_{ef}",
+         "unit": "Hz",
+         "scale": 1e-9,
+         "precision": 5,
+     },
+     "spectroscopy_amplitude": {
+         "name": "Spectroscopy amplitude",
+         "symbol": "A_{sp}",
+         "unit": "",
+         "scale": 1,
+     },
+     "ge_drive_amplitude_pi": {
+         "name": "Drive amplitude pi ge",
+         "symbol": r"A_{\pi}^{ge}",
+         "unit": "",
+         "scale": 1,
+     },
+     "ge_drive_length": {
+         "name": "Drive length ge",
+         "symbol": r"T_{\pi}^{ge}",
+         "unit": "s",
+         "scale": 1e9,
+     },
  }
 
  ONE_TONE_PARAMS = np.array(
-     ["current", "ro_power", "vna_bw", "vna_avg", "qu_power", "qu_freq"]
+     [
+         "readout_amplitude",
+         "readout_length",
+         "readout_external_lo_frequency",
+         "readout_external_lo_power",
+     ]
  )
 
- TWO_TONE_PARAMS = np.array(
-     ["ro_freq", "ro_power", "current", "vna_bw", "vna_avg", "qu_power"]
- )
+ TWO_TONE_PARAMS = np.array(["spectroscopy_amplitude"])
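To illustrate how the renamed PARAM_METADATA table is meant to be consumed, here is a small sketch. The display_value helper is hypothetical and not part of sqil-core; it mirrors the scale/exponent logic used by get_name_and_unit in _formatter.py and assumes a numeric "scale" entry.

from sqil_core.utils._const import _EXP_UNIT_MAP, PARAM_METADATA

def display_value(param_id: str, raw_value: float) -> str:
    # Hypothetical helper: rescale a raw SI value and attach the prefixed unit.
    meta = PARAM_METADATA[param_id]
    scale = meta.get("scale", 1)
    precision = meta.get("precision", 3)
    exponent = -(int(f"{scale:.0e}".split("e")[1]) // 3) * 3
    unit = f"{_EXP_UNIT_MAP[exponent]}{meta['unit']}"
    return f"{meta['name']} = {raw_value * scale:.{precision}g} {unit}"

print(display_value("readout_resonator_frequency", 7.123456e9))
# -> "Readout frequency = 7.1235 GHz"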
sqil_core/utils/_formatter.py CHANGED
@@ -1,10 +1,12 @@
+ import json
  from decimal import ROUND_DOWN, Decimal
 
+ import attrs
  import numpy as np
  from scipy.stats import norm
  from tabulate import tabulate
 
- from ._const import _EXP_UNIT_MAP, _PARAM_METADATA
+ from ._const import _EXP_UNIT_MAP, PARAM_METADATA
 
 
  def _cut_to_significant_digits(number, n):
@@ -88,13 +90,13 @@ def get_name_and_unit(param_id: str) -> str:
      str
          Name and [unit]
      """
-     meta = _PARAM_METADATA[param_id]
+     meta = PARAM_METADATA[param_id]
      scale = meta["scale"] if "scale" in meta else 1
      exponent = -(int(f"{scale:.0e}".split("e")[1]) // 3) * 3
      return f"{meta['name']} [{_EXP_UNIT_MAP[exponent]}{meta['unit']}]"
 
 
- def print_fit_params(param_names, params, std_errs=None, perc_errs=None):
+ def format_fit_params(param_names, params, std_errs=None, perc_errs=None):
      matrix = [param_names, params]
 
      headers = ["Param", "Fitted value"]
@@ -111,58 +113,7 @@ def print_fit_params(param_names, params, std_errs=None, perc_errs=None):
      data = [matrix[:, i] for i in range(len(params))]
 
      table = tabulate(data, headers=headers, tablefmt="github")
-     print(table + "\n")
-
-
- def print_fit_metrics(fit_quality, keys: list[str] | None = None):
-     if keys is None:
-         keys = fit_quality.keys() if fit_quality else []
-
-     # Print fit quality parameters
-     for key in keys:
-         value = fit_quality[key]
-         quality = ""
-         # Evaluate reduced Chi-squared
-         if key == "red_chi2":
-             key = "reduced χ²"
-             if value <= 0.5:
-                 quality = "GREAT (or overfitting)"
-             elif (value > 0.9) and (value <= 1.1):
-                 quality = "GREAT"
-             elif (value > 0.5) and (value <= 2):
-                 quality = "GOOD"
-             elif (value > 2) and (value <= 5):
-                 quality = "MEDIUM"
-             elif value > 5:
-                 quality = "BAD"
-         # Evaluate R-squared
-         elif key == "r2":
-             # Skip if complex
-             if isinstance(value, complex):
-                 continue
-             key = "R²"
-             if value < 0:
-                 quality = "BAD - a horizontal line would be better"
-             elif value > 0.97:
-                 quality = "GREAT"
-             elif value > 0.95:
-                 quality = "GOOD"
-             elif value > 0.80:
-                 quality = "MEDIUM"
-             else:
-                 quality = "BAD"
-         # Normalized mean absolute error NMAE and
-         # normalized root mean square error NRMSE
-         elif (key == "nmae") or (key == "nrmse"):
-             if value < 0.1:
-                 quality = "GREAT"
-             elif value < 0.2:
-                 quality = "GOOD"
-             else:
-                 quality = "BAD"
-
-         # Print result
-         print(f"{key}\t{value:.3e}\t{quality}")
+     return table + "\n"
 
 
  def _sigma_for_confidence(confidence_level: float) -> float:
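In short, print_fit_params was renamed to format_fit_params and now returns the table string instead of printing it, while print_fit_metrics was removed (fit-quality reporting presumably moves to the new sqil_core/fit/_quality.py). A minimal sketch of the new call pattern, with made-up fit values:

from sqil_core.utils._formatter import format_fit_params

table = format_fit_params(
    ["f_0", "kappa"],      # parameter names
    [7.123e9, 2.5e5],      # fitted values (made up)
    std_errs=[1.2e5, 8.0e3],
    perc_errs=[0.0017, 3.2],
)
print(table)  # the caller now decides whether to print, log, or embed the table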
@@ -186,3 +137,123 @@ def _sigma_for_confidence(confidence_level: float) -> float:
      sigma_multiplier = norm.ppf(1 - alpha / 2)
 
      return sigma_multiplier
+
+
+ class ParamInfo:
+     """Parameter information for items of param_dict
+
+     Attributes:
+         id (str): QPU key
+         value (any): the value of the parameter
+         name (str): full name of the parameter (e.g. Readout frequency)
+         symbol (str): symbol of the parameter in Latex notation (e.g. f_{RO})
+         unit (str): base unit of measurement (e.g. Hz)
+         scale (int): the scale that should be generally applied to raw data (e.g. 1e-9 to take raw Hz to GHz)
+     """
+
+     def __init__(self, id, value=None, metadata=None):
+         self.id = id
+         self.value = value
+
+         if metadata is not None:
+             meta = metadata
+         elif id in PARAM_METADATA:
+             meta = PARAM_METADATA[id]
+         else:
+             meta = {}
+
+         self.name = meta.get("name", None)
+         self.symbol = meta.get("symbol", id)
+         self.unit = meta.get("unit", "")
+         self.scale = meta.get("scale", 1)
+         self.precision = meta.get("precision", 3)
+
+         if self.name is None:
+             self.name = self.id[0].upper() + self.id[1:].replace("_", " ")
+
+     def to_dict(self):
+         """Convert ParamInfo to a dictionary."""
+         return {
+             "id": self.id,
+             "value": self.value,
+             "name": self.name,
+             "symbol": self.symbol,
+             "unit": self.unit,
+             "scale": self.scale,
+             "precision": self.precision,
+         }
+
+     @property
+     def name_and_unit(self):
+         return self.name + (
+             f" [{self.rescaled_unit}]" if self.unit or self.scale != 1 else ""
+         )
+
+     @property
+     def rescaled_unit(self):
+         # if self.unit == "":
+         #     return self.unit
+         exponent = -(int(f"{self.scale:.0e}".split("e")[1]) // 3) * 3
+         unit = f"{_EXP_UNIT_MAP[exponent]}{self.unit}"
+         return unit
+
+     @property
+     def symbol_and_value(self, latex=True):
+         sym = f"${self.symbol}$" if latex else self.symbol
+         equal = f"$=$" if latex else " = "
+         val = format_number(self.value, self.precision, self.unit, latex=latex)
+         return f"{sym}{equal}{val}"
+
+     def __str__(self):
+         """Return a JSON-formatted string of the object."""
+         return json.dumps(self.to_dict())
+
+     def __eq__(self, other):
+         if isinstance(other, ParamInfo):
+             return (self.id == other.id) & (self.value == other.value)
+         if isinstance(other, (int, float, complex, str)):
+             return self.value == other
+         return False
+
+     def __bool__(self):
+         return bool(self.id)
+
+
+ ParamDict = dict[str, ParamInfo]
+
+
+ def param_info_from_schema(key, metadata) -> ParamInfo:
+     metadata_id = metadata.get("param_id")
+     if metadata_id is not None:
+         return ParamInfo(metadata_id)
+     return ParamInfo(key, metadata=metadata)
+
+
+ def enrich_qubit_params(qubit) -> ParamDict:
+     qubit_params = attrs.asdict(qubit.parameters)
+     res = {}
+     for key, value in qubit_params.items():
+         res[key] = ParamInfo(key, value)
+     return res
+
+
+ def get_relevant_exp_parameters(
+     qubit_params: ParamDict, exp_param_ids: list, sweep_ids: list, only_keys=True
+ ):
+     # Filter out sweeps
+     filtered = [id for id in exp_param_ids if id not in sweep_ids]
+
+     # Filter special cases
+     # No external LO frequency => external Lo info is irrelevant
+     if (["readout_external_lo_frequency"] in exp_param_ids) and (
+         not qubit_params.get("readout_external_lo_frequency").value
+     ):
+         parms_to_exclude = [
+             "readout_external_lo_frequency",
+             "readout_external_lo_power",
+         ]
+         filtered = [id for id in filtered if id not in parms_to_exclude]
+
+     result = {key: value for key, value in qubit_params.items() if key in filtered}
+
+     return list(result.keys()) if only_keys else result
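Finally, a short sketch of the new ParamInfo container added above (illustrative; the keys and values are invented). enrich_qubit_params performs the same wrapping for every attrs field of a qubit's parameters object.

from sqil_core.utils._formatter import ParamInfo

# Known QPU key: metadata is pulled from PARAM_METADATA.
info = ParamInfo("readout_length", value=2e-6)
print(info.name_and_unit)  # "Readout length [\mu s]" (prefix derived from scale=1e6)
print(str(info))           # JSON dump of id, value, name, symbol, unit, scale, precision

# Unknown key: a readable name is derived from the id and defaults fill the rest.
custom = ParamInfo("my_custom_flux_bias", value=0.12)
print(custom.name)    # "My custom flux bias"
print(custom.symbol)  # falls back to the id itself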