sqil-core 0.0.2__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqil_core/__init__.py +6 -2
- sqil_core/config.py +13 -0
- sqil_core/config_log.py +42 -0
- sqil_core/experiment/__init__.py +11 -0
- sqil_core/experiment/_analysis.py +95 -0
- sqil_core/experiment/_events.py +25 -0
- sqil_core/experiment/_experiment.py +553 -0
- sqil_core/experiment/data/plottr.py +778 -0
- sqil_core/experiment/helpers/_function_override_handler.py +111 -0
- sqil_core/experiment/helpers/_labone_wrappers.py +12 -0
- sqil_core/experiment/instruments/__init__.py +2 -0
- sqil_core/experiment/instruments/_instrument.py +190 -0
- sqil_core/experiment/instruments/drivers/SignalCore_SC5511A.py +515 -0
- sqil_core/experiment/instruments/local_oscillator.py +205 -0
- sqil_core/experiment/instruments/server.py +175 -0
- sqil_core/experiment/instruments/setup.yaml +21 -0
- sqil_core/experiment/instruments/zurich_instruments.py +55 -0
- sqil_core/fit/__init__.py +38 -0
- sqil_core/fit/_core.py +1084 -0
- sqil_core/fit/_fit.py +1191 -0
- sqil_core/fit/_guess.py +232 -0
- sqil_core/fit/_models.py +127 -0
- sqil_core/fit/_quality.py +266 -0
- sqil_core/resonator/__init__.py +13 -0
- sqil_core/resonator/_resonator.py +989 -0
- sqil_core/utils/__init__.py +85 -5
- sqil_core/utils/_analysis.py +415 -0
- sqil_core/utils/_const.py +105 -0
- sqil_core/utils/_formatter.py +259 -0
- sqil_core/utils/_plot.py +373 -0
- sqil_core/utils/_read.py +262 -0
- sqil_core/utils/_utils.py +164 -0
- {sqil_core-0.0.2.dist-info → sqil_core-1.0.0.dist-info}/METADATA +40 -7
- sqil_core-1.0.0.dist-info/RECORD +36 -0
- {sqil_core-0.0.2.dist-info → sqil_core-1.0.0.dist-info}/WHEEL +1 -1
- {sqil_core-0.0.2.dist-info → sqil_core-1.0.0.dist-info}/entry_points.txt +1 -1
- sqil_core/utils/analysis.py +0 -68
- sqil_core/utils/const.py +0 -38
- sqil_core/utils/formatter.py +0 -134
- sqil_core/utils/read.py +0 -156
- sqil_core-0.0.2.dist-info/RECORD +0 -10
sqil_core/utils/_read.py
ADDED
@@ -0,0 +1,262 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
import json
|
4
|
+
import os
|
5
|
+
import shutil
|
6
|
+
from typing import TYPE_CHECKING
|
7
|
+
|
8
|
+
import h5py
|
9
|
+
import numpy as np
|
10
|
+
import yaml
|
11
|
+
from laboneq import serializers
|
12
|
+
|
13
|
+
from sqil_core.utils._formatter import param_info_from_schema
|
14
|
+
|
15
|
+
from ._const import _EXP_UNIT_MAP, PARAM_METADATA
|
16
|
+
|
17
|
+
if TYPE_CHECKING:
|
18
|
+
from laboneq.dsl.quantum.qpu import QPU
|
19
|
+
|
20
|
+
|
21
|
+
# TODO: add tests for schema
def extract_h5_data(
    path: str, keys: list[str] | None = None, schema=False
) -> dict | tuple[np.ndarray, ...]:
    """Extract data at the given keys from an HDF5 file. If no keys are
    given (None) returns the data field of the object.

    Parameters
    ----------
    path : str
        path to the HDF5 file or a folder in which is contained a data.ddh5 file
    keys : None or List, optional
        list of keys to extract from file['data'], by default None
    schema : bool, optional
        if True, also return the database schema stored in the file's
        `__schema__` attribute (None when the attribute is absent), by default False

    Returns
    -------
    Dict or Tuple[np.ndarray, ...]
        The full data dictionary if keys = None.
        The tuple with the requested keys otherwise.

    Example
    -------
        Extract the data object from the dataset:
        >>> data = extract_h5_data(path)
        Extracting only 'amp' and 'phase' from the dataset:
        >>> amp, phase = extract_h5_data(path, ['amp', 'phase'])
        Extracting only 'phase':
        >>> phase, = extract_h5_data(path, ['phase'])
    """
    # If the path is to a folder open /data.ddh5
    if os.path.isdir(path):
        path = os.path.join(path, "data.ddh5")

    with h5py.File(path, "r") as h5file:
        data = h5file["data"]
        data_keys = data.keys()

        db_schema = None
        if schema:
            raw_schema = data.attrs.get("__schema__")
            # Guard against a missing attribute: json.loads(None) raises TypeError.
            db_schema = json.loads(raw_schema) if raw_schema is not None else None

        # Extract only the requested keys
        if keys:
            res = []
            for key in keys:
                key = str(key)
                # `or` short-circuits; the original bitwise `|` evaluated both
                # operands unconditionally.
                if (not key) or (key not in data_keys):
                    # Missing/empty keys yield an empty list placeholder
                    res.append([])
                    continue
                res.append(np.array(data[key][:]))
            # A single requested key (without schema) is returned bare, not as a tuple
            if not schema and len(res) == 1:
                return res[0]
            return tuple(res) if not schema else (*res, db_schema)
        # Extract the whole data dictionary
        h5_dict = _h5_to_dict(data)
        return h5_dict if not schema else {**h5_dict, "schema": db_schema}
|
78
|
+
|
79
|
+
|
80
|
+
def _h5_to_dict(obj) -> dict:
    """Recursively convert an h5py group (or file) into a plain dictionary.

    Datasets become in-memory arrays; nested groups become nested dicts.
    """
    data_dict = {}
    for key in obj.keys():
        item = obj[key]
        if isinstance(item, h5py.Dataset):
            data_dict[key] = item[:]
        elif isinstance(item, h5py.Group):
            # Bug fix: recurse with _h5_to_dict. The previous code called
            # extract_h5_data(item), which treats its argument as a filesystem
            # path (os.path.isdir / h5py.File) and fails on an h5py Group.
            data_dict[key] = _h5_to_dict(item)
    return data_dict
|
90
|
+
|
91
|
+
|
92
|
+
def map_data_dict(data_dict: dict):
    """
    Maps experimental data to standardized arrays using a provided schema.

    This function interprets the structure of a measurement data dictionary
    (obtained using extract_h5_data) by extracting relevant data fields according
    to roles specified in the database schema. It returns the x-axis values, y-axis data,
    any additional sweep parameters, and a mapping of keys used for each role.

    Parameters
    ----------
    data_dict : dict
        Dictionary containing measurement data and an associated 'schema' key
        that defines the role of each field (e.g., "x-axis", "data", "axis").

    Returns
    -------
    x_data : np.ndarray
        Array containing the x-axis values.
    y_data : np.ndarray
        Array containing the y-axis (measured) data.
    sweeps : list[np.ndarray]
        List of additional swept parameter arrays (if any).
    key_map : dict
        Dictionary with keys `"x_data"`, `"y_data"`, and `"sweeps"` indicating
        the corresponding keys used in the original `data_dict`.

    Notes
    -----
    - If the schema is missing, the function prints a warning and returns empty arrays.
    - Each item in the schema must be a dictionary with a `"role"` key.

    Examples
    --------
    >>> x, y, sweeps, mapping = map_data_dict(experiment_data)
    >>> print(f"x-axis data from key: {mapping['x_data']}")
    """
    x_data, y_data, sweeps = np.array([]), np.array([]), []
    key_map = {"x_data": "", "y_data": "", "sweeps": []}

    schema = data_dict.get("schema", None)
    if schema is None:
        print(
            "Cannot automatically read data: no database schema was provided by the experiment."
        )
        # Bug fix: return early. Previously execution fell through to
        # schema.items() and raised AttributeError on None.
        return x_data, y_data, sweeps, key_map

    for key, value in schema.items():
        # Skip non-dict entries (e.g. plain metadata strings in the schema)
        if not isinstance(value, dict):
            continue
        role = value.get("role", None)
        if role == "data":
            key_map["y_data"] = key
            y_data = data_dict[key]
        elif role == "x-axis":
            key_map["x_data"] = key
            x_data = data_dict[key]
        elif role == "axis":
            key_map["sweeps"].append(key)
            sweeps.append(data_dict[key])

    return x_data, y_data, sweeps, key_map
|
154
|
+
|
155
|
+
|
156
|
+
def extract_mapped_data(path: str):
    """
    Loads measurement data from an HDF5 file and maps it into x_data, y_data and sweeps.
    The map and the database schema on which it relies are also returned.

    Parameters
    ----------
    path : str or Path
        Path to the HDF5 file containing experimental data and schema definitions.

    Returns
    -------
    x_data : np.ndarray
        Array of x-axis values extracted according to the schema.
    y_data : np.ndarray
        Array of measured data values (y-axis).
    sweeps : list[np.ndarray]
        List of arrays for any additional swept parameters defined in the schema.
    datadict_map : dict
        Mapping of keys used for `"x_data"`, `"y_data"`, and `"sweeps"` in the original file.
    schema : dict
        The schema used to interpret the data structure and field roles.

    Notes
    -----
    - This function expects the file to contain a top-level "schema" key that defines the
      role of each dataset (e.g., "data", "x-axis", "axis").
    - Uses `extract_h5_data` and `map_data_dict` internally for loading and interpretation.

    Examples
    --------
    >>> x, y, sweeps, datadict_map, schema = extract_mapped_data(path)
    """
    # Load the raw dictionary together with its schema, then delegate the
    # role-based interpretation to map_data_dict.
    datadict = extract_h5_data(path, schema=True)
    db_schema = datadict.get("schema")
    x_data, y_data, sweeps, datadict_map = map_data_dict(datadict)
    return x_data, y_data, sweeps, datadict_map, db_schema
|
194
|
+
|
195
|
+
|
196
|
+
def get_data_and_info(path=None, datadict=None):
    """Load (or reuse) a measurement dictionary and return data plus metadata.

    Exactly one data source is needed: either a `path` to an HDF5 file
    (loaded via `extract_h5_data` with its schema) or an already-loaded
    `datadict`. When both are given, `path` takes precedence.

    Parameters
    ----------
    path : str, optional
        Path to the HDF5 file (or folder containing data.ddh5).
    datadict : dict, optional
        Previously extracted data dictionary including its "schema" key.

    Returns
    -------
    (x_data, y_data, sweeps) : tuple
        Mapped data arrays as produced by `map_data_dict`.
    (x_info, y_info, sweep_info) : tuple
        Parameter metadata built from the schema via `param_info_from_schema`.
    datadict : dict
        The (possibly freshly loaded) data dictionary.

    Raises
    ------
    ValueError
        If neither `path` nor `datadict` is provided.
    """
    if path is None and datadict is None:
        # ValueError (a subclass of Exception) is more precise than a bare Exception
        raise ValueError("At least one of `path` and `datadict` must be specified.")

    if path is not None:
        datadict = extract_h5_data(path, schema=True)

    # Get schema and map data
    schema = datadict.get("schema")
    x_data, y_data, sweeps, datadict_map = map_data_dict(datadict)

    # Get metadata on x_data and y_data
    x_info = param_info_from_schema(
        datadict_map["x_data"], schema[datadict_map["x_data"]]
    )
    y_info = param_info_from_schema(
        datadict_map["y_data"], schema[datadict_map["y_data"]]
    )

    sweep_info = [
        param_info_from_schema(sweep_key, schema[sweep_key])
        for sweep_key in datadict_map["sweeps"]
    ]

    return (x_data, y_data, sweeps), (x_info, y_info, sweep_info), datadict
|
220
|
+
|
221
|
+
|
222
|
+
def read_json(path: str) -> dict:
    """Load the JSON file at `path` and return its contents as a dictionary."""
    with open(path) as file:
        return json.load(file)
|
227
|
+
|
228
|
+
|
229
|
+
def read_yaml(path: str) -> dict:
    """Parse the YAML file at `path` and return its contents.

    On a YAML syntax error the exception is printed and None is returned
    implicitly (best-effort behavior, kept from the original).
    """
    try:
        with open(path) as fh:
            return yaml.safe_load(fh)
    except yaml.YAMLError as exc:
        print(exc)
|
235
|
+
|
236
|
+
|
237
|
+
def read_qpu(dir_path: str, filename: str) -> QPU:
    """Reads QPU file stored in dir_path/filename using laboneq serializers."""
    full_path = os.path.join(dir_path, filename)
    return serializers.load(full_path)
|
241
|
+
|
242
|
+
|
243
|
+
def get_measurement_id(path):
    """Return the measurement id: the first five characters of the path's last component."""
    filename = os.path.basename(path)
    return filename[:5]
|
245
|
+
|
246
|
+
|
247
|
+
def copy_folder(src: str, dst: str):
    """Recursively copy the contents of `src` into `dst`.

    Directories are created as needed and files are copied with their
    metadata preserved (shutil.copy2 semantics). Files already present in
    `dst` are overwritten, matching the behavior of the previous manual
    os.walk implementation.
    """
    # shutil.copytree with dirs_exist_ok=True (Python 3.8+) replaces the
    # hand-rolled os.walk loop: it creates missing directories and copies
    # files with copy2 by default.
    shutil.copytree(src, dst, dirs_exist_ok=True)
|
@@ -0,0 +1,164 @@
|
|
1
|
+
import hashlib
|
2
|
+
import importlib.util
|
3
|
+
import inspect
|
4
|
+
import sys
|
5
|
+
from collections.abc import Iterable
|
6
|
+
|
7
|
+
from sqil_core.config_log import logger
|
8
|
+
|
9
|
+
|
10
|
+
def fill_gaps(primary_list: list, fallback_list: list) -> list:
    """
    Fills gaps in the primary list using values from the fallback list.

    This function iterates through two lists, and for each pair of elements,
    it fills in the gaps where the element in the primary list is `None`
    with the corresponding element from the fallback list. If the element
    in the primary list is not `None`, it is kept as-is.

    Parameters
    ----------
    primary_list : list
        A list of values where some elements may be `None`, which will be replaced by values from `fallback_list`.
    fallback_list : list
        A list of values used to fill gaps in `primary_list`.

    Returns
    -------
    result : list
        A new list where `None` values in `primary_list` are replaced by corresponding values from `fallback_list`.
        (When either input is empty/None the other list object is returned unchanged.)

    Examples
    --------
    >>> primary_list = [1, None, 3, None, 5]
    >>> fallback_list = [10, 20, 30, 40, 50]
    >>> fill_gaps(primary_list, fallback_list)
    [1, 20, 3, 40, 5]
    """
    if (fallback_list is None) or (len(fallback_list) == 0):
        return primary_list

    if primary_list is None:
        return fallback_list

    # Copy so the caller's list is not mutated — the docstring promises a new
    # list (the previous implementation modified primary_list in place).
    result = list(primary_list)

    # Only positions that exist in the primary list are considered.
    for i, fallback in enumerate(fallback_list[: len(result)]):
        # `is None`, not `== None`: identity check per PEP 8, and safe for
        # elements (e.g. numpy arrays) whose __eq__ is not boolean-valued.
        if result[i] is None:
            result[i] = fallback

    return result
|
54
|
+
|
55
|
+
|
56
|
+
def make_iterable(obj) -> Iterable:
    """
    Ensures that the given object is an iterable.

    If the input object is already an iterable (excluding strings), it is returned as-is.
    Otherwise, it is wrapped in a list to make it iterable.

    Parameters
    ----------
    obj : Any
        The object to be converted into an iterable.

    Returns
    -------
    iterable : Iterable
        An iterable version of the input object. If the input is not already an iterable,
        it is returned as a single-element list.

    Examples
    --------
    >>> make_iterable(42)
    [42]

    >>> make_iterable([1, 2, 3])
    [1, 2, 3]

    >>> make_iterable("hello")
    ["hello"]  # Strings are not treated as iterables in this function
    """
    # Strings are technically iterable but are treated as scalar values here.
    if isinstance(obj, str):
        return [obj]
    if isinstance(obj, Iterable):
        return obj
    return [obj]
|
88
|
+
|
89
|
+
|
90
|
+
def has_at_least_one(lst: list, value) -> bool:
    """
    Checks whether a given value appears at least once in a list.
    If the object passed is not iterable, it is converted to an iterable,
    e.g. if lst = 5, the function transforms lst = [lst].

    Parameters
    ----------
    lst : list
        The list to search.
    value : Any
        The value to look for in the list. If `None`, the function checks for the presence
        of `None` using identity comparison.

    Returns
    -------
    bool
        True if the value appears at least once in the list; False otherwise.
    """
    items = make_iterable(lst)

    # None is matched by identity, everything else by equality.
    if value is None:
        return any(item is None for item in items)
    return any(item == value for item in items)
|
115
|
+
|
116
|
+
|
117
|
+
def _count_function_parameters(func):
    """Count the required positional parameters of `func`.

    A parameter is counted when it has no default value and is positional
    (POSITIONAL_OR_KEYWORD or POSITIONAL_ONLY); *args/**kwargs and
    keyword-only parameters are excluded.
    """
    positional_kinds = (
        inspect.Parameter.POSITIONAL_OR_KEYWORD,
        inspect.Parameter.POSITIONAL_ONLY,
    )
    sig = inspect.signature(func)
    # `is` instead of `==`: Parameter.empty is a sentinel, and `==` against an
    # arbitrary default (e.g. a numpy array) may not return a plain bool.
    return sum(
        1
        for param in sig.parameters.values()
        if param.default is inspect.Parameter.empty
        and param.kind in positional_kinds
    )
|
131
|
+
|
132
|
+
|
133
|
+
def _extract_variables_from_module(module_name, path):
    """Import the Python file at `path` under `module_name` and collect its variables.

    Returns a dict mapping every top-level name that does not start with a
    double underscore to its value; on any failure the error is logged and
    an empty dict is returned.
    """
    try:
        spec = importlib.util.spec_from_file_location(module_name, path)
        module = importlib.util.module_from_spec(spec)
        sys.modules[module_name] = module
        spec.loader.exec_module(module)
    except Exception as e:
        logger.error(f"Error while extracting variables from {path}: {str(e)}")
        return {}

    # Keep every top-level binding except dunder names.
    return {
        name: value
        for name, value in vars(module).items()
        if not name.startswith("__")
    }
|
152
|
+
|
153
|
+
|
154
|
+
def _hash_file(path):
    """Generate a hash for the file using SHA256."""
    digest = hashlib.sha256()
    try:
        with open(path, "rb") as fh:
            # Stream the file in 4 KiB chunks to keep memory usage flat.
            while chunk := fh.read(4096):
                digest.update(chunk)
    except Exception as e:
        logger.error(f"Unable to hash file '{path}': {str(e)}")
        return None
    return digest.hexdigest()
|
@@ -1,6 +1,6 @@
|
|
1
|
-
Metadata-Version: 2.
|
1
|
+
Metadata-Version: 2.3
|
2
2
|
Name: sqil-core
|
3
|
-
Version: 0.0
|
3
|
+
Version: 1.0.0
|
4
4
|
Summary: The codebase of the SQIL group in EPFL
|
5
5
|
Author: Andrea Duina
|
6
6
|
Requires-Python: >=3.10,<4.0
|
@@ -9,12 +9,21 @@ Classifier: Programming Language :: Python :: 3.10
|
|
9
9
|
Classifier: Programming Language :: Python :: 3.11
|
10
10
|
Classifier: Programming Language :: Python :: 3.12
|
11
11
|
Classifier: Programming Language :: Python :: 3.13
|
12
|
+
Requires-Dist: blinker (>=1.9.0,<2.0.0)
|
12
13
|
Requires-Dist: h5py (>=3.12.1,<4.0.0)
|
13
14
|
Requires-Dist: isort (==5.9.3)
|
15
|
+
Requires-Dist: laboneq (>=2.54.0,<3.0.0)
|
16
|
+
Requires-Dist: laboneq-applications (>=2.4.0,<3.0.0)
|
17
|
+
Requires-Dist: lmfit (>=1.3.2,<2.0.0)
|
14
18
|
Requires-Dist: matplotlib (>=3.9.3,<4.0.0)
|
15
|
-
Requires-Dist:
|
19
|
+
Requires-Dist: mpld3 (>=0.5.10,<0.6.0)
|
20
|
+
Requires-Dist: numpy (>=2.2.3,<3.0.0)
|
21
|
+
Requires-Dist: plottr (>=0.14.0,<0.15.0)
|
22
|
+
Requires-Dist: pyro5 (>=5.15,<6.0)
|
23
|
+
Requires-Dist: qcodes (>=0.51.0,<0.52.0)
|
24
|
+
Requires-Dist: qcodes-contrib-drivers (>=0.23.0,<0.24.0)
|
16
25
|
Requires-Dist: scipy (>=1.14.1,<2.0.0)
|
17
|
-
Requires-Dist:
|
26
|
+
Requires-Dist: tabulate (>=0.9.0,<0.10.0)
|
18
27
|
Description-Content-Type: text/markdown
|
19
28
|
|
20
29
|
# For users
|
@@ -38,34 +47,44 @@ path = 'path to your data folder'
|
|
38
47
|
mag, phase, freq = sqil.extract_h5_data(path, ['mag_dB', 'phase', 'ro_freq'])
|
39
48
|
```
|
40
49
|
|
50
|
+
## Documentation
|
51
|
+
You can find the documentation for this package [here](https://sqil-epfl.github.io/sqil-core/)
|
52
|
+
|
41
53
|
# For developers
|
42
54
|
|
43
55
|
## Development
|
44
56
|
|
45
57
|
1. **Install poetry if you haven't already**
|
58
|
+
|
46
59
|
```bash
|
47
60
|
$ pip install poetry
|
61
|
+
$ pip install poetry-plugin-shell
|
48
62
|
```
|
49
63
|
|
50
64
|
2. **Install the required packages using poetry**
|
65
|
+
|
51
66
|
```bash
|
52
67
|
$ poetry install
|
53
68
|
```
|
54
69
|
|
55
70
|
3. **Install the pre-commit hooks**
|
56
|
-
If you are on windows you need to install git ([https://git-scm.com/downloads](here)) and add it to your windows PATH.
|
57
|
-
After the installation open a new terminal.
|
71
|
+
If you are on windows you need to install git ([https://git-scm.com/downloads](here)) and add it to your windows PATH.
|
72
|
+
After the installation open a new terminal.
|
73
|
+
|
58
74
|
```bash
|
59
75
|
$ poetry run pre-commit install
|
60
76
|
```
|
77
|
+
|
61
78
|
This will check if your python files are formatted correctly when you try to commit.
|
62
79
|
If that's not the case the commit will be canceled and the files will be automatically formatted.
|
63
80
|
Then you'll have to add and commit again the new files.
|
64
81
|
|
65
82
|
4. **Start the virtual environment**
|
83
|
+
|
66
84
|
```bash
|
67
85
|
$ poetry shell
|
68
86
|
```
|
87
|
+
|
69
88
|
To exit the virtual environment just use `exit`
|
70
89
|
|
71
90
|
#### Test your changes
|
@@ -91,7 +110,11 @@ $ poetry run build
|
|
91
110
|
$ pip install PATH_TO_SQIL_CORE_FOLDER/dist/SQIL_CORE-VERSION.whl
|
92
111
|
```
|
93
112
|
|
94
|
-
If you're
|
113
|
+
If you're testing a new function remember to import it in the folder's `__init__.py` file.
|
114
|
+
|
115
|
+
|
116
|
+
If you're using a jupyter notebook remember to restart the kernel.
|
117
|
+
|
95
118
|
|
96
119
|
## Build
|
97
120
|
|
@@ -99,6 +122,16 @@ If you're using a jupyter notebook remember to restart the kernel
|
|
99
122
|
$ poetry run build
|
100
123
|
```
|
101
124
|
|
125
|
+
## Publish
|
126
|
+
|
127
|
+
To publish version X.X.X run the commands below. This will trigger a GitHub action that deploys to release to PyPi (pip) and GitHub.
|
128
|
+
Remember also to change the version number in the `pyproject.toml` file.
|
129
|
+
|
130
|
+
```bash
|
131
|
+
$ git tag vX.X.X
|
132
|
+
$ git push origin vX.X.X
|
133
|
+
```
|
134
|
+
|
102
135
|
## Docs
|
103
136
|
|
104
137
|
Serve docs
|
@@ -0,0 +1,36 @@
|
|
1
|
+
sqil_core/__init__.py,sha256=Bm5X6xWur6bzw9PGyrHBkBUYSkmrA32ufnrQIbuMDxE,236
|
2
|
+
sqil_core/config.py,sha256=x7nNdIGJh_2jU6_WuyZ_VjjwkcvibAK-Rc-k0yePUFA,166
|
3
|
+
sqil_core/config_log.py,sha256=sSj1HBembBYsos6ilf0V5ll_FBZCNNr9K8CYOS8xxWg,1221
|
4
|
+
sqil_core/experiment/__init__.py,sha256=kIkwb1Yr-UYIM0XfD6IsQTI72nPD33JDHHSekXzlF68,389
|
5
|
+
sqil_core/experiment/_analysis.py,sha256=3v4FmCaVkJobGWUSU--lcXvtk6Ex6GstG9IcP3L4BnE,3114
|
6
|
+
sqil_core/experiment/_events.py,sha256=inPcGJu0iGqJKIeDRqDkSmwpgLb9ABzGF8U_ExqfoeY,693
|
7
|
+
sqil_core/experiment/_experiment.py,sha256=FtFGvN5eAHQhfQm5X3dATXpUvCb6hVgsLObBUPVVtyQ,19823
|
8
|
+
sqil_core/experiment/data/plottr.py,sha256=-hNvzM6u8hm6-5AjcQZMN85fZVP2E1SkrZEevJHoPi0,26481
|
9
|
+
sqil_core/experiment/helpers/_function_override_handler.py,sha256=ldynTPQKifeCbx9OiCqs_KdH9MPYcy0aoToA16NFcCY,3463
|
10
|
+
sqil_core/experiment/helpers/_labone_wrappers.py,sha256=YmmEoNFeQ53xsdvDM7KUluVqpoIimv-PS0QoZ85m6Cs,357
|
11
|
+
sqil_core/experiment/instruments/__init__.py,sha256=V88fgDuu4J6swreuAKeGrc6_P0XZehlKnOVhw71weBk,82
|
12
|
+
sqil_core/experiment/instruments/_instrument.py,sha256=WRYeqgVqfvcanpBBzrY8gx2XrGfntstMAiZidKAvvFE,6025
|
13
|
+
sqil_core/experiment/instruments/drivers/SignalCore_SC5511A.py,sha256=dTH2mbFrTGKPnFL8Z2Kt-V7SOkHgYuOqqY1nyqftRlI,18257
|
14
|
+
sqil_core/experiment/instruments/local_oscillator.py,sha256=07E7UVo4ZEV0ZyjCkmrbGJicV43TMtxsdvi4l-w4Dzg,6214
|
15
|
+
sqil_core/experiment/instruments/server.py,sha256=veNRc0iRQav4EayHefBnwo9UYJj8lhIFD0L_PI1dwtM,6098
|
16
|
+
sqil_core/experiment/instruments/setup.yaml,sha256=B1x3nzUuJZOKRcGY7RHOIoZeVxz2wnMOZY6mNGDAuV8,399
|
17
|
+
sqil_core/experiment/instruments/zurich_instruments.py,sha256=g3QLZVBW2XP_GFFWlmgXmiSzelvoT8GKe4FzH7xW1zY,1880
|
18
|
+
sqil_core/fit/__init__.py,sha256=xDLMBmWW29eZqfwBQHS5cLgIxJNPC5sCvS4CggiQhj0,817
|
19
|
+
sqil_core/fit/_core.py,sha256=wL1XnM2LoYGlnlz1zRrtMKoVZC6aKE1Xbv6y8kBq1BY,41076
|
20
|
+
sqil_core/fit/_fit.py,sha256=vb4DnjnvoVXQtF1WI_WHxfgdIw5Eqw4HV24DqKSJv6M,39297
|
21
|
+
sqil_core/fit/_guess.py,sha256=EE8W65E3ljePNZ5-CmHyFUNUkhQwmF50zl-CALut2-Q,6961
|
22
|
+
sqil_core/fit/_models.py,sha256=H3eAz3JyESprF_m7rY4_PBqCGt4sA94SxWcTrC9QZog,4131
|
23
|
+
sqil_core/fit/_quality.py,sha256=e4IZvVk2hOvIaPuFaUobvgyT2zA5eji9KiHyoQetpc8,8772
|
24
|
+
sqil_core/resonator/__init__.py,sha256=d8MYTwjEUetlw1d4MRcHwHQPYmiwxhn40fFhctWSNaY,254
|
25
|
+
sqil_core/resonator/_resonator.py,sha256=qClDQFDHzg38oModL7ZxpsEUDpijxeDHyr_hv4b0t2g,35558
|
26
|
+
sqil_core/utils/__init__.py,sha256=0y-yPGMOpU4En9xVfCWLTqfKt3ERpTgZknZAMbKnQqY,1950
|
27
|
+
sqil_core/utils/_analysis.py,sha256=qDe914FQxJYneiRliMP1IgVoGDAFfddZ9yXUascVZjU,13435
|
28
|
+
sqil_core/utils/_const.py,sha256=hHTAReTvm3UOIb2kj-oUQn4RKodjSXUnwB0V5cnDB0E,2406
|
29
|
+
sqil_core/utils/_formatter.py,sha256=C_inxxGJ3xEbrWKjGy0xVNzjjNbDMQSCSkWuRMA1noQ,7907
|
30
|
+
sqil_core/utils/_plot.py,sha256=u9OE9I3-xx3KsfMba3hZiIhneECiRBFxpe9HCi-1mU0,12813
|
31
|
+
sqil_core/utils/_read.py,sha256=9TTYQDz0nm81PkvCQBjETFHwpJGBtK5HprjhzxVY2ps,8432
|
32
|
+
sqil_core/utils/_utils.py,sha256=GU0eLO73Lil8yFj9aBWv_Q_dCZZHepSW4emhFOcP0Yc,4652
|
33
|
+
sqil_core-1.0.0.dist-info/METADATA,sha256=zshSJ-TMAfgx6zpvecwx7OMvTNTJc3lFo5BAW5hgg5U,3347
|
34
|
+
sqil_core-1.0.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
|
35
|
+
sqil_core-1.0.0.dist-info/entry_points.txt,sha256=mnYKe0NKDcT5Py_8cL44TgcbLVOUS-CxmGBMEcsbGAQ,95
|
36
|
+
sqil_core-1.0.0.dist-info/RECORD,,
|
sqil_core/utils/analysis.py
DELETED
@@ -1,68 +0,0 @@
|
|
1
|
-
import numpy as np
|
2
|
-
|
3
|
-
|
4
|
-
def remove_offset(data: np.ndarray, avg: int = 3) -> np.ndarray:
|
5
|
-
"""Removes the initial offset from a data matrix or vector by subtracting
|
6
|
-
the average of the first `avg` points. After applying this function,
|
7
|
-
the first point of each column of the data will be shifted to (about) 0.
|
8
|
-
|
9
|
-
Parameters
|
10
|
-
----------
|
11
|
-
data : np.ndarray
|
12
|
-
Input data, either a 1D vector or a 2D matrix
|
13
|
-
avg : int, optional
|
14
|
-
The number of initial points to average when calculating
|
15
|
-
the offset, by default 3
|
16
|
-
|
17
|
-
Returns
|
18
|
-
-------
|
19
|
-
np.ndarray
|
20
|
-
The input data with the offset removed
|
21
|
-
"""
|
22
|
-
is1D = len(data.shape) == 1
|
23
|
-
if is1D:
|
24
|
-
return data - np.mean(data[0:avg])
|
25
|
-
return data - np.mean(data[:, 0:avg], axis=1).reshape(data.shape[0], 1)
|
26
|
-
|
27
|
-
|
28
|
-
def estimate_linear_background(x: np.ndarray, data: np.ndarray, points_cut=0.1) -> list:
|
29
|
-
is1D = len(data.shape) == 1
|
30
|
-
points = data.shape[0] if is1D else data.shape[1]
|
31
|
-
cut = int(points * points_cut)
|
32
|
-
|
33
|
-
# Consider just the cut points
|
34
|
-
x_data = x[0:cut] if is1D else x[0:cut, :]
|
35
|
-
X = np.vstack([np.ones_like(x_data), x_data]).T
|
36
|
-
y_data = data[0:cut] if is1D else data[0:cut, :]
|
37
|
-
|
38
|
-
# Linear fit
|
39
|
-
coefficients, residuals, _, _ = np.linalg.lstsq(
|
40
|
-
X, y_data if is1D else y_data.T, rcond=None
|
41
|
-
)
|
42
|
-
|
43
|
-
return coefficients
|
44
|
-
|
45
|
-
|
46
|
-
def remove_linear_background(
|
47
|
-
x: np.ndarray, data: np.ndarray, points_cut=0.1
|
48
|
-
) -> np.ndarray:
|
49
|
-
"""Removes a linear background from the input data (e.g. the phase background
|
50
|
-
of a spectroscopy).
|
51
|
-
|
52
|
-
|
53
|
-
Parameters
|
54
|
-
----------
|
55
|
-
data : np.ndarray
|
56
|
-
Input data. Can be a 1D vector or a 2D matrix.
|
57
|
-
|
58
|
-
Returns
|
59
|
-
-------
|
60
|
-
np.ndarray
|
61
|
-
The input data with the linear background removed. The shape of the
|
62
|
-
returned array matches the input `data`.
|
63
|
-
"""
|
64
|
-
coefficients = estimate_linear_background(x, data, points_cut)
|
65
|
-
|
66
|
-
# Remove background over the whole array
|
67
|
-
X = np.vstack([np.ones_like(x), x]).T
|
68
|
-
return data - (X @ coefficients).T
|
sqil_core/utils/const.py
DELETED
@@ -1,38 +0,0 @@
|
|
1
|
-
EXP_UNIT_MAP = {
|
2
|
-
-15: "p",
|
3
|
-
-12: "f",
|
4
|
-
-9: "n",
|
5
|
-
-6: "\mu",
|
6
|
-
-3: "m",
|
7
|
-
0: "",
|
8
|
-
3: "k",
|
9
|
-
6: "M",
|
10
|
-
9: "G",
|
11
|
-
12: "T",
|
12
|
-
15: "P",
|
13
|
-
}
|
14
|
-
|
15
|
-
PARAM_METADATA = {
|
16
|
-
"current": {"name": "Current", "symbol": "I", "unit": "A", "scale": 1e3},
|
17
|
-
"ro_freq": {
|
18
|
-
"name": "Readout frequency",
|
19
|
-
"symbol": "f_{RO}",
|
20
|
-
"unit": "Hz",
|
21
|
-
"scale": 1e-9,
|
22
|
-
},
|
23
|
-
"ro_power": {
|
24
|
-
"name": "Readout power",
|
25
|
-
"symbol": "P_{RO}",
|
26
|
-
"unit": "dBm",
|
27
|
-
"scale": 1,
|
28
|
-
},
|
29
|
-
"qu_freq": {
|
30
|
-
"name": "Qubit frequency",
|
31
|
-
"symbol": "f_q",
|
32
|
-
"unit": "Hz",
|
33
|
-
"scale": 1e-9,
|
34
|
-
},
|
35
|
-
"qu_power": {"name": "Qubit power", "symbol": "P_q", "unit": "dBm", "scale": 1},
|
36
|
-
"vna_bw": {"name": "VNA bandwidth", "symbol": "BW_{VNA}", "unit": "Hz", "scale": 1},
|
37
|
-
"vna_avg": {"name": "VNA averages", "symbol": "avg_{VNA}", "unit": "", "scale": 1},
|
38
|
-
}
|