pymagnetos 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pymagnetos/__init__.py +15 -0
- pymagnetos/cli.py +40 -0
- pymagnetos/core/__init__.py +19 -0
- pymagnetos/core/_config.py +340 -0
- pymagnetos/core/_data.py +132 -0
- pymagnetos/core/_processor.py +905 -0
- pymagnetos/core/config_models.py +57 -0
- pymagnetos/core/gui/__init__.py +6 -0
- pymagnetos/core/gui/_base_mainwindow.py +819 -0
- pymagnetos/core/gui/widgets/__init__.py +19 -0
- pymagnetos/core/gui/widgets/_batch_processing.py +319 -0
- pymagnetos/core/gui/widgets/_configuration.py +167 -0
- pymagnetos/core/gui/widgets/_files.py +129 -0
- pymagnetos/core/gui/widgets/_graphs.py +93 -0
- pymagnetos/core/gui/widgets/_param_content.py +20 -0
- pymagnetos/core/gui/widgets/_popup_progressbar.py +29 -0
- pymagnetos/core/gui/widgets/_text_logger.py +32 -0
- pymagnetos/core/signal_processing.py +1004 -0
- pymagnetos/core/utils.py +85 -0
- pymagnetos/log.py +126 -0
- pymagnetos/py.typed +0 -0
- pymagnetos/pytdo/__init__.py +6 -0
- pymagnetos/pytdo/_config.py +24 -0
- pymagnetos/pytdo/_config_models.py +59 -0
- pymagnetos/pytdo/_tdoprocessor.py +1052 -0
- pymagnetos/pytdo/assets/config_default.toml +84 -0
- pymagnetos/pytdo/gui/__init__.py +26 -0
- pymagnetos/pytdo/gui/_worker.py +106 -0
- pymagnetos/pytdo/gui/main.py +617 -0
- pymagnetos/pytdo/gui/widgets/__init__.py +8 -0
- pymagnetos/pytdo/gui/widgets/_buttons.py +66 -0
- pymagnetos/pytdo/gui/widgets/_configuration.py +78 -0
- pymagnetos/pytdo/gui/widgets/_graphs.py +280 -0
- pymagnetos/pytdo/gui/widgets/_param_content.py +137 -0
- pymagnetos/pyuson/__init__.py +7 -0
- pymagnetos/pyuson/_config.py +26 -0
- pymagnetos/pyuson/_config_models.py +71 -0
- pymagnetos/pyuson/_echoprocessor.py +1901 -0
- pymagnetos/pyuson/assets/config_default.toml +92 -0
- pymagnetos/pyuson/gui/__init__.py +26 -0
- pymagnetos/pyuson/gui/_worker.py +135 -0
- pymagnetos/pyuson/gui/main.py +767 -0
- pymagnetos/pyuson/gui/widgets/__init__.py +7 -0
- pymagnetos/pyuson/gui/widgets/_buttons.py +95 -0
- pymagnetos/pyuson/gui/widgets/_configuration.py +85 -0
- pymagnetos/pyuson/gui/widgets/_graphs.py +248 -0
- pymagnetos/pyuson/gui/widgets/_param_content.py +193 -0
- pymagnetos-0.1.0.dist-info/METADATA +23 -0
- pymagnetos-0.1.0.dist-info/RECORD +51 -0
- pymagnetos-0.1.0.dist-info/WHEEL +4 -0
- pymagnetos-0.1.0.dist-info/entry_points.txt +7 -0
|
@@ -0,0 +1,1052 @@
|
|
|
1
|
+
"""TDO Processor class to analyse TDO experiments."""
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from collections.abc import Callable
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Literal, Self
|
|
7
|
+
|
|
8
|
+
import numpy as np
|
|
9
|
+
from scipy import signal
|
|
10
|
+
|
|
11
|
+
from ..core import BaseProcessor, sp
|
|
12
|
+
from ..log import configure_logger
|
|
13
|
+
from ._config import TDOConfig
|
|
14
|
+
|
|
15
|
+
# Raw/processed 1D datasets that are eligible for time-cropping
# (see `crop_signal` / `_crop_signal`).
MEAS_TO_CROP = (
    "pickup_time",
    "pickup",
    "magfield_time",
    "magfield",
    "signal_time",
)

TDO_SIGNAL_NAME = "tdo_signal"  # name of the TDO signal (the frequencies barycenters)
# Expected column counts used to recognise the legacy CSV layouts.
CSV_TDO_NCOLS = 3
CSV_RES_NCOLS = 11

# Module-level logger; handlers are attached in `TDOProcessor.__init__`
# through `configure_logger`.
logger = logging.getLogger(__name__)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class TDOProcessor(BaseProcessor):
|
|
31
|
+
"""A Processor class for TDO experiments."""
|
|
32
|
+
|
|
33
|
+
def __init__(self, *args, **kwargs) -> None:
    """Set up logging, configuration class and dataset bookkeeping."""
    configure_logger(logger, "pytdo.log")

    # The base class instantiates the configuration from this class.
    self._config_cls = TDOConfig

    super().__init__(*args, **kwargs)

    # Experiment metadata (temperature, sampling rates, ...).
    self.metadata = {}

    # Name of the raw measurement, taken from the configuration.
    self._meas_name = next(iter(self.cfg.measurements))
    # Names of the datasets derived from the TDO signal.
    self._tdo_name = TDO_SIGNAL_NAME
    self._tdo_det_inc_name = f"{self._tdo_name}_detrend_inc"
    self._tdo_det_dec_name = f"{self._tdo_name}_detrend_dec"
    self._tdo_inv_inc_name = self._tdo_det_inc_name.replace("detrend", "inverse")
    self._tdo_inv_dec_name = self._tdo_det_dec_name.replace("detrend", "inverse")

    # Number of samples in the raw oscilloscope signal.
    self.npoints_raw = 0

    # Cropping state of every croppable dataset.
    self.is_cropped = dict.fromkeys((*MEAS_TO_CROP, self._meas_name), False)
|
|
53
|
+
|
|
54
|
+
@property
def expid(self) -> str:
    """Experiment ID, mirrored from the Config object."""
    return self.cfg.expid

@expid.setter
def expid(self, value: str):
    """Set the experiment ID and rebuild dependent state."""
    self.cfg.expid = value
    # File names depend on the experiment ID: rebuild them and reset
    # any previously loaded/processed data.
    self.cfg.build_filenames()
    self._reinit()
    logger.info(f"Experiment ID set to {self.expid}.")
|
|
66
|
+
|
|
67
|
+
@property
def data_directory(self) -> Path:
    """Data directory, mirrored from the Config object."""
    return self.cfg.data_directory

@data_directory.setter
def data_directory(self, value: str | Path):
    """Set the data directory and rebuild dependent state."""
    self.cfg.data_directory = Path(value)
    # File names depend on the directory: rebuild them and reset any
    # previously loaded/processed data.
    self.cfg.build_filenames()
    self._reinit()
    logger.info(f"Data directory set to {self.data_directory}.")
|
|
79
|
+
|
|
80
|
+
def load_pickup(self) -> Self:
    """
    Load the pickup binary file with the settings from the configuration.

    The first two values of the file are read as the sampling rate and the
    experiment temperature and stored in `metadata`; the remaining samples
    are stored in `data_raw` together with the corresponding time vector.
    If the file does not exist, empty arrays are stored instead and a unit
    sampling rate is used as a placeholder.

    Returns
    -------
    Self
        The processor itself, to allow method chaining.
    """
    # Get reader settings
    filename = Path(self.cfg.filenames["pickup"])
    file_cfg = self.cfg.files["pickup"]
    npickups = self.cfg.parameters.pickup_number
    pickup_index = self.cfg.parameters.pickup_index

    # Fix: removed duplicated "from from" in the log message.
    logger.info(f"Loading pickup data from {filename.name}...")
    if filename.is_file():
        # Load file
        pu = self._load_pickup(
            filename,
            precision=file_cfg.precision,
            endian=file_cfg.endian,
            order=file_cfg.order,
            nseries=npickups,
            index=pickup_index,
        )
        # The first two samples are metadata, not signal.
        self.metadata["fs_pickup"] = float(pu[0])
        self.metadata["temperature"] = float(pu[1])

        # Store in the Data object
        nsamples = pu.shape[0] - 2
        self.set_data_raw(
            "pickup_time",
            np.linspace(
                0, nsamples / self.metadata["fs_pickup"], nsamples, endpoint=False
            ),
        )
        self.set_data_raw("pickup", pu[2:])

        logger.info(
            f"Pickup loaded. Temperature : {self.metadata['temperature']:2.4f}K, "
            f"{nsamples} points sampled at {self.metadata['fs_pickup'] / 1e3}kHz."
        )
    else:
        # We'll simulate a fake pickup when we'll know the signal size
        logger.info(f"{filename.name} not found, skipping.")
        self.set_data_raw("pickup_time", np.array([]))
        self.set_data_raw("pickup", np.array([]))
        self.metadata["fs_pickup"] = 1.0

    # Freshly (re)loaded data is not cropped.
    self.is_cropped["pickup_time"] = False
    self.is_cropped["pickup"] = False

    return self
|
|
135
|
+
|
|
136
|
+
def load_oscillo(self) -> Self:
    """
    Load the TDO oscilloscope signal into `data_raw`.

    The first two values of the binary file hold the sampling rate and the
    experiment temperature; both are stored in `metadata`.
    """
    # Reader settings from the configuration
    filename = Path(self.cfg.filenames["oscillo"])
    reader_cfg = self.cfg.files["oscillo"]

    if not filename.is_file():
        logger.error(f"{filename.name} not found.")
        return self

    logger.info(f"Loading data from {filename.name}...")
    data = self.load_bin(
        filename, precision=reader_cfg.precision, endian=reader_cfg.endian
    )

    # Header values: sampling rate, then temperature.
    self.metadata["fs_signal"] = float(data[0])
    temperature = float(data[1])
    self.npoints_raw = data.shape[0] - 2

    # A temperature may already have been read from the pickup file;
    # warn on mismatch but keep the one from the TDO binary file.
    other_temperature = self.metadata.pop("temperature", None)
    if other_temperature is not None and temperature != other_temperature:
        logger.error(
            "Temperatures found in the files do not match "
            f"(got {temperature} and {other_temperature})."
        )
    self.metadata["temperature"] = temperature

    # Store the signal and (re)build its time vector.
    self.set_data_raw(self._meas_name, data[2:])
    self.get_signal_time()

    self.is_cropped[self._meas_name] = False

    logger.info(
        f"{self._meas_name} loaded. "
        f"Temperature : {temperature:2.4f}K, {self.npoints_raw} points sampled"
        f" at {self.metadata['fs_signal'] / 1e6}MHz."
    )

    return self
|
|
185
|
+
|
|
186
|
+
def get_signal_time(self, fs: float | None = None) -> Self:
    """
    Build the experiment time vector corresponding to the raw data.

    Parameters
    ----------
    fs : float or None, optional
        Sampling frequency. If None (default), the one read from the
        oscilloscope file is used, loading the file first if needed.

    Returns
    -------
    Self
        The processor itself, to allow method chaining.
    """
    if fs is None:
        if "fs_signal" not in self.metadata:
            # Fix: the original raised a KeyError here when the signal
            # had never been loaded. `load_oscillo()` stores the
            # sampling rate and calls this method again on success.
            self.load_oscillo()
            return self
        fs = self.metadata["fs_signal"]
    elif "fs_signal" not in self.metadata:
        # An explicit fs was given but the signal itself is missing:
        # load it so that `npoints_raw` is meaningful.
        self.load_oscillo()

    self.set_data_raw(
        "signal_time",
        np.linspace(
            0,
            self.npoints_raw / fs,
            self.npoints_raw,
            endpoint=False,
        ),
    )
    self.is_cropped["signal_time"] = False

    return self
|
|
205
|
+
|
|
206
|
+
def compute_field(self, method: str = "trapz") -> Self:
    """
    Compute the magnetic field from the pickup signal.

    Parameters
    ----------
    method : str, optional
        Integration method, forwarded to the base-class implementation.
        Default is "trapz".
    """
    # The pickup signal is required for the integration.
    if "pickup" not in self.data_raw:
        self.load_pickup()

    # Integrate the pickup over the coil surface; the result is stored
    # by the base-class helper.
    self._compute_field(surface=self.cfg.parameters.pickup_surface, method=method)

    # A freshly computed field is not cropped yet.
    self.is_cropped["magfield"] = False
    self.is_cropped["magfield_time"] = False

    return self
|
|
222
|
+
|
|
223
|
+
def _crop_signal(
|
|
224
|
+
self,
|
|
225
|
+
meas_name: str,
|
|
226
|
+
start: int,
|
|
227
|
+
stop: int,
|
|
228
|
+
where: Literal["raw", "processed"] = "raw",
|
|
229
|
+
force: bool = False,
|
|
230
|
+
append: str = "_full",
|
|
231
|
+
):
|
|
232
|
+
"""
|
|
233
|
+
Crop a 1D vector from `start` to `stop`.
|
|
234
|
+
|
|
235
|
+
The dataset named `meas_name` must be a 1D vector.
|
|
236
|
+
|
|
237
|
+
Parameters
|
|
238
|
+
----------
|
|
239
|
+
meas_name : str
|
|
240
|
+
Dataset name. Must be allowed to be re-cropped, e.g. exists in the
|
|
241
|
+
`is_cropped` dict.
|
|
242
|
+
start, stop : int
|
|
243
|
+
Start and stop index.
|
|
244
|
+
where : {"raw", "processed"}, optional
|
|
245
|
+
Where to get and set the data (`data_raw` or `data_processed`). Default is
|
|
246
|
+
"raw".
|
|
247
|
+
force : bool, optional
|
|
248
|
+
Whether to crop even if a dataset named `meas_name + append` already exists.
|
|
249
|
+
Default is False.
|
|
250
|
+
append : str, optional
|
|
251
|
+
Bit of text that is appended to the base measurement name to store the
|
|
252
|
+
original data. Default is "_full".
|
|
253
|
+
"""
|
|
254
|
+
match where:
|
|
255
|
+
case "raw":
|
|
256
|
+
getter = getattr(self, "get_data_raw")
|
|
257
|
+
setter = getattr(self, "set_data_raw")
|
|
258
|
+
case "processed":
|
|
259
|
+
getter = getattr(self, "get_data_processed")
|
|
260
|
+
setter = getattr(self, "set_data_processed")
|
|
261
|
+
|
|
262
|
+
if getter(meas_name + append, checkonly=True) and not force:
|
|
263
|
+
self.is_cropped[meas_name] = True
|
|
264
|
+
logger.info(f"{meas_name} not cropped because already cropped.")
|
|
265
|
+
elif getter(meas_name, checkonly=True):
|
|
266
|
+
sig = getter(meas_name)
|
|
267
|
+
setter(meas_name + append, sig.copy())
|
|
268
|
+
setter(meas_name, sig[start:stop])
|
|
269
|
+
self.is_cropped[meas_name] = True
|
|
270
|
+
logger.info(f"{meas_name} cropped.")
|
|
271
|
+
else:
|
|
272
|
+
self.is_cropped[meas_name] = False
|
|
273
|
+
logger.warning(f"{meas_name} not cropped because not loaded or created.")
|
|
274
|
+
|
|
275
|
+
def crop_signal(self, force: bool = False) -> Self:
    """
    Crop signals up until `max_time` defined in the settings.

    The cropped data replaces the working datasets in place; the
    original data is kept under the same name with a trailing `_full`.
    """
    logger.info("Cropping data...")
    max_time = self.cfg.settings.max_time

    # TDO signal and its time vector (sampled at fs_signal)
    if "fs_signal" in self.metadata:
        stop = int(self.metadata["fs_signal"] * max_time)
        for name in (self._meas_name, "signal_time"):
            self._crop_signal(name, 0, stop, where="raw", force=force)
    else:
        logger.warning("Data not loaded.")

    # Pickup and magnetic field (sampled at fs_pickup)
    if "fs_pickup" in self.metadata:
        stop = int(self.metadata["fs_pickup"] * max_time)
        for name in ("pickup_time", "pickup"):
            self._crop_signal(name, 0, stop, where="raw", force=force)
        for name in ("magfield_time", "magfield"):
            self._crop_signal(name, 0, stop, where="processed", force=force)
    else:
        logger.warning("Pickup not loaded.")

    return self
|
|
305
|
+
|
|
306
|
+
def compute_spectrogram(self) -> Self:
    """
    Perform a spectrogram on the raw signal.

    This is an FFT performed on overlapping time windows of the signal,
    giving a power spectral density (PSD) as a function of time and
    frequency. Parameters are read from the settings section of the
    configuration.

    The time and frequency vectors and the 2D PSD are stored in
    `data_processed` as "spectro_t", "spectro_f" and "spectro_s".
    """
    # Make sure the raw data is available and time-cropped.
    if not self._check_data_loaded():
        self.load_oscillo()
    if not self.is_cropped[self._meas_name]:
        self.crop_signal()

    # Spectrogram parameters from the configuration
    nperseg = self.cfg.settings.spectro_nperseg
    noverlap = self.cfg.settings.spectro_noverlap
    nfft = self.cfg.settings.spectro_win_size
    if noverlap == -1:
        # Default to half-window overlap.
        noverlap = nperseg // 2

    logger.info(
        "Computing spectrogram (time-window size : "
        f"{nperseg / self.metadata['fs_signal'] * 1e6:2.2f}µs)..."
    )
    fxx, txx, sxx = signal.spectrogram(
        self.get_data_raw(self._meas_name),
        self.metadata["fs_signal"],
        window="hann",
        nperseg=nperseg,
        noverlap=noverlap,
        nfft=nfft,
        detrend="linear",
    )
    logger.info("Spectrogram computed.")

    self.set_data_processed("spectro_f", fxx)
    self.set_data_processed("spectro_t", txx)
    self.set_data_processed("spectro_s", sxx)

    return self
|
|
356
|
+
|
|
357
|
+
def find_barycenters(self) -> Self:
    """
    Find frequency barycenters in the spectrogram.

    For each time window of the 2D PSD, the barycenter of the
    frequencies around the PSD maximum is computed within a frequency
    window defined by the settings. The resulting TDO signal and its
    experiment time vector end up in `data_processed`.
    """
    # The spectrogram is a hard prerequisite here.
    if not self._check_spectro_computed():
        logger.error("Spectrogram not computed. Run `compute_spectrogram()` first")
        return self

    fxx = self.get_data_processed("spectro_f")
    freq_window = self.cfg.settings.barycenters_fwindow

    # Number of frequency samples in the window, for logging purposes.
    nsamples = int(freq_window / np.mean(np.diff(fxx)))
    logger.info(
        "Finding spectrogram barycenters (frequency-window half-size : "
        f"{freq_window:2.3f}Hz, {nsamples} samples)..."
    )

    # Extract the barycenters and store them as the TDO signal.
    barycenters = sp.find_barycenters(
        fxx,
        self.get_data_processed("spectro_s"),
        freq_window,
        fast=self.cfg.settings.barycenters_fast,
    )
    self.set_data_processed(self._tdo_name, barycenters)

    # Corresponding time vector
    self.apply_time_offset()

    logger.info("TDO signal extracted.")

    return self
|
|
398
|
+
|
|
399
|
+
def apply_time_offset(self) -> Self:
    """Apply the configured PU/TDO offset on the experiment time vector."""
    if not self.get_data_processed("spectro_t", checkonly=True):
        logger.warning("Spectrogram was not computed.")
        return self

    logger.info("Applying offset on time vector...")
    spectro_time = self.get_data_processed("spectro_t")
    self.set_data_processed(
        "time_exp", spectro_time - self.cfg.settings.time_offset
    )
    # The field must be re-interpolated on the shifted time vector.
    self.align_field()
    logger.info(
        f"Applied a {self.cfg.settings.time_offset * 1e6}µs PU/TDO time offset"
    )

    return self
|
|
414
|
+
|
|
415
|
+
def align_field(self) -> Self:
    """Align the magnetic field on the `time_exp` vector."""
    # Make sure field and TDO signal both exist.
    if not self._check_field_computed():
        self.compute_field()
        self.crop_signal()

    if not self._check_barycenters_computed():
        self.find_barycenters()
        if not self._check_barycenters_computed():
            logger.error("TDO signal could not be extracted, check messages above.")
            return self

    # Interpolate the field onto the experiment time vector.
    # NOTE(review): only `left=0` is clamped; values past the last
    # pickup sample keep np.interp's right-edge behavior — confirm.
    exptime = self.get_data_processed("time_exp")
    aligned = np.interp(
        exptime,
        self.get_data_processed("magfield_time"),
        self.get_data_processed("magfield"),
        left=0,
    )
    self.set_data_processed("magfield", aligned)
    self.set_data_processed("magfield_time", exptime)

    # Get the up / down indices
    self.get_up_down_indices()

    # Remove previously-computed values based on this TDO signal
    self._remove_data_changed()

    return self
|
|
442
|
+
|
|
443
|
+
def get_up_down_indices(self) -> Self:
    """Store the indices of increasing and decreasing magnetic field."""
    field = self.get_data_processed("magfield")
    self.inds_inc, self.inds_dec = sp.get_up_down_indices(field)

    return self
|
|
450
|
+
|
|
451
|
+
def detrend_polyfit(self) -> Self:
    """Detrend the TDO signal with a polynomial fit, for both sweeps."""
    # The field must be aligned on the TDO time vector first.
    if not self._check_field_aligned():
        self.align_field()
        if not self._check_field_aligned():
            logger.error(
                "Magnetic field could not be aligned, check messages above"
            )
            return self

    if not (hasattr(self, "inds_inc") and hasattr(self, "inds_dec")):
        self.get_up_down_indices()

    # Fit window (teslas) and polynomial degree from the settings
    b1, b2 = self.cfg.settings.poly_window
    deg = self.cfg.settings.poly_deg

    # NOTE(review): only the lower bound is validated against the max
    # field; a too-large b2 is silently accepted — confirm intended.
    maxfield = self.get_data_processed("magfield").max()
    if b1 > maxfield:
        logger.error(
            f"Field window lower bound is {b1}T but max field is {maxfield}T"
        )
        return self

    logger.info(
        f"Fitting (degree : {deg}) and detrending TDO signal in {b1, b2}T..."
    )
    field = self.get_data_processed("magfield")
    tdo = self.get_data_processed(self._tdo_name)

    # One fit per sweep direction; `sp.fit_poly` returns the fit and
    # the residuals (the detrended signal).
    fp_inc, res_inc = sp.fit_poly(
        field[self.inds_inc], tdo[self.inds_inc], b1, b2, deg
    )
    fp_dec, res_dec = sp.fit_poly(
        field[self.inds_dec], tdo[self.inds_dec], b1, b2, deg
    )

    self.set_data_processed(self._tdo_name + "_fit_inc", fp_inc)
    self.set_data_processed(self._tdo_name + "_fit_dec", fp_dec)
    self.set_data_processed(self._tdo_det_inc_name, res_inc)
    self.set_data_processed(self._tdo_det_dec_name, res_dec)

    logger.info("TDO signal detrended.")

    return self
|
|
506
|
+
|
|
507
|
+
def oversample_inverse_field(self) -> Self:
    """Express the detrended TDO signal in 1/B and oversample it."""
    if not self._check_tdo_detrended():
        self.detrend_polyfit()
        if not self._check_tdo_detrended():
            logger.error("TDO signal could not be detrended")
            return self

    # Field window; -1 means "reuse the polynomial-fit window".
    b1, b2 = self.cfg.settings.fft_window
    npoints = self.cfg.settings.npoints_interp_inverse
    if b1 == -1:
        b1 = self.cfg.settings.poly_window[0]
    if b2 == -1:
        b2 = self.cfg.settings.poly_window[1]
    if b1 == 0 or b2 == 0:
        logger.error("Can't use 0T as a boundary for FFT (we need 1/B)")
        return self

    logger.info(
        f"Oversampling TDO signal in 1/B ({npoints} points in {b1, b2}T)..."
    )
    field = self.get_data_processed("magfield")

    # One interpolation per sweep direction.
    inv_field_inc, inv_tdo_inc = sp.interpolate_inverse(
        field[self.inds_inc],
        self.get_data_processed(self._tdo_det_inc_name),
        b1,
        b2,
        npoints,
    )
    inv_field_dec, inv_tdo_dec = sp.interpolate_inverse(
        field[self.inds_dec],
        self.get_data_processed(self._tdo_det_dec_name),
        b1,
        b2,
        npoints,
    )

    self.set_data_processed("magfield_inverse_inc", inv_field_inc)
    self.set_data_processed(self._tdo_inv_inc_name, inv_tdo_inc)
    self.set_data_processed("magfield_inverse_dec", inv_field_dec)
    self.set_data_processed(self._tdo_inv_dec_name, inv_tdo_dec)

    logger.info("TDO signal interpolated in 1/B.")

    return self
|
|
557
|
+
|
|
558
|
+
def fft_inverse_field(self) -> Self:
    """
    FFT on the signal in 1/B.

    The resulting "frequencies" are therefore in units of B (teslas).
    """
    if not self._check_tdo_inverse():
        self.oversample_inverse_field()
        if not self._check_tdo_inverse():
            logger.error("Could not compute FFT because there is no signal in 1/B")
            return self

    logger.info("FFT on TDO signal in 1/B...")
    pad_mult = self.cfg.settings.fft_pad_mult

    # One FFT per sweep; the sampling step is the mean 1/B increment.
    f_inc, fft_inc = sp.fourier_transform(
        self.get_data_processed(self._tdo_inv_inc_name),
        np.mean(np.diff(self.get_data_processed("magfield_inverse_inc"))),
        pad_mult=pad_mult,
    )
    f_dec, fft_dec = sp.fourier_transform(
        self.get_data_processed(self._tdo_inv_dec_name),
        np.mean(np.diff(self.get_data_processed("magfield_inverse_dec"))),
        pad_mult=pad_mult,
    )

    # Clip above the configured maximum "field frequency".
    # NOTE(review): the cut index is computed on f_dec and applied to
    # both sweeps; assumes both FFT axes are identical — confirm.
    cut = sp.find_nearest_index(f_dec, self.cfg.settings.max_bfreq)

    self.set_data_processed("bfreq_inc", f_inc[:cut])
    self.set_data_processed("fft_inc", fft_inc[:cut])
    self.set_data_processed("bfreq_dec", f_dec[:cut])
    self.set_data_processed("fft_dec", fft_dec[:cut])

    logger.info(
        f"FFT computed and cropped up until {self.cfg.settings.max_bfreq}T."
    )

    return self
|
|
602
|
+
|
|
603
|
+
def extract_tdo(self) -> Self:
|
|
604
|
+
"""Helper function that extracts the TDO signal."""
|
|
605
|
+
self.compute_spectrogram().find_barycenters()
|
|
606
|
+
|
|
607
|
+
return self
|
|
608
|
+
|
|
609
|
+
def analyse(self) -> Self:
|
|
610
|
+
"""Helper function to detrend the signal, sort signal in 1/B and compute FFT."""
|
|
611
|
+
self.detrend_polyfit().oversample_inverse_field().fft_inverse_field()
|
|
612
|
+
|
|
613
|
+
return self
|
|
614
|
+
|
|
615
|
+
def collect_arrays(self, array_names_list: list[str]) -> list[np.ndarray]:
    """
    Collect data arrays from `data_processed`.

    Fix: the original docstring was garbled ("Collect and data in").

    Parameters
    ----------
    array_names_list : list of str
        Names of the datasets to collect.

    Returns
    -------
    list of np.ndarray
        The corresponding arrays, in the same order.
    """
    return [self.get_data_processed(name) for name in array_names_list]
|
|
618
|
+
|
|
619
|
+
def get_csv_filename(self, suffix: str = "", ext: str = ".csv") -> str:
    """Build an output file path from the experiment ID."""
    basename = self.expid + suffix + ext
    return str(self.data_directory / basename)
|
|
622
|
+
|
|
623
|
+
def save_tdo_csv(self, filename: str | Path | None = None, sep: str = "\t") -> Self:
    """
    Save intermediate results as a CSV file.

    The intermediate results correspond to the extracted TDO signal
    (time, field, barycenters) before any post-processing such as
    detrending. An existing file is overwritten.

    Parameters
    ----------
    filename : str, Path or None, optional
        Output file. If None (default), a name derived from the
        experiment ID with an "-out" suffix is used.
    sep : str, optional
        Column separator. Default is a tab.
    """
    if filename is None:
        filename = self.get_csv_filename(suffix="-out")

    logger.info("Saving extracted TDO signal...")

    if not self._check_field_aligned():
        logger.warning(
            "The TDO signal and the magnetic fields are not aligned, aligning now"
        )
        self.align_field()

    # Collate the three columns into one table.
    columns = self.collect_arrays(["time_exp", "magfield", self._tdo_name])
    table = sp.collate_arrays(columns)

    np.savetxt(
        filename,
        table,
        delimiter=sep,
        header=sep.join(["time", "field", "tdo"]),
        comments="",
    )

    logger.info(f"Saved at {Path(filename).name}.")

    return self
|
|
658
|
+
|
|
659
|
+
def _remove_data_changed(self):
|
|
660
|
+
"""Remove data that was computed but the data it relied on changed."""
|
|
661
|
+
self.remove_data_processed(self._tdo_name + "_fit_inc")
|
|
662
|
+
self.remove_data_processed(self._tdo_name + "_fit_dec")
|
|
663
|
+
self.remove_data_processed(self._tdo_det_inc_name)
|
|
664
|
+
self.remove_data_processed(self._tdo_det_dec_name)
|
|
665
|
+
self.remove_data_processed(self._tdo_inv_inc_name)
|
|
666
|
+
self.remove_data_processed(self._tdo_inv_dec_name)
|
|
667
|
+
self.remove_data_processed("bfreq_inc")
|
|
668
|
+
self.remove_data_processed("bfreq_dec")
|
|
669
|
+
self.remove_data_processed("fft_inc")
|
|
670
|
+
self.remove_data_processed("fft_dec")
|
|
671
|
+
|
|
672
|
+
def _get_result_names_list(self, ud: Literal["inc", "dec"]):
|
|
673
|
+
"""
|
|
674
|
+
Generate the list of things to save.
|
|
675
|
+
|
|
676
|
+
Either in increasing or decreasing magnetic field.
|
|
677
|
+
"""
|
|
678
|
+
return [
|
|
679
|
+
"time_exp",
|
|
680
|
+
"magfield",
|
|
681
|
+
self._tdo_name,
|
|
682
|
+
self._tdo_name + "_fit_" + ud,
|
|
683
|
+
self._tdo_det_dec_name if ud == "dec" else self._tdo_det_inc_name,
|
|
684
|
+
"magfield_inverse_" + ud,
|
|
685
|
+
self._tdo_inv_dec_name if ud == "dec" else self._tdo_inv_inc_name,
|
|
686
|
+
"bfreq_" + ud,
|
|
687
|
+
"fft_" + ud,
|
|
688
|
+
]
|
|
689
|
+
|
|
690
|
+
def _get_results_up_down(
    self, ud: Literal["inc", "dec"]
) -> tuple[np.ndarray, list[str]]:
    """Collate increasing- or decreasing-field results into one table."""
    inds = self.inds_dec if ud == "dec" else self.inds_inc

    names_list = self._get_result_names_list(ud)
    to_save = self.collect_arrays(names_list)

    # Insert the sweep-restricted field and TDO signal at fixed
    # positions so the CSV column order matches the historical layout.
    names_list.insert(3, "magfield_" + ud)
    to_save.insert(3, self.get_data_processed("magfield")[inds])
    names_list.insert(4, self._tdo_name + "_" + ud)
    to_save.insert(4, self.get_data_processed(self._tdo_name)[inds])

    return sp.collate_arrays(to_save), names_list
|
|
711
|
+
|
|
712
|
+
def save_results_csv(
    self,
    filename_prefix: str | Path | None = None,
    sep: str = "\t",
) -> Self:
    """
    Save analysed results as CSV, one file per sweep direction.

    Each file includes : time, aligned magnetic field, TDO signal,
    detrended TDO signal, 1/B, TDO signal oversampled in 1/B, Fourier
    transform. The increasing-field file gets a "-up" stem suffix, the
    decreasing-field one "-down".

    Parameters
    ----------
    filename_prefix : str, Path or None, optional
        Output file prefix. If None (default), a name derived from the
        experiment ID with a "-results" suffix is used.
    sep : str, optional
        Column separator. Default is a tab.
    """
    if filename_prefix is None:
        filename_prefix = Path(self.get_csv_filename(suffix="-results"))
    else:
        filename_prefix = Path(filename_prefix)

    logger.info("Saving analysed results...")

    # Fix: the inc/dec export logic was duplicated verbatim; a single
    # loop keeps both branches identical by construction.
    saved_names = []
    for ud, stem_suffix in (("inc", "-up"), ("dec", "-down")):
        filename = filename_prefix.with_stem(filename_prefix.stem + stem_suffix)
        to_save, header = self._get_results_up_down(ud)
        np.savetxt(
            filename,
            to_save,
            delimiter=sep,
            header=sep.join(header),
            comments="",
        )
        saved_names.append(filename.name)

    logger.info(f"Saved at {saved_names[0]} and {saved_names[1]}.")

    return self
|
|
757
|
+
|
|
758
|
+
def load_csv(self, filepath: Path | str, sep: str | None = None):
    """
    Load data from a CSV file.

    The concrete reader is chosen from the file extension and (for
    .csv/.tsv) the number of columns; on any failure an error is logged
    and the method returns early without touching existing data further.

    Parameters
    ----------
    filepath : Path or str
        Path to the input file (.out, .txt, .csv or .tsv).
    sep : str or None, optional
        Column delimiter. None lets each reader use its format's default.
    """
    filepath = Path(filepath)
    if not filepath.is_file():
        logger.error("File does not exist")
        return

    reader, delimiter = self._determine_file_format(filepath, sep=sep)

    # _determine_file_format returns the sentinel string "unknown" on
    # failure. Plain equality is safe even when `reader` is a callable
    # (it simply compares unequal), so no isinstance guard is needed.
    if reader == "unknown":
        logger.error(f"Could not load input file ({filepath})")
        return

    logger.info(f"Reading data from {filepath.name}...")
    self._reinit()
    status = reader(filepath, delimiter)
    if not status:
        logger.error(f"Failed to read {filepath.name}, check messages above")
        return

    # Get the up / down indices
    self.get_up_down_indices()

    logger.info(f"Read data from {filepath.name}.")
|
|
782
|
+
|
|
783
|
+
def _determine_file_format(
|
|
784
|
+
self, filepath: Path, sep: str | None = None
|
|
785
|
+
) -> tuple[Callable[[Path, str], bool] | str, str]:
|
|
786
|
+
"""Determine what type of file we got."""
|
|
787
|
+
match filepath.suffix:
|
|
788
|
+
case ".out":
|
|
789
|
+
return self._load_csv_legacy_tdo, " " if sep is None else sep
|
|
790
|
+
case ".txt":
|
|
791
|
+
return self._load_csv_legacy_results, "," if sep is None else sep
|
|
792
|
+
case ".csv" | ".tsv":
|
|
793
|
+
return self._determine_csv_format(filepath, sep=sep)
|
|
794
|
+
case _:
|
|
795
|
+
logger.error(f"Unknown file extension : {filepath.suffix}")
|
|
796
|
+
return "unknown", ""
|
|
797
|
+
|
|
798
|
+
def _determine_csv_format(
    self, filepath: Path, sep: str | None = None
) -> tuple[Callable[[Path, str], bool] | str, str]:
    """
    Pick the reader for a .csv/.tsv file from its number of columns.

    Returns ``(reader, delimiter)``, or ``("unknown", "")`` when the
    column count matches neither known layout.
    """
    # Only the header line is needed to count columns.
    with open(filepath) as fh:
        first_line = fh.readline()

    if sep is None:
        sep = "\t"
    # Number of columns = number of delimiters + 1.
    ncols = first_line.count(sep) + 1

    if ncols == CSV_TDO_NCOLS:
        return self._load_csv_tdo, sep
    if ncols == CSV_RES_NCOLS:
        return self._load_csv_results, sep
    logger.error(
        f"Could not determine what type of file this is : {filepath.name}"
    )
    return "unknown", ""
|
|
820
|
+
|
|
821
|
+
def _load_csv_legacy_tdo(self, filepath: Path, sep: str = " ") -> bool:
    """Load already extracted TDO signal from a legacy .out file."""
    logger.info("Loading TDO signal from a (legacy) .out file...")
    data = np.loadtxt(filepath, delimiter=sep, skiprows=1, usecols=(0, 1))

    # Column layout: magnetic field, TDO signal.
    magfield, tdo = data[:, 0], data[:, 1]
    self.set_data_processed("magfield", magfield)
    self.set_data_processed(self._tdo_name, tdo)

    # Legacy files carry no time axis: synthesize one spanning
    # [0, max_time] so the field-aligned checks pass.
    nsamples = data.shape[0]
    time_vec = np.linspace(0, self.cfg.settings.max_time, nsamples)
    self.set_data_processed("time_exp", time_vec)
    self.set_data_processed("magfield_time", self.get_data_processed("time_exp"))

    return True
|
|
837
|
+
|
|
838
|
+
def _load_csv_legacy_results(self, filepath: Path, sep: str = ",") -> bool:
    """Load already computed results from a legacy .txt file."""
    logger.info("Loading results from a (legacy) .txt file...")

    data = np.loadtxt(filepath, delimiter=sep, skiprows=1)

    # The file only holds one sweep direction; infer which one.
    inc_or_dec = self._determine_inc_or_dec(filepath)
    if inc_or_dec == "unknown":
        logger.error(
            "Couldn't determine if the file is for increasing or decreasing "
            "magnetic field"
        )
        return False

    column_map = self._map_cols_legacy_results(data, inc_or_dec)
    self._set_map_in_data_processed(column_map)

    # Set extra data (to pass field-aligned check)
    self.set_data_processed("magfield_time", self.get_data_processed("time_exp"))

    return True
|
|
859
|
+
|
|
860
|
+
def _load_csv_tdo(self, filepath: Path, sep: str = "\t") -> bool:
    """Load already extracted TDO signal from a CSV file."""
    logger.info("Loading TDO signal from CSV file...")
    data = np.loadtxt(filepath, delimiter=sep, skiprows=1, usecols=(0, 1, 2))

    # Column layout: time, magnetic field, TDO signal.
    time_vec, magfield, tdo = data[:, 0], data[:, 1], data[:, 2]
    self.set_data_processed("time_exp", time_vec)
    # Field and signal share the same time base in this format.
    self.set_data_processed("magfield_time", time_vec)
    self.set_data_processed("magfield", magfield)
    self.set_data_processed(self._tdo_name, tdo)

    return True
|
|
872
|
+
|
|
873
|
+
def _load_csv_results(self, filepath: Path, sep: str = "\t") -> bool:
    """Load already computed results from a CSV file."""
    logger.info("Loading results from a CSV file...")

    data = np.loadtxt(filepath, delimiter=sep, skiprows=1)

    # The file only holds one sweep direction; infer which one.
    inc_or_dec = self._determine_inc_or_dec(filepath)
    if inc_or_dec == "unknown":
        logger.error(
            "Couldn't determine if the file is for increasing or decreasing "
            "magnetic field"
        )
        return False

    column_map = self._map_cols_results(data, inc_or_dec)
    self._set_map_in_data_processed(column_map)

    # Set extra data (to pass field-aligned check)
    self.set_data_processed("magfield_time", self.get_data_processed("time_exp"))

    return True
|
|
894
|
+
|
|
895
|
+
def _determine_inc_or_dec(
|
|
896
|
+
self, filepath: Path, keywords: tuple[str, str] = ("inc", "dec")
|
|
897
|
+
) -> Literal["inc", "dec", "unknown"]:
|
|
898
|
+
"""
|
|
899
|
+
Determine if CSV file is for increasing or decreasing magnetic field.
|
|
900
|
+
|
|
901
|
+
Parameters
|
|
902
|
+
----------
|
|
903
|
+
filepath : Path
|
|
904
|
+
Path to the CSV file.
|
|
905
|
+
keywords : tuple[str]
|
|
906
|
+
Two-elements tuple with the strings that allow to determine if it is
|
|
907
|
+
increasing or decreasing, respectively. Default is ("inc", "dec").
|
|
908
|
+
|
|
909
|
+
Returns
|
|
910
|
+
-------
|
|
911
|
+
inc_or_dec : {"inc", "dec", "unknown"}
|
|
912
|
+
"""
|
|
913
|
+
with open(filepath) as f:
|
|
914
|
+
header = f.readline()
|
|
915
|
+
|
|
916
|
+
ninc = header.count(keywords[0])
|
|
917
|
+
ndec = header.count(keywords[1])
|
|
918
|
+
|
|
919
|
+
if ninc > ndec:
|
|
920
|
+
return "inc"
|
|
921
|
+
elif ninc < ndec:
|
|
922
|
+
return "dec"
|
|
923
|
+
else:
|
|
924
|
+
return "unknown"
|
|
925
|
+
|
|
926
|
+
def _map_cols_legacy_results(
|
|
927
|
+
self, data: np.ndarray, kw: Literal["inc", "dec"]
|
|
928
|
+
) -> dict[str, np.ndarray]:
|
|
929
|
+
"""
|
|
930
|
+
Map a field name with a column in the legacy results CSV file.
|
|
931
|
+
|
|
932
|
+
Parameters
|
|
933
|
+
----------
|
|
934
|
+
data : np.ndarray
|
|
935
|
+
Tabular data read from the legacy results CSV file.
|
|
936
|
+
kw : {"inc", "dec"}
|
|
937
|
+
Increasing or decreasing magnetic field identifier (usually "inc" or "dec").
|
|
938
|
+
"""
|
|
939
|
+
where_nan = np.isnan(data)
|
|
940
|
+
|
|
941
|
+
def extract_col(idx):
|
|
942
|
+
return data[~where_nan[:, idx], idx]
|
|
943
|
+
|
|
944
|
+
return {
|
|
945
|
+
"time_exp": extract_col(0),
|
|
946
|
+
"magfield": extract_col(1),
|
|
947
|
+
self._tdo_name: extract_col(2),
|
|
948
|
+
self._tdo_name + "_fit_" + kw: extract_col(7),
|
|
949
|
+
self._tdo_det_dec_name
|
|
950
|
+
if kw == "dec"
|
|
951
|
+
else self._tdo_det_inc_name: extract_col(8),
|
|
952
|
+
"magfield_inverse_" + kw: extract_col(9),
|
|
953
|
+
self._tdo_inv_dec_name
|
|
954
|
+
if kw == "dec"
|
|
955
|
+
else self._tdo_inv_inc_name: extract_col(10),
|
|
956
|
+
"bfreq_" + kw: extract_col(11),
|
|
957
|
+
"fft_" + kw: extract_col(12),
|
|
958
|
+
}
|
|
959
|
+
|
|
960
|
+
def _map_cols_results(
|
|
961
|
+
self, data: np.ndarray, kw: Literal["inc", "dec"]
|
|
962
|
+
) -> dict[str, np.ndarray]:
|
|
963
|
+
"""
|
|
964
|
+
Map a field name with a column in the results CSV file.
|
|
965
|
+
|
|
966
|
+
Parameters
|
|
967
|
+
----------
|
|
968
|
+
data : np.ndarray
|
|
969
|
+
Tabular data read from the results CSV file.
|
|
970
|
+
kw : {"inc", "dec"}
|
|
971
|
+
Increasing or decreasing magnetic field identifier (usually "inc" or "dec").
|
|
972
|
+
"""
|
|
973
|
+
where_nan = np.isnan(data)
|
|
974
|
+
|
|
975
|
+
def extract_col(idx):
|
|
976
|
+
return data[~where_nan[:, idx], idx]
|
|
977
|
+
|
|
978
|
+
return {
|
|
979
|
+
"time_exp": extract_col(0),
|
|
980
|
+
"magfield": extract_col(1),
|
|
981
|
+
self._tdo_name: extract_col(2),
|
|
982
|
+
self._tdo_name + "_fit_" + kw: extract_col(5),
|
|
983
|
+
self._tdo_det_dec_name
|
|
984
|
+
if kw == "dec"
|
|
985
|
+
else self._tdo_det_inc_name: extract_col(6),
|
|
986
|
+
"magfield_inverse_" + kw: extract_col(7),
|
|
987
|
+
self._tdo_inv_dec_name
|
|
988
|
+
if kw == "dec"
|
|
989
|
+
else self._tdo_inv_inc_name: extract_col(8),
|
|
990
|
+
"bfreq_" + kw: extract_col(9),
|
|
991
|
+
"fft_" + kw: extract_col(10),
|
|
992
|
+
}
|
|
993
|
+
|
|
994
|
+
def _set_map_in_data_processed(self, data: dict[str, np.ndarray]):
|
|
995
|
+
"""Set data in `data_processed`."""
|
|
996
|
+
for key, value in data.items():
|
|
997
|
+
self.set_data_processed(key, value)
|
|
998
|
+
|
|
999
|
+
def _check_pickup_loaded(self) -> bool:
|
|
1000
|
+
"""Check if the pickup data was loaded."""
|
|
1001
|
+
if ("pickup" not in self.data_raw) or ("fs_pickup" not in self.metadata):
|
|
1002
|
+
return False
|
|
1003
|
+
else:
|
|
1004
|
+
return True
|
|
1005
|
+
|
|
1006
|
+
def _check_data_loaded(self) -> bool:
|
|
1007
|
+
"""Check if oscilloscope data was loaded."""
|
|
1008
|
+
return (self._meas_name in self.data_raw) and ("fs_signal" in self.metadata)
|
|
1009
|
+
|
|
1010
|
+
def _check_field_computed(self) -> bool:
|
|
1011
|
+
"""Check if magnetic field was computed."""
|
|
1012
|
+
return self.get_data_processed(
|
|
1013
|
+
"magfield_time", checkonly=True
|
|
1014
|
+
) and self.get_data_processed("magfield", checkonly=True)
|
|
1015
|
+
|
|
1016
|
+
def _check_spectro_computed(self) -> bool:
    """Check if the spectrogram was computed."""
    # checkonly=True presumably returns a presence flag rather than the
    # data itself — confirm against get_data_processed's implementation.
    return self.get_data_processed("spectro_s", checkonly=True)
|
|
1019
|
+
|
|
1020
|
+
def _check_barycenters_computed(self) -> bool:
|
|
1021
|
+
"""Check if the frequencies barycenters were computed."""
|
|
1022
|
+
return self.get_data_processed(
|
|
1023
|
+
self._tdo_name, checkonly=True
|
|
1024
|
+
) and self.get_data_processed("time_exp", checkonly=True)
|
|
1025
|
+
|
|
1026
|
+
def _check_field_aligned(self) -> bool:
|
|
1027
|
+
"""Check if the magnetic field was aligned on the TDO signal."""
|
|
1028
|
+
if not (self._check_field_computed() and self._check_barycenters_computed()):
|
|
1029
|
+
return False
|
|
1030
|
+
return (
|
|
1031
|
+
self.get_data_processed("time_exp").size
|
|
1032
|
+
== self.get_data_processed("magfield_time").size
|
|
1033
|
+
)
|
|
1034
|
+
|
|
1035
|
+
def _check_tdo_detrended(self) -> bool:
|
|
1036
|
+
"""Check if the TDO signal has been detrended."""
|
|
1037
|
+
return self.get_data_processed(
|
|
1038
|
+
self._tdo_det_inc_name, checkonly=True
|
|
1039
|
+
) and self.get_data_processed(self._tdo_det_dec_name, checkonly=True)
|
|
1040
|
+
|
|
1041
|
+
def _check_tdo_inverse(self) -> bool:
|
|
1042
|
+
"""Check if the TDO signal was oversampled in 1/B."""
|
|
1043
|
+
return self.get_data_processed(
|
|
1044
|
+
self._tdo_inv_inc_name, checkonly=True
|
|
1045
|
+
) and self.get_data_processed(self._tdo_inv_dec_name, checkonly=True)
|
|
1046
|
+
|
|
1047
|
+
def _reinit(self):
|
|
1048
|
+
"""Clear data and initialize objects."""
|
|
1049
|
+
super()._reinit()
|
|
1050
|
+
|
|
1051
|
+
self.inds_inc = slice(-1)
|
|
1052
|
+
self.inds_dec = slice(-1)
|