pymagnetos 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pymagnetos/__init__.py +15 -0
- pymagnetos/cli.py +40 -0
- pymagnetos/core/__init__.py +19 -0
- pymagnetos/core/_config.py +340 -0
- pymagnetos/core/_data.py +132 -0
- pymagnetos/core/_processor.py +905 -0
- pymagnetos/core/config_models.py +57 -0
- pymagnetos/core/gui/__init__.py +6 -0
- pymagnetos/core/gui/_base_mainwindow.py +819 -0
- pymagnetos/core/gui/widgets/__init__.py +19 -0
- pymagnetos/core/gui/widgets/_batch_processing.py +319 -0
- pymagnetos/core/gui/widgets/_configuration.py +167 -0
- pymagnetos/core/gui/widgets/_files.py +129 -0
- pymagnetos/core/gui/widgets/_graphs.py +93 -0
- pymagnetos/core/gui/widgets/_param_content.py +20 -0
- pymagnetos/core/gui/widgets/_popup_progressbar.py +29 -0
- pymagnetos/core/gui/widgets/_text_logger.py +32 -0
- pymagnetos/core/signal_processing.py +1004 -0
- pymagnetos/core/utils.py +85 -0
- pymagnetos/log.py +126 -0
- pymagnetos/py.typed +0 -0
- pymagnetos/pytdo/__init__.py +6 -0
- pymagnetos/pytdo/_config.py +24 -0
- pymagnetos/pytdo/_config_models.py +59 -0
- pymagnetos/pytdo/_tdoprocessor.py +1052 -0
- pymagnetos/pytdo/assets/config_default.toml +84 -0
- pymagnetos/pytdo/gui/__init__.py +26 -0
- pymagnetos/pytdo/gui/_worker.py +106 -0
- pymagnetos/pytdo/gui/main.py +617 -0
- pymagnetos/pytdo/gui/widgets/__init__.py +8 -0
- pymagnetos/pytdo/gui/widgets/_buttons.py +66 -0
- pymagnetos/pytdo/gui/widgets/_configuration.py +78 -0
- pymagnetos/pytdo/gui/widgets/_graphs.py +280 -0
- pymagnetos/pytdo/gui/widgets/_param_content.py +137 -0
- pymagnetos/pyuson/__init__.py +7 -0
- pymagnetos/pyuson/_config.py +26 -0
- pymagnetos/pyuson/_config_models.py +71 -0
- pymagnetos/pyuson/_echoprocessor.py +1901 -0
- pymagnetos/pyuson/assets/config_default.toml +92 -0
- pymagnetos/pyuson/gui/__init__.py +26 -0
- pymagnetos/pyuson/gui/_worker.py +135 -0
- pymagnetos/pyuson/gui/main.py +767 -0
- pymagnetos/pyuson/gui/widgets/__init__.py +7 -0
- pymagnetos/pyuson/gui/widgets/_buttons.py +95 -0
- pymagnetos/pyuson/gui/widgets/_configuration.py +85 -0
- pymagnetos/pyuson/gui/widgets/_graphs.py +248 -0
- pymagnetos/pyuson/gui/widgets/_param_content.py +193 -0
- pymagnetos-0.1.0.dist-info/METADATA +23 -0
- pymagnetos-0.1.0.dist-info/RECORD +51 -0
- pymagnetos-0.1.0.dist-info/WHEEL +4 -0
- pymagnetos-0.1.0.dist-info/entry_points.txt +7 -0

pymagnetos/core/_processor.py
@@ -0,0 +1,905 @@
"""The processor base class."""

import gc
import json
import logging
import os
from collections.abc import Sequence
from pathlib import Path
from typing import Any, Literal, Protocol, Self, runtime_checkable

import matplotlib.pyplot as plt
import nexusformat.nexus as nx
import numpy as np

from . import signal_processing as sp
from ._data import DataProcessed, DataRaw

# Global constants
PROGRAM_NAME = "pymagnetos"  # program name, saved in the NeXus file
RESULTS_NAME = "results"  # name of the main NeXus group
SERIE_NAME = "serie"  # name of the data series group in the NeXus file
NEXUS_EXTENSION = ("nx5", "nxs", "h5", "hdf5")  # NeXus file extensions

logger = logging.getLogger(__name__)


@runtime_checkable
class Config(Protocol):
    def loads(): ...


class BaseProcessor:
    """
    Load, analyze and store data acquired during pulsed magnetic field experiments.

    This is a base class meant to be subclassed to create specialized processor objects.

    It provides methods shared between experiments:
    - Load a configuration file (via the Config object),
    - Load binary and text data,
    - Store the data in-memory as NeXus objects (via the Data objects),
    - Compute a magnetic field over time,
    - Save the results as a NeXus file.

    It offers a convenient way to get and set data in a NeXus hierarchy (raw and
    processed data, sorted in different series) and can dump it in a NeXus-compliant
    HDF5 file along with all the metadata required to recreate a Processor object from
    such a file. The file can optionally be consolidated by specifying additional NeXus
    fields in the configuration file.

    The `_config_cls` attribute should be a Config class (the type, not an instance) and
    be set by subclasses.
    """

    _config_cls: type[Config]

    def __init__(self, file_path: Path | str | None = None, **kwargs) -> None:
        """
        Load, analyze and store data acquired during pulsed magnetic field experiments.

        Parameters
        ----------
        file_path : Path, str or None, optional
            Full path to the configuration file or a previously saved NeXus file. Can be
            None (default) to instantiate an empty object.
        **kwargs : passed to the `load_file()` method.
        """
        # Prepare internal variables in case they were not set before
        if not hasattr(self, "_results_name"):
            self._results_name = RESULTS_NAME
        if not hasattr(self, "_serie_name"):
            self._serie_name = SERIE_NAME
        if not hasattr(self, "_program_name"):
            self._program_name = PROGRAM_NAME
        if not hasattr(self, "_config_cls"):
            raise AttributeError(
                "No Config class defined, subclasses must set the '_config_cls' "
                "attribute."
            )

        # Load configuration or NeXus and trigger further initializations
        self.load_file(file_path, **kwargs)

    @property
    def idx_serie(self) -> int:
        """
        Current analysis serie index.

        It is a property so that it can be stored and updated in the Config object as
        well.
        """
        return -1

    @idx_serie.setter
    def idx_serie(self, value: int):
        raise NotImplementedError("Subclasses must implement this property setter")

    @property
    def analysis_window(self) -> Sequence[float]:
        """
        Current analysis window.

        It is a property so that it can be stored and updated in the Config object as
        well.
        """
        return [0.0]

    @analysis_window.setter
    def analysis_window(self, value: Sequence[float]):
        raise NotImplementedError("Subclasses must implement this property setter")
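
A minimal sketch of what a concrete subclass might look like, for illustration only: the wheel's real subclasses live in `pymagnetos/pytdo/_tdoprocessor.py` and `pymagnetos/pyuson/_echoprocessor.py`, and `MyConfig` plus the `cfg.parameters.idx_serie` / `cfg.parameters.analysis_window` layout below are assumptions, not taken from this file:

# Hypothetical sketch, not part of the package.
class MyProcessor(BaseProcessor):
    _config_cls = MyConfig  # any class satisfying the Config protocol (assumed)

    @property
    def idx_serie(self) -> int:
        # keep the index in the Config object so it is serialized with the file
        return self.cfg.parameters.idx_serie  # assumed Config layout

    @idx_serie.setter
    def idx_serie(self, value: int):
        self.cfg.parameters.idx_serie = value

    @property
    def analysis_window(self) -> Sequence[float]:
        return self.cfg.parameters.analysis_window  # assumed Config layout

    @analysis_window.setter
    def analysis_window(self, value: Sequence[float]):
        self.cfg.parameters.analysis_window = list(value)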
    def __repr__(self) -> str:
        return f"{type(self)}\nDataset : {self.cfg.expid}"

    def __str__(self) -> str:
        return f"Dataset : {self.cfg.expid}"

    def load_file(self, filepath: str | Path | None, **kwargs) -> None:
        """
        Load a configuration or NeXus file with the Config class.

        If loading a NeXus file, its extension should be "nx5", "nxs", "h5" or "hdf5" to
        be detected as such.

        Parameters
        ----------
        filepath : str, Path or None
            Full path to the configuration file or NeXus file.
        **kwargs : passed to the Config class.
        """
        if filepath is not None and str(filepath).endswith(NEXUS_EXTENSION):
            # Load the NeXus file instead
            self.is_config_file = False
            self.cfg = self._config_cls(**kwargs)  # initialize Config with defaults
            self._init()  # initialize default Processor
            self.load(filepath)  # load NeXus file
        else:
            # Load the configuration file
            self.is_config_file = True
            self.is_nexus_file = False
            self.cfg = self._config_cls(user_file=filepath, **kwargs)
            self.cfg.resolve_nexus(self._serie_name)
            self._init()  # initialize with Config

    def _init(self) -> None:
        """
        Initialize the object after loading a configuration file.

        This method should be executed either *after* loading the configuration file or
        *before* loading a NeXus file.

        The Data objects from the `data` modules and the NeXus structure are initialized
        here.
        """
        # Get parameters and metadata
        self.measurements = [*self.cfg.measurements.keys()]
        attr_raw = self.cfg.nexus.groups["data"]
        attr_processed = self.cfg.nexus.groups["analysis"]
        name_entry = self.cfg.nexus.groups["root"]["name"]

        # Prepare attributes storing the (meta)data.
        self.metadata = dict()
        self.data_raw = DataRaw(attr=attr_raw)
        self.data_processed = DataProcessed(
            program=self._program_name,
            attr=attr_processed,
            results_name=self._results_name,
            serie_name=self._serie_name,
        )
        # NeXus Entry
        self.nxentry = nx.NXentry(name=name_entry)
        self.nxentry.attrs["default"] = "processed"
        self._fill_nexus_entry()
        # NeXus Root
        self.nxroot = nx.NXroot(self.nxentry)
        self.nxroot.attrs["default"] = name_entry

    def _reinit(self) -> None:
        """Reinitialize the Processor, cleaning up the data."""
        self._init()
        gc.collect()

    @staticmethod
    def load_bin(
        filepath: str | Path,
        precision: int = 8,
        endian: Literal["<", ">"] = "<",
        **kwargs,
    ) -> np.ndarray:
        """
        Read a binary file with the given precision and endianness.

        Simple wrapper around `numpy.fromfile()`.

        Parameters
        ----------
        filepath : str or Path
            Full path to the file to read.
        precision : int, optional
            Floating-point precision in bytes, by default 8.
        endian : {"<", ">"}, optional
            "<" for little endian, ">" for big endian, by default "<".
        **kwargs : passed to `numpy.fromfile()`.

        Returns
        -------
        data : np.ndarray
            Raw data from the binary file.

        """
        with open(filepath, "rb") as fid:
            data = np.fromfile(fid, dtype=f"{endian}f{precision}", **kwargs)

        return data
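
As a usage sketch (the file name is hypothetical), reading 8-byte little-endian floats amounts to calling `numpy.fromfile` with dtype `"<f8"`:

# Hypothetical usage: read little-endian float64 samples from a binary dump.
signal = BaseProcessor.load_bin("pickup_001.bin", precision=8, endian="<")
# equivalent to: np.fromfile("pickup_001.bin", dtype="<f8")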
    def _load_pickup(
        self,
        filename: Path | str,
        precision: int,
        endian: Literal["<", ">"] = "<",
        order: Literal["F", "C"] = "F",
        nseries: int = 1,
        index: int = 0,
    ) -> np.ndarray:
        """
        Load the pickup data from a binary file.

        The array is reshaped given the number of series (by default, 1).

        Parameters
        ----------
        filename : Path | str
            Full path to the pickup binary file.
        precision : int
            Byte precision.
        endian : {"<", ">"}, optional
            "<" for little endian, ">" for big endian. Default is "<".
        order : {"F", "C"}, optional
            Array order, "F" for Fortran, "C" for C. Default is "F".
        nseries : int, optional
            Number of pickup time series in the binary file. Default is 1.
        index : int, optional
            Index of the time series to read, 0-based. Default is 0 (first).

        Returns
        -------
        pickup : np.ndarray
            Pickup voltage time series.
        """
        # Read data
        data = self.load_bin(filename, precision, endian)

        # Reshape and get the required time series
        data = data.reshape((-1, nseries), order=order)[:, index].astype(float).copy()

        return data
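
The Fortran-order reshape assumes the series are stored back-to-back in the file rather than interleaved sample-by-sample; a small self-contained numpy illustration of the indexing:

import numpy as np

flat = np.arange(6.0)  # file contents: [0, 1, 2, 3, 4, 5], two series of three
by_serie = flat.reshape((-1, 2), order="F")
print(by_serie[:, 0])  # [0. 1. 2.]  -> first series (columns filled first)
print(by_serie[:, 1])  # [3. 4. 5.]  -> second series
# order="C" would instead assume interleaved samples: column 0 -> [0. 2. 4.]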
    def load_pickup(self):
        """
        Load the pickup binary file, gathering metadata (if any) and the file.

        Pickup binary files might differ across experiments, so subclasses must
        implement this method.
        """
        raise NotImplementedError("Subclasses must implement this method")

    def compute_field(self, method: str = "trapz") -> Self:
        """
        Compute the magnetic field by integrating the pickup signal.

        The details are in the `_compute_field()` method.

        Parameters
        ----------
        method : str, optional
            Method to perform integration, passed to `sp.integrate_pickup()`. Default is
            "trapz" (which is the only method currently supported).
        """
        # Checks
        if "pickup" not in self.data_raw:
            self.load_pickup()

        # Get parameters
        surface = self.cfg.parameters.pickup_surface

        # Compute
        self._compute_field(surface=surface, method=method)

        return self

    def _compute_field(
        self,
        surface: float = 1.0,
        method: str = "trapz",
    ) -> Self:
        """
        Compute the magnetic field from pickup coil data.

        Wraps the `sp.integrate_pickup()` function. The resulting field is stored as
        `magfield` in `data_processed`, with the corresponding time vector as
        `magfield_time`.

        Requires `pickup` and `pickup_time` in `data_raw`. If the pickup signal is
        empty, a synthetic one is generated.

        Parameters
        ----------
        surface : float
            Pickup coil surface in m².
        method : str, optional
            Integration method. Default is "trapz" (which is the only one supported).
        """
        if ("pickup_time" not in self.data_raw) or ("pickup" not in self.data_raw):
            logger.error("Pickup signal was not loaded, can't compute magnetic field.")
            return self

        pu_time = self.get_data_raw("pickup_time")
        pu_signal = self.get_data_raw("pickup")

        # Check if we need to simulate a signal
        if pu_time.shape == (0,) or pu_signal.shape == (0,):
            # We need to simulate a pickup signal
            if self.get_data_processed("time_exp", checkonly=True):
                logger.info("There is no pickup signal, simulating one.")
                self.create_fake_pickup()
                # Reload
                pu_time = self.get_data_raw("pickup_time")
                pu_signal = self.get_data_raw("pickup")
            else:
                # Required data missing
                logger.warning(
                    "There is no pickup signal, the main data needs to be loaded "
                    "first to simulate a field."
                )
                return self

        # Integrate and store
        logger.info("Computing magnetic field...")
        self.set_data_processed(
            "magfield", sp.integrate_pickup(pu_time, pu_signal, surface, method=method)
        )
        self.set_data_processed("magfield_time", self.get_data_raw("pickup_time"))

        # Verbose
        maxfield = self.get_data_processed("magfield").max()
        logger.info(f"Done, max. field = {maxfield:2.2f}T.")

        return self
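
`sp.integrate_pickup()` lives in `signal_processing.py` and its internals are not shown in this file. Physically, a pickup coil delivers V(t) proportional to S·dB/dt, so the field follows from a cumulative trapezoidal integral scaled by the coil surface. A rough sketch of that idea, not the package's exact implementation (the sign convention depends on the coil wiring):

import numpy as np

def integrate_pickup_sketch(time, voltage, surface):
    # trapezoid area of each interval, then cumulative sum: B(t) ~ (1/S) * integral of V dt
    increments = 0.5 * (voltage[1:] + voltage[:-1]) * np.diff(time)
    return np.concatenate(([0.0], np.cumsum(increments))) / surface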
    def create_fake_pickup(self) -> Self:
        """
        Create a fake pickup signal and store it in `data_raw`.

        Requires "time_exp" in `data_processed`.
        """
        self.set_data_raw(
            "pickup",
            self._make_fake_pickup(self.get_data_processed("time_exp").shape[0]),
        )
        self.set_data_raw("pickup_time", self.get_data_processed("time_exp"))

        return self

    @staticmethod
    def _make_fake_pickup(npoints: int, start_at: float = 0.0) -> np.ndarray:
        """
        Generate a fake pickup signal.

        The generated signal is a simple linear function.

        Parameters
        ----------
        npoints : int
            Number of data points.
        start_at : float, optional
            Starting point, default is 0.
        """
        return np.linspace(start_at, npoints, npoints, endpoint=False)

    def _fill_nexus_entry(self) -> Self:
        """
        Fill the NXentry with the Data objects.

        'data_processed' and 'data_raw' are added as NXsubentry.
        """
        # Add data
        self.nxentry["processed"] = nx.NXsubentry(self.data_processed)
        self.nxentry["processed"].attrs["default"] = "analysis"
        self.nxentry["raw"] = nx.NXsubentry(self.data_raw)
        self.nxentry["raw"].attrs["default"] = "data"

        return self

    def _set_data(self, where: str, name: str, value: Any) -> None:
        """
        Set data in the DataProcessed object.

        The dataset will be written as an NXfield in `data_processed[where][name]`. If
        the dataset already exists, it is replaced (modifying its type and shape
        accordingly).

        If the group name or the dataset name is present in the 'nexus' section of the
        configuration, the attributes are written as well.

        Parameters
        ----------
        where : str
            Location of the dataset in the DataProcessed object.
        name : str
            Dataset name.
        value : Any
            Dataset.
        """
        # Put data
        if name in self.data_processed[where]:
            self.data_processed[where][name].replace(value)
        else:
            self.data_processed[where][name] = value

        # Set group attributes
        if where in self.cfg.nexus.groups:
            for k, v in self.cfg.nexus.groups[where].items():
                self.data_processed[where].attrs[k] = v
        # Set dataset attributes
        if name in self.cfg.nexus.datasets:
            for k, v in self.cfg.nexus.datasets[name].items():
                self.data_processed[where][name].attrs[k] = v

    def _get_data(self, where: str, name: str, checkonly: bool = False) -> Any:
        """
        Get data from the DataProcessed object.

        The data is pulled from `data_processed[where][name]`. If `checkonly` is True,
        only check if the dataset exists.

        Note: the returned value is a *view*; if it is modified, the underlying data
        is modified as well.

        Parameters
        ----------
        where : str
            Location of the dataset in the DataProcessed object.
        name : str
            Dataset name.
        checkonly : bool, optional
            Perform a key existence check without returning the value. Default is False.
        """
        if not checkonly:
            return self.data_processed[where][name].nxdata
        else:
            if where not in self.data_processed:
                return False
            if name not in self.data_processed[where]:
                return False
            else:
                return True

    def _remove_data(self, where: str, name: str) -> None:
        """
        Remove data from the DataProcessed object.

        Parameters
        ----------
        where : str
            Location of the dataset in the DataProcessed object.
        name : str
            Dataset name.
        """
        if self._get_data(where, name, checkonly=True):
            del self.data_processed[where][name]

    def get_data_raw(self, name: str, checkonly: bool = False) -> Any:
        """
        Retrieve data from `data_raw[name]`.

        Note: the returned value is a *view*; if it is modified, the underlying data
        is modified as well.

        Parameters
        ----------
        name : str
            Dataset name.
        checkonly : bool, optional
            Perform a key existence check without returning the value. Default is False.
        """
        if checkonly:
            return name in self.data_raw
        else:
            return self.data_raw[name].nxdata

    def set_data_raw(self, name: str, value: Any) -> None:
        """
        Put data in `data_raw[name]`.

        The dataset will be written as an NXfield in `data_raw[name]`. If the dataset
        already exists, it is replaced (modifying its type and shape accordingly).

        If the dataset name is present in the 'nexus' section of the configuration, the
        attributes are written as well.

        Parameters
        ----------
        name : str
            Dataset name.
        value : Any
            Dataset.
        """
        if name in self.data_raw:
            self.data_raw[name].replace(value)
        else:
            self.data_raw[name] = value

        # Set dataset attributes
        if name in self.cfg.nexus.datasets:
            for k, v in self.cfg.nexus.datasets[name].items():
                self.data_raw[name].attrs[k] = v

    def get_data_processed(self, name: str, checkonly: bool = False) -> Any:
        """
        Get data from the 'results' group of the DataProcessed object.

        The data is pulled from `data_processed[results][name]`. If `checkonly` is True,
        only check if the dataset exists.

        Note: the returned value is a *view*; if it is modified, the underlying data
        is modified as well.

        Parameters
        ----------
        name : str
            Dataset name.
        checkonly : bool, optional
            Perform a key existence check without returning the value. Default is False.
        """
        where = f"{self._results_name}"
        return self._get_data(where, name, checkonly=checkonly)

    def set_data_processed(self, name: str, value: Any) -> None:
        """
        Set data in the 'results' group of the DataProcessed object.

        The dataset will be written as an NXfield in `data_processed[results][name]`. If
        the dataset already exists, it is replaced (modifying its type and shape
        accordingly).

        If the group name or the dataset name is present in the 'nexus' section of the
        configuration, the attributes are written as well.

        Parameters
        ----------
        name : str
            Dataset name.
        value : Any
            Dataset.
        """
        where = f"{self._results_name}"
        self._set_data(where, name, value)

    def remove_data_processed(self, name: str) -> None:
        """
        Remove a dataset from the 'results' group of the DataProcessed object.

        The dataset stored at `data_processed[results][name]` is removed.

        Parameters
        ----------
        name : str
            Dataset name.
        """
        where = f"{self._results_name}"
        self._remove_data(where, name)

    def create_data_serie(self) -> None:
        """
        Check if the dictionary for serie data exists; if not, create it.

        The group hosting the data for the current `idx_serie` is stored at:
        `data_processed[results_{serie_name}{idx_serie}]`.

        If 'results_{serie_name}' is present in the 'nexus' section of the
        configuration, the attributes are written as well.
        """
        baseloc = f"{self._results_name}_{self._serie_name}"
        loc = f"{baseloc}{self.idx_serie}"
        if loc not in self.data_processed:
            self.data_processed.create_serie(self.idx_serie)

        # Set NeXus attributes
        if baseloc in self.cfg.nexus.groups:
            for k, v in self.cfg.nexus.groups[baseloc].items():
                self.data_processed[loc].attrs[k] = v

    def get_data_serie(self, name: str, checkonly: bool = False) -> Any:
        """
        Get data from the current 'results_serie' group of the DataProcessed object.

        The data is pulled from `data_processed[results_{serie_name}{idx_serie}][name]`.
        If `checkonly` is True, only check if the dataset exists.

        Note: the returned value is a *view*; if it is modified, the underlying data
        is modified as well.

        Parameters
        ----------
        name : str
            Dataset name.
        checkonly : bool, optional
            Perform a key existence check without returning the value. Default is False.
        """
        where = f"{self._results_name}_{self._serie_name}{self.idx_serie}"
        return self._get_data(where, name, checkonly=checkonly)

    def set_data_serie(self, name: str, value: Any) -> None:
        """
        Set data in the current 'results_serie' group of the DataProcessed object.

        The dataset will be written as an NXfield in
        `data_processed[results_{serie_name}{idx_serie}][name]`. If the dataset already
        exists, it is replaced (modifying its type and shape accordingly).

        If the group name or the dataset name is present in the 'nexus' section of the
        configuration, the attributes are written as well.

        Parameters
        ----------
        name : str
            Dataset name.
        value : Any
            Dataset.
        """
        where = f"{self._results_name}_{self._serie_name}{self.idx_serie}"
        self._set_data(where, name, value)

    def remove_date_serie(self, name: str) -> None:
        """
        Remove data in the current 'results_serie' group of the DataProcessed object.

        The dataset at `data_processed[results_{serie_name}{idx_serie}][name]` is
        removed.

        Parameters
        ----------
        name : str
            Dataset name.
        """
        where = f"{self._results_name}_{self._serie_name}{self.idx_serie}"
        self._remove_data(where, name)

    def set_attr_serie(self, name: str, value: Any) -> None:
        """
        Set an attribute on the current 'results_serie' NXdata group in data_processed.

        The attribute will be written for the group at
        `data_processed[results_{serie_name}{idx_serie}]`.

        Parameters
        ----------
        name : str
            Attribute name.
        value : Any
            Attribute value to set.
        """
        where = f"{self._results_name}_{self._serie_name}{self.idx_serie}"
        self.data_processed[where].attrs[name] = value
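
Taken together, the `*_serie` helpers address per-series NXdata groups keyed by `idx_serie`. A hedged sketch of the intended flow, assuming `proc` is an instance of a concrete subclass and `freqs` some computed array:

proc.idx_serie = 3
proc.create_data_serie()                 # ensures the "results_serie3" group exists
proc.set_data_serie("frequency", freqs)  # -> data_processed["results_serie3"]["frequency"]
proc.set_attr_serie("analysis_window", list(proc.analysis_window))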
    def plot_field(self) -> plt.Figure | None:
        """
        Plot the magnetic field.

        Pull data from `data_processed[results]["magfield"]` and plot it against the
        time vector.
        """
        if not self.get_data_processed(
            "magfield_time", checkonly=True
        ) or not self.get_data_processed("magfield", checkonly=True):
            logger.warning("The magnetic field was not computed yet.")
            return None

        fig = plt.figure()
        plt.plot(
            self.get_data_processed("magfield_time"),
            self.get_data_processed("magfield"),
        )
        plt.xlabel("time (s)")
        plt.ylabel("B (T)")
        plt.show()

        return fig

    def get_nexus_filename(self) -> str:
        """Generate the output NeXus full file name."""
        return os.path.join(self.cfg.data_directory, self.cfg.expid + ".nx5")

    def consolidate(self) -> Self:
        """
        Add supplementary NeXus entries from the configuration file.

        The "nx" section of the configuration is read to add new groups and datasets
        that provide further details about the sample and experiment.
        """
        for nxk, nxv in self.cfg.nx.items():
            # [nx.xyz] section
            nxclass = "NX" + nxk
            nxgroup = nx.NXgroup(nxclass=nxclass)
            for k, v in nxv.items():
                # write dataset
                nxgroup[k] = v
            if nxk in self.nxentry:
                del self.nxentry[nxk]
            self.nxentry[nxk] = nxgroup

        return self
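
For instance, assuming the TOML configuration carried a hypothetical `[nx.sample]` table, `cfg.nx` would map `"sample"` to its key/value pairs and `consolidate()` would write them into an `NXsample` group of the entry (values invented for illustration):

proc.cfg.nx = {"sample": {"name": "sample A", "temperature": "1.4 K"}}  # hypothetical
proc.consolidate()
print(proc.nxentry["sample"].nxclass)  # -> "NXsample"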
    def resolve_nexus_links(self, path: str = "/") -> Self:
        """
        Replace placeholders in the NXroot group by actual links to other datasets.

        To enable automatic plotting by NeXus programs (such as `nexpy`), datasets
        plotted one versus the other need to be in the same groups. Common x-axes, such
        as time or magnetic field, are linked to the actual datasets found in
        `data_processed[results]`.

        In practice, this method recursively scans all datasets to find datasets that
        are strings and contain the pre-defined placeholder, formatted like so:

        "!link to:path/to/dataset"

        and replaces them with an NXlink targeting the specified dataset.
        """
        nxgrp = self.nxroot[path]
        for obj in nxgrp.values():
            if isinstance(obj, nx.NXfield):
                # is a dataset, not a group
                if isinstance(obj.nxdata, bytes):
                    # dataset is a string
                    val = obj.nxdata.decode("utf-8")
                elif isinstance(obj.nxdata, str):
                    # dataset is a string
                    val = obj.nxdata
                else:
                    # not a string, continue scanning
                    continue
                if val.startswith("!link to:"):
                    # begins with the placeholder flag, read the target
                    target = val.split("!link to:")[1]
                else:
                    # does not begin with the link placeholder
                    continue
                # replace the placeholder with an actual link
                self.nxroot[obj.nxpath] = nx.NXlink(
                    self.nxroot[self.nxentry.nxname][target]
                )

            else:
                # is a group, continue scanning within the group
                self.resolve_nexus_links(path=obj.nxpath)

        return self
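
So a string dataset written anywhere under the entry can defer linking until save time. A sketch of the convention, assuming the target path "processed/analysis/results/magfield_time" is valid inside the entry (the exact path depends on the configuration's group names):

proc.set_data_processed("time", "!link to:processed/analysis/results/magfield_time")
proc.resolve_nexus_links()
# the placeholder field is now an NXlink to the shared time-axis dataset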
    def save(
        self, filename: str | Path | None = None, consolidate: bool = True, **kwargs
    ) -> bool:
        """
        Save the object to a NeXus[1]-compliant HDF5 file.

        It is written using the `NXFile` object (which is an `h5py.File` subclass).
        If a filename is not specified, `get_nexus_filename()` is used to get a path
        to the output file.

        The `Config` object and the `metadata` attribute are serialized and stored in
        the file to allow for reconstruction of the processing state.

        [1] https://www.nexusformat.org/

        Parameters
        ----------
        filename : str, Path or None
            Full path to the output file, which shouldn't exist. If None (default), the
            filename is determined with the `get_nexus_filename()` method.
        consolidate : bool, optional
            Whether to consolidate the file to add attributes and groups from the
            configuration file. Default is True.
        **kwargs : passed to `nx.NXFile()`.

        Returns
        -------
        status : bool
            True if the file was saved successfully, False otherwise.
        """
        if not filename:
            filename = self.get_nexus_filename()
        if "mode" not in kwargs:
            kwargs["mode"] = "w-"

        logger.info("Saving...")
        # Consolidation
        if consolidate:
            self.consolidate()
            self.resolve_nexus_links()

        # Specify configuration for the program
        self.data_processed["configuration"] = self.cfg.model_dump_json()
        self.data_processed["configuration"].attrs["format"] = "json"

        # Add metadata
        self.data_processed["metadata"] = json.dumps(self.metadata)
        self.data_processed["metadata"].attrs["format"] = "json"

        # Save
        try:
            with nx.NXFile(filename, **kwargs) as f:
                f.writefile(self.nxroot)
            # release
            self.release_nexus_file()
        except Exception as e:
            logger.error(f"Failed to save: {e}.")
            return False

        logger.info(f"Saved at {filename}.")
        return True
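
An end-to-end sketch tying it together (file names hypothetical, `MyProcessor` as sketched earlier): process from a configuration, save, then rebuild a processor from the resulting file, which `load_file()` routes to `load()` thanks to the `.nx5` extension:

proc = MyProcessor("config_experiment.toml")
proc.load_pickup()
proc.compute_field()   # integrates the pickup into "magfield"
proc.save()            # writes <data_directory>/<expid>.nx5
revived = MyProcessor(proc.get_nexus_filename())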
    def load(self, filename: str | Path | None) -> Self:
        """
        Load a previously created NeXus file.

        Note that only the first entry of the `NXroot` object will be loaded.

        Parameters
        ----------
        filename : str
            Full path to the file to load.

        Returns
        -------
        obj : BaseProcessor
            Initialized Processor object, ready to carry on the analysis.
        """
        # Load file
        logger.info(f"Loading NeXus file at {filename}...")
        try:
            with nx.NXFile(filename, mode="rw", recursive=True) as f:
                nxroot = f.readfile()
        except Exception as e:
            logger.error(f"\nFailed to load: {e}.")
            return self

        # Get first entry
        nxentry = next(iter(nxroot.entries.values()))

        # Re-generate Config object
        if "configuration" in nxentry["processed"]["analysis"]:
            self.cfg = self._config_cls.loads(
                nxentry["processed"]["analysis"]["configuration"].nxdata
            )
        else:
            logger.warning(
                "No configuration found in the NeXus file, features will be limited."
            )

        # Set data
        # Processed data
        for key, value in nxentry["processed"]["analysis"].attrs.items():
            # attributes
            self.data_processed.attrs[key] = value
        for key, value in nxentry["processed"]["analysis"].items():
            # groups and fields
            self.data_processed[key] = value
        # Raw data
        for key, value in nxentry["raw"]["data"].attrs.items():
            # attributes
            self.data_raw.attrs[key] = value
        for key, value in nxentry["raw"]["data"].items():
            # groups and fields
            self.data_raw[key] = value

        # Re-build NX data structure
        name_entry = self.cfg.nexus.groups["root"]["name"]
        self.nxentry.nxname = name_entry
        self.nxroot.attrs["default"] = name_entry

        # Recover metadata
        if "metadata" in self.data_processed:
            self.metadata = json.loads(self.data_processed["metadata"].nxdata)
        else:
            logger.warning(
                "No metadata found in the NeXus file, features will be limited."
            )

        # Update flag
        self.is_nexus_file = True

        logger.info("Done.")

        return self

    def release_nexus_file(self) -> None:
        """
        Uncouple the NeXus data structure from the NeXus HDF5 file.

        It *should* prevent changes made to the object from being reflected in the
        file.
        """
        self.nxroot.nxfile.clear_lock()
        self.nxroot.nxfile.close()
        self.nxroot._filename = None
        self.nxroot._file = None