pymagnetos 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. pymagnetos/__init__.py +15 -0
  2. pymagnetos/cli.py +40 -0
  3. pymagnetos/core/__init__.py +19 -0
  4. pymagnetos/core/_config.py +340 -0
  5. pymagnetos/core/_data.py +132 -0
  6. pymagnetos/core/_processor.py +905 -0
  7. pymagnetos/core/config_models.py +57 -0
  8. pymagnetos/core/gui/__init__.py +6 -0
  9. pymagnetos/core/gui/_base_mainwindow.py +819 -0
  10. pymagnetos/core/gui/widgets/__init__.py +19 -0
  11. pymagnetos/core/gui/widgets/_batch_processing.py +319 -0
  12. pymagnetos/core/gui/widgets/_configuration.py +167 -0
  13. pymagnetos/core/gui/widgets/_files.py +129 -0
  14. pymagnetos/core/gui/widgets/_graphs.py +93 -0
  15. pymagnetos/core/gui/widgets/_param_content.py +20 -0
  16. pymagnetos/core/gui/widgets/_popup_progressbar.py +29 -0
  17. pymagnetos/core/gui/widgets/_text_logger.py +32 -0
  18. pymagnetos/core/signal_processing.py +1004 -0
  19. pymagnetos/core/utils.py +85 -0
  20. pymagnetos/log.py +126 -0
  21. pymagnetos/py.typed +0 -0
  22. pymagnetos/pytdo/__init__.py +6 -0
  23. pymagnetos/pytdo/_config.py +24 -0
  24. pymagnetos/pytdo/_config_models.py +59 -0
  25. pymagnetos/pytdo/_tdoprocessor.py +1052 -0
  26. pymagnetos/pytdo/assets/config_default.toml +84 -0
  27. pymagnetos/pytdo/gui/__init__.py +26 -0
  28. pymagnetos/pytdo/gui/_worker.py +106 -0
  29. pymagnetos/pytdo/gui/main.py +617 -0
  30. pymagnetos/pytdo/gui/widgets/__init__.py +8 -0
  31. pymagnetos/pytdo/gui/widgets/_buttons.py +66 -0
  32. pymagnetos/pytdo/gui/widgets/_configuration.py +78 -0
  33. pymagnetos/pytdo/gui/widgets/_graphs.py +280 -0
  34. pymagnetos/pytdo/gui/widgets/_param_content.py +137 -0
  35. pymagnetos/pyuson/__init__.py +7 -0
  36. pymagnetos/pyuson/_config.py +26 -0
  37. pymagnetos/pyuson/_config_models.py +71 -0
  38. pymagnetos/pyuson/_echoprocessor.py +1901 -0
  39. pymagnetos/pyuson/assets/config_default.toml +92 -0
  40. pymagnetos/pyuson/gui/__init__.py +26 -0
  41. pymagnetos/pyuson/gui/_worker.py +135 -0
  42. pymagnetos/pyuson/gui/main.py +767 -0
  43. pymagnetos/pyuson/gui/widgets/__init__.py +7 -0
  44. pymagnetos/pyuson/gui/widgets/_buttons.py +95 -0
  45. pymagnetos/pyuson/gui/widgets/_configuration.py +85 -0
  46. pymagnetos/pyuson/gui/widgets/_graphs.py +248 -0
  47. pymagnetos/pyuson/gui/widgets/_param_content.py +193 -0
  48. pymagnetos-0.1.0.dist-info/METADATA +23 -0
  49. pymagnetos-0.1.0.dist-info/RECORD +51 -0
  50. pymagnetos-0.1.0.dist-info/WHEEL +4 -0
  51. pymagnetos-0.1.0.dist-info/entry_points.txt +7 -0
pymagnetos/__init__.py ADDED
@@ -0,0 +1,15 @@
+ """
+ The pymagnetos package.
+
+ It provides analysis tools for high magnetic field experiments.
+ """
+
+ import nexusformat.nexus as nx
+
+ from . import core, pytdo, pyuson
+ from .core import sp, utils
+
+ __all__ = ["core", "pytdo", "pyuson", "sp", "utils"]
+
+ # Configure NeXus globally
+ nx.nxsetconfig(compression=None, encoding="utf-8", lock=0, memory=8000, recursive=True)
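Importing the top-level package therefore applies the `nxsetconfig()` call above as a side effect. A minimal sketch of what that means for downstream code, assuming the `nexusformat` API exposes `nxgetconfig()` as the read counterpart of `nxsetconfig()`:

```python
# Sketch only: importing pymagnetos runs the nxsetconfig() call in __init__.py,
# so NeXus objects created afterwards pick up these global settings.
import nexusformat.nexus as nx

import pymagnetos  # noqa: F401  # the import itself configures NeXus

# Assumption: nxgetconfig() is the read counterpart of nxsetconfig().
print(nx.nxgetconfig())  # expect lock=0, memory=8000, encoding="utf-8", ...
```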
pymagnetos/cli.py ADDED
@@ -0,0 +1,40 @@
+ """Command line interface to run the various apps."""
+
+ import argparse
+
+
+ def pymagnetos_parser() -> argparse.ArgumentParser:
+     """Define the arguments of the CLI."""
+     parser = argparse.ArgumentParser(
+         description="pymagnetos - tools for high magnetic field experiments analysis",
+     )
+
+     subparsers = parser.add_subparsers(
+         dest="command", help="Available commands", required=True
+     )
+     subparsers.add_parser("pytdo", help="Run the app for TDO experiments")
+     subparsers.add_parser("pyuson", help="Run the app for ultra-sound experiments")
+
+     return parser
+
+
+ def main() -> None:
+     """Parse arguments and run the specified app."""
+     parser = pymagnetos_parser()
+     args = parser.parse_args()
+
+     match args.command:
+         case "pytdo":
+             from pymagnetos.pytdo import gui
+
+             gui.run()
+         case "pyuson":
+             from pymagnetos.pyuson import gui
+
+             gui.run()
+         case _:
+             raise NotImplementedError(f"Unknown command: {args.command}")
+
+
+ if __name__ == "__main__":
+     main()
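Because the subparser is created with `dest="command"` and `required=True`, the CLI accepts exactly the two subcommands `pytdo` and `pyuson` (the installed console-script names live in `entry_points.txt`, which this diff lists but does not show). A small sketch exercising the parser directly, without launching a GUI:

```python
from pymagnetos.cli import pymagnetos_parser

parser = pymagnetos_parser()

# The chosen subcommand lands in args.command.
args = parser.parse_args(["pytdo"])
print(args.command)  # -> "pytdo"

# With required=True, parse_args([]) prints a usage error and exits;
# main() only imports the matching GUI package once a command is selected.
```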
pymagnetos/core/__init__.py ADDED
@@ -0,0 +1,19 @@
+ """The core module, containing base classes and components."""
+
+ from . import config_models, gui, utils
+ from . import signal_processing as sp
+ from ._config import BaseConfig
+ from ._data import DataBase, DataProcessed, DataRaw
+ from ._processor import BaseProcessor
+
+ __all__ = [
+     "BaseConfig",
+     "BaseProcessor",
+     "DataBase",
+     "DataProcessed",
+     "DataRaw",
+     "config_models",
+     "gui",
+     "sp",
+     "utils",
+ ]
pymagnetos/core/_config.py ADDED
@@ -0,0 +1,340 @@
+ """A generic Config base class to load TOML configuration file, using Pydantic."""
+
+ import logging
+ import tomllib
+ from pathlib import Path
+ from typing import Any, Literal
+
+ import tomlkit
+ from pydantic import BaseModel
+
+ from .config_models import File
+ from .utils import merge_dict_nested, strip_none_dict
+
+ # Only attributes that can be set on the Config class itself; everything else is set
+ # in the underlying Pydantic model
+ SELF_ATTRIBUTES = ("_cfg", "_model", "_user_file", "_default_file", "_overrides")
+
+ logger = logging.getLogger(__name__)
+
+
+ class BaseConfig:
+     """
+     Load and store parameters from a user-defined TOML configuration file.
+
+     It uses an underlying Pydantic model stored in the `_cfg` attribute. Setting an
+     attribute will actually set it in the underlying model (raising an error if it is
+     not defined).
+     """
+
+     def __init__(
+         self,
+         model: type[BaseModel],
+         user_file: str | Path | None = None,
+         default_file: str | Path | None = None,
+         no_config: bool = False,
+         **overrides: Any,
+     ) -> None:
+         """
+         Load and store parameters from a user-defined TOML configuration file.
+
+         It requires a fully-defined Pydantic model.
+
+         If `user_file` is a JSON file, the file is read and given to the Pydantic
+         `model_validate_json()` method; the default file and overrides are ignored, as
+         it is assumed that a JSON configuration file comes from a previously serialized
+         Config object.
+
+         Otherwise, the configuration is set from, in this order of preference:
+         1. **overrides
+         2. Configuration file (`user_file`)
+         3. Default configuration file (`default_file`). If not provided, the one bundled
+            with the package is used.
+
+         To load a JSON string (not a file), use the `loads()` class method.
+
+         Parameters
+         ----------
+         model : pydantic.BaseModel
+             A fully-defined (custom) Pydantic BaseModel.
+         user_file : str or Path or None
+             Path to a user configuration file.
+         default_file : str or Path or None
+             Path to the default configuration file used as fallback for missing
+             parameters in the user file.
+         no_config : bool, optional
+             Initialize an empty Config object, with just the Pydantic model. Default is
+             False.
+         **overrides : overriding keyword arguments
+             Keyword arguments that overwrite parameters from other sources.
+         """
+         self._model = model
+
+         if no_config:
+             self._default_file = None
+             self._user_file = None
+             self._overrides = overrides
+         else:
+             self._default_file = Path(default_file) if default_file else None
+             self._user_file = Path(user_file) if user_file else None
+             self._overrides = overrides
+             self.load()
+             self._patch_old_version()
+             self._resolve_data_directory()
+             self.build_filenames()
+             self._greeting()
+
+     def __getattr__(self, name: str, /) -> Any:
+         """
+         Return `name` from the underlying Pydantic model.
+
+         This magic method is called only when an attribute is not found.
+         """
+         return getattr(self._cfg, name)
+
+     def __setattr__(self, name: str, value: Any, /) -> None:
+         """
+         Set `name` to `value` in the underlying Pydantic model.
+
+         Only a restricted list of attributes can be set in the Config object itself (see
+         `SELF_ATTRIBUTES`).
+         """
+         if name in SELF_ATTRIBUTES:
+             super().__setattr__(name, value)
+         else:
+             setattr(self._cfg, name, value)
+
+     def load(self) -> None:
+         """Load a configuration file."""
+         if self._default_file:
+             default_config = tomllib.loads(self._default_file.read_text())
+         else:
+             default_config = dict()
+
+         if self._user_file:
+             if self._user_file.suffix == ".json":
+                 # Use Pydantic built-in method for JSON
+                 self._cfg = self._model.model_validate_json(self._user_file.read_text())
+                 return
+
+             # Do not use tomlkit as it does not play nicely with Pydantic
+             user_config = tomllib.loads(self._user_file.read_text())
+             new_values = merge_dict_nested(user_config, default_config)
+             # Restore the 'measurements' section if it exists and is not empty, because
+             # the default names were merged
+             if len(user_config.get("measurements", [])):
+                 new_values["measurements"] = user_config["measurements"]
+
+         else:
+             new_values = default_config
+
+         if len(self._overrides) > 0:
+             newer_values = merge_dict_nested(self._overrides, new_values)
+         else:
+             newer_values = new_values
+
+         self._cfg = self._model(**newer_values)
+
+     def _model_validate_json(self, json_data: str | bytes, **kwargs):
+         """Read and set the configuration from a JSON string."""
+         self._cfg = self._model.model_validate_json(json_data, **kwargs)
+         self._greeting()
+
+     def _greeting(self):
+         logger.info(
+             f"Configuration loaded for experiment: '{self.expid}'."
+             f" Data directory: {self.data_directory}"
+         )
+
+     @classmethod
+     def loads(cls, json_data: str | bytes, **kwargs):
+         """
+         Load a JSON-formatted string as the Config.
+
+         Subclasses must implement this with the `_loads()` method and the relevant
+         Pydantic model.
+         """
+         raise NotImplementedError(
+             "Subclasses must implement this method with Pydantic model."
+         )
+
+     @classmethod
+     def _loads(cls, model: type[BaseModel], json_data: str | bytes, **kwargs):
+         """Create a Config object from a JSON string."""
+         cfg = cls(model, no_config=True)
+         cfg._model_validate_json(json_data, **kwargs)
+         return cfg
+
+     def build_filenames(self):
+         """
+         Build data file paths with the parameters found in `files`.
+
+         File names are built like this:
+             {data_directory}/{expid}{ext}
+         For oscilloscope data saved as Tektronix WFM files:
+             {data_directory}/{expid}_ch{n}{ext}
+         """
+         self.filenames = self._build_filenames(self.files)
+
+     def resolve_nexus(self, serie_name: str):
+         """
+         Patch the `[nexus.groups]` section of the configuration.
+
+         Replace "serie" in keys with `serie_name`, and if no name is specified for the
+         main NXentry, set it to the dataset name.
+
+         Parameters
+         ----------
+         serie_name : str
+             Name of the series.
+         """
+         # Set the NXroot object name if not specified
+         if not self.nexus.groups["root"]["name"]:
+             self.nexus.groups["root"]["name"] = self.expid
+
+         # Replace "serie" in key names with `serie_name`
+         groups = self.nexus.groups.copy()
+         for key in groups:
+             self.nexus.groups[key.replace("serie", serie_name)] = self.nexus.groups.pop(
+                 key
+             )
+
+     def write(
+         self,
+         output_file: str | Path,
+         format: Literal["guess", "toml", "json"] = "guess",
+         overwrite: bool = False,
+     ) -> bool:
+         """
+         Save current configuration to file.
+
+         If the target file exists and `overwrite` is False (default), the file is not
+         written. By default, the write mode is guessed from the file extension (json or
+         toml); to force a mode, use the `format` keyword argument.
+
+         Parameters
+         ----------
+         output_file : str or Path
+             Path to the output file.
+         format : {"guess", "json", "toml"}, optional
+             Output file format. `"guess"` infers it from the file extension; this is
+             the default.
+         overwrite : bool, optional
+             Whether to overwrite the output file if it exists. Default is False.
+         """
+         output_file = Path(output_file)
+         # Check there is something to do
+         if output_file.is_file() and not overwrite:
+             logger.warning(f"{output_file.name} already exists, not saving.")
+             return False
+         if format not in ("guess", "toml", "json"):
+             logger.error("File format not allowed, choose 'guess', 'toml' or 'json'.")
+             return False
+
+         # Determine file format
+         if format == "guess":
+             if output_file.suffix.endswith(".toml"):
+                 format = "toml"
+             elif output_file.suffix.endswith(".json"):
+                 format = "json"
+             else:
+                 logger.error(
+                     f"Couldn't infer file format from file name: {output_file.name}"
+                 )
+                 return False
+
+         try:
+             if format == "toml":
+                 self._save_toml(output_file)
+             elif format == "json":
+                 self._save_json(output_file)
+             logger.info(f"Configuration saved at {output_file}.")
+             return True
+         except Exception as e:
+             logger.error(f"Failed to save configuration file ({e}).")
+             return False
+
+     def model_dump_json(self, *args, **kwargs) -> str:
+         """
+         Dump the model in a JSON string.
+
+         Just a wrapper for the underlying Pydantic model `model_dump_json()` method.
+         Because of the `__getattr__()` magic method, this method is not mandatory. It is
+         still implemented for clarity.
+         """
+         return self._cfg.model_dump_json(*args, **kwargs)
+
+     def _patch_old_version(self):
+         """
+         Further adjustments to adapt to previous configuration file versions.
+
+         The 'filenames' section should be used to specify direct paths to the data files.
+         + 'base' is replaced by a more explicit 'expid' and set at the root of the file,
+           outside of any section.
+         + 'data_directory' should be set at the root of the file, outside of any section.
+         """
+         if "base" in self.filenames:
+             logger.warning(
+                 "Setting 'base' in the 'filenames' section is deprecated. "
+                 "Set the 'expid' parameter at the top of the file instead."
+             )
+             self.expid = str(self.filenames.pop("base"))
+         if "data_directory" in self.filenames:
+             logger.warning(
+                 "Setting 'data_directory' in the 'filenames' section is deprecated. "
+                 "Set the 'data_directory' parameter at the top of the file instead."
+             )
+             self.data_directory = Path(self.filenames.pop("data_directory"))
+
+     def _resolve_data_directory(self):
+         """
+         Resolve the data directory entry from the configuration file.
+
+         If it is "." or omitted, the directory containing the configuration file is
+         used. Otherwise, the location specified by the user is used.
+         """
+         if self.data_directory in (None, Path(".")) and self._user_file is not None:
+             self.data_directory = self._user_file.parent
+         elif isinstance(self.data_directory, str):
+             self.data_directory = Path(self.data_directory)
+
+     def _build_filenames(
+         self, files_dic: dict[str, File]
+     ) -> dict[str, Path]:
+         """
+         Build file names based on configuration.
+
+         File names are built like this:
+             {data_directory}/{expid}{ext}
+         For oscilloscope data saved as Tektronix WFM files:
+             {data_directory}/{expid}_ch{n}{ext}
+
+         Parameters
+         ----------
+         files_dic : dict
+             Mapping of file entries from the `[files]` configuration section.
+         """
+         # Resolve the various files that should be entries in the [files] section
+         datadir = Path(self.data_directory)  # for convenience in this function
+         filenames_dic = dict()
+         for entry in files_dic:
+             # Get file extension
+             ext = files_dic[entry].ext
+             if ext.endswith("wfm"):
+                 # Special case for oscilloscopes: one file per channel, add a keyword
+                 filenames_dic[entry] = datadir / f"{self.expid}_!CHANNELID{ext}"
+
+             else:
+                 filenames_dic[entry] = datadir / f"{self.expid}{ext}"
+
+         return filenames_dic
+
+     def _save_json(self, output_file: Path):
+         """Save current configuration to a JSON file."""
+         output_file.write_text(self.model_dump_json(), encoding="utf8")
+
+     def _save_toml(self, output_file: Path):
+         """Save current configuration to a TOML file."""
+         output_file.write_text(
+             tomlkit.dumps(strip_none_dict(self._cfg.model_dump(mode="json"))),
+             encoding="utf8",
+         )
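For orientation, here is a minimal, hypothetical sketch of how `BaseConfig` is meant to be driven by a Pydantic model. `ToyFile`, `ToyModel` and the `sampling_rate` field are invented for illustration (the real models live in `config_models.py` and in the pytdo/pyuson subpackages, which also ship bundled `config_default.toml` files), and the sketch assumes `merge_dict_nested()` gives precedence to its first argument, as the override handling above suggests.

```python
from pathlib import Path

from pydantic import BaseModel

from pymagnetos.core import BaseConfig


class ToyFile(BaseModel):
    # Hypothetical stand-in for config_models.File: only `ext` is needed
    # by BaseConfig._build_filenames().
    ext: str = ".npz"


class ToyModel(BaseModel):
    # Fields used by BaseConfig itself...
    expid: str = "experiment"
    data_directory: Path = Path(".")
    files: dict[str, ToyFile] = {"pulse": ToyFile()}
    filenames: dict[str, Path] = {}
    # ...plus an invented experiment parameter.
    sampling_rate: float = 1e6


# Keyword overrides take precedence over the (absent) user and default TOML files.
cfg = BaseConfig(ToyModel, expid="run42", sampling_rate=2e6)

# Attribute access and assignment are delegated to the underlying Pydantic model.
print(cfg.sampling_rate)       # 2000000.0
print(cfg.filenames["pulse"])  # run42.npz, resolved under data_directory
cfg.sampling_rate = 5e5

# write() guesses the output format from the extension (TOML here).
cfg.write("run42_config.toml", overwrite=True)
```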
pymagnetos/core/_data.py ADDED
@@ -0,0 +1,132 @@
+ """Classes to store data as NeXus objects."""
+
+ import importlib.metadata
+
+ import nexusformat.nexus as nx
+
+
+ class DataBase(nx.NXgroup):
+     """Base Data class integrating NXgroup."""
+
+     def __init__(self, attr: dict = {}, **kwargs) -> None:
+         """
+         Base Data class integrating NXgroup [1].
+
+         `DataBase` should be used for subclassing. It allows creating an NXgroup with
+         attributes specified as a dictionary.
+
+         [1] https://nexpy.github.io/nexpy/treeapi.html#nexusformat.nexus.tree.NXgroup
+
+         Parameters
+         ----------
+         attr : dict, optional
+             Attributes for the NXgroup. Default is an empty dictionary.
+         **kwargs : passed to `nexusformat.NXgroup()`.
+         """
+         super().__init__(**kwargs)
+
+         # Set attributes
+         for k, v in attr.items():
+             self.attrs[k] = v
+
+
+ class DataRaw(DataBase):
+     """Store raw data in a NeXus NXdata group."""
+
+     def __init__(self, attr: dict = {}, **kwargs) -> None:
+         """
+         Store raw data in a NeXus group.
+
+         The instantiated object will be of type `nexusformat.nexus.NXdata` [1].
+
+         [1] https://nexpy.github.io/nexpy/treeapi.html#nexusformat.nexus.tree.NXdata
+
+         Parameters
+         ----------
+         attr : dict, optional
+             Attributes for the NXdata group. Default is an empty dictionary.
+         **kwargs : passed to `pyuson.data.DataBase()`.
+         """
+         super().__init__(attr=attr, **kwargs)
+
+         # Convert to NXdata
+         self.nxclass = "NXdata"
+         self.nxname = "data"
+
+
+ class DataProcessed(DataBase):
+     """Store processed data in a NeXus NXprocess group."""
+
+     def __init__(
+         self,
+         program: str = "generic",
+         results_name: str = "results",
+         serie_name: str = "serie",
+         attr: dict = {},
+         **kwargs,
+     ) -> None:
+         """
+         Store processed data in a NeXus group.
+
+         The instantiated object will be of type `nexusformat.nexus.NXprocess` [1]. It
+         behaves like it, with an additional `create_serie()` method that adds an NXdata
+         group.
+
+         It has the following structure:
+         ```
+         analysis:NXprocess
+           date = '2025-08-05T13:28:48.974923'
+           program = '{program}'
+           {results_name}:NXdata
+           {results_name}_{serie_name}{serie_index}:NXdata
+           version = '0.1.10'
+         ```
+
+         [1] https://nexpy.github.io/nexpy/treeapi.html#nexusformat.nexus.tree.NXprocess
+
+         Parameters
+         ----------
+         program : str, optional
+             Name of the package used to create the object. Must be an installed Python
+             package. Default is 'generic'.
+         results_name : str, optional
+             Name of the results NeXus NXdata group. Default is 'results'.
+         serie_name : str, optional
+             Name that will be appended to `results_name`, along with the serie index, to
+             build the name of the per-serie NeXus NXdata group. Default is 'serie'.
+         attr : dict, optional
+             Attributes for the NXdata group. Default is an empty dictionary.
+         **kwargs : passed to `pyuson.data.DataBase()`.
+         """
+         self._res_name = results_name
+         self._serie_name = serie_name
+
+         super().__init__(attr=attr, **kwargs)
+
+         # Inject the `create_serie()` method before NXgroup changes __class__, making
+         # any standard instance methods unbound
+         nx.NXgroup.create_serie = lambda self, index: _create_serie(self, index)  # ty:ignore[unresolved-attribute]
+
+         # Convert to NXprocess
+         self.nxclass = "NXprocess"
+         self.nxname = "analysis"
+
+         # Create base datasets
+         self["program"] = program
+         self["version"] = importlib.metadata.version(program)
+         self["results"] = nx.NXdata()
+
+ def _create_serie(self: DataProcessed, index: int) -> None:
+     """
+     Create an NXdata group for serie data.
+
+     The name of the created group is: '{results_name}_{serie_name}{index}'.
+
+     Parameters
+     ----------
+     index : int
+         Number appended to the serie name.
+     """
+     group_name = f"{self._res_name}_{self._serie_name}{index}"
+     if group_name not in self:
+         self[group_name] = nx.NXdata()
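A short, hedged sketch of how these classes are intended to be used, following the structure documented in the `DataProcessed` docstring. It assumes the injected `create_serie()` behaves as described, and passes `program="pymagnetos"` so that `importlib.metadata` can resolve an installed version (the default `'generic'` would not be an installed distribution).

```python
import nexusformat.nexus as nx

from pymagnetos.core import DataProcessed, DataRaw

# Raw data ends up in an NXdata group named "data".
raw = DataRaw(attr={"source": "oscilloscope"})

# Processed data lives in an NXprocess group named "analysis".
analysis = DataProcessed(program="pymagnetos")

# The injected create_serie() adds one NXdata group per processed series,
# named '{results_name}_{serie_name}{index}' ("results_serie1" here).
analysis.create_serie(1)
analysis["results_serie1"]["field"] = nx.NXfield([0.0, 1.0, 2.0])

print(analysis.tree)  # mirrors the structure shown in the docstring above
```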