pymodaq 3.6.13__py3-none-any.whl → 4.0.1__py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Potentially problematic release: this version of pymodaq might be problematic.
- pymodaq/__init__.py +13 -6
- pymodaq/control_modules/__init__.py +0 -7
- pymodaq/control_modules/daq_move.py +965 -2
- pymodaq/control_modules/daq_move_ui.py +319 -0
- pymodaq/control_modules/daq_viewer.py +1573 -3
- pymodaq/control_modules/daq_viewer_ui.py +393 -0
- pymodaq/control_modules/mocks.py +51 -0
- pymodaq/control_modules/move_utility_classes.py +709 -8
- pymodaq/control_modules/utils.py +256 -0
- pymodaq/control_modules/viewer_utility_classes.py +663 -6
- pymodaq/daq_utils.py +89 -0
- pymodaq/dashboard.py +91 -72
- pymodaq/examples/custom_app.py +12 -11
- pymodaq/examples/custom_viewer.py +10 -10
- pymodaq/examples/function_plotter.py +16 -13
- pymodaq/examples/nonlinearscanner.py +8 -6
- pymodaq/examples/parameter_ex.py +7 -7
- pymodaq/examples/preset_MockCamera.xml +1 -0
- pymodaq/extensions/__init__.py +16 -0
- pymodaq/extensions/console.py +76 -0
- pymodaq/{daq_logger.py → extensions/daq_logger.py} +115 -65
- pymodaq/extensions/daq_scan.py +1339 -0
- pymodaq/extensions/daq_scan_ui.py +240 -0
- pymodaq/extensions/h5browser.py +23 -0
- pymodaq/{pid → extensions/pid}/__init__.py +4 -2
- pymodaq/{pid → extensions/pid}/daq_move_PID.py +2 -2
- pymodaq/{pid → extensions/pid}/pid_controller.py +48 -36
- pymodaq/{pid → extensions/pid}/utils.py +52 -6
- pymodaq/extensions/utils.py +40 -0
- pymodaq/post_treatment/__init__.py +6 -0
- pymodaq/{daq_analysis → post_treatment/daq_analysis}/daq_analysis_main.py +17 -17
- pymodaq/{daq_measurement → post_treatment/daq_measurement}/daq_measurement_main.py +8 -14
- pymodaq/post_treatment/load_and_plot.py +219 -0
- pymodaq/post_treatment/process_to_scalar.py +263 -0
- pymodaq/resources/QtDesigner_Ressources/Icon_Library/run_all.png +0 -0
- pymodaq/resources/QtDesigner_Ressources/Icon_Library/stop_all.png +0 -0
- pymodaq/resources/QtDesigner_Ressources/QtDesigner_ressources.bat +1 -1
- pymodaq/resources/QtDesigner_Ressources/QtDesigner_ressources.qrc +1 -0
- pymodaq/resources/QtDesigner_Ressources/QtDesigner_ressources_rc.py +109784 -109173
- pymodaq/resources/QtDesigner_Ressources/icons.svg +142 -0
- pymodaq/resources/VERSION +1 -1
- pymodaq/resources/config_template.toml +32 -13
- pymodaq/resources/preset_default.xml +1 -1
- pymodaq/{daq_utils → utils}/Tuto innosetup/script_full_setup.iss +1 -1
- pymodaq/utils/__init__.py +0 -29
- pymodaq/utils/abstract/__init__.py +48 -0
- pymodaq/{daq_utils → utils}/abstract/logger.py +7 -3
- pymodaq/utils/array_manipulation.py +379 -8
- pymodaq/{daq_utils → utils}/calibration_camera.py +6 -6
- pymodaq/{daq_utils → utils}/chrono_timer.py +1 -1
- pymodaq/utils/config.py +448 -0
- pymodaq/utils/conftests.py +5 -0
- pymodaq/utils/daq_utils.py +828 -8
- pymodaq/utils/data.py +1873 -7
- pymodaq/{daq_utils → utils}/db/db_logger/db_logger.py +86 -47
- pymodaq/{daq_utils → utils}/db/db_logger/db_logger_models.py +31 -10
- pymodaq/{daq_utils → utils}/enums.py +12 -7
- pymodaq/utils/exceptions.py +37 -0
- pymodaq/utils/factory.py +82 -0
- pymodaq/{daq_utils → utils}/gui_utils/__init__.py +1 -1
- pymodaq/utils/gui_utils/custom_app.py +129 -0
- pymodaq/utils/gui_utils/file_io.py +66 -0
- pymodaq/{daq_utils → utils}/gui_utils/layout.py +2 -2
- pymodaq/{daq_utils → utils}/gui_utils/utils.py +13 -3
- pymodaq/{daq_utils → utils}/gui_utils/widgets/__init__.py +2 -2
- pymodaq/utils/gui_utils/widgets/label.py +24 -0
- pymodaq/{daq_utils → utils}/gui_utils/widgets/lcd.py +12 -7
- pymodaq/{daq_utils → utils}/gui_utils/widgets/push.py +66 -2
- pymodaq/{daq_utils → utils}/gui_utils/widgets/qled.py +6 -4
- pymodaq/utils/gui_utils/widgets/spinbox.py +24 -0
- pymodaq/{daq_utils → utils}/gui_utils/widgets/table.py +2 -2
- pymodaq/utils/h5modules/__init__.py +1 -0
- pymodaq/{daq_utils/h5backend.py → utils/h5modules/backends.py} +200 -112
- pymodaq/utils/h5modules/browsing.py +683 -0
- pymodaq/utils/h5modules/data_saving.py +839 -0
- pymodaq/utils/h5modules/h5logging.py +110 -0
- pymodaq/utils/h5modules/module_saving.py +350 -0
- pymodaq/utils/h5modules/saving.py +914 -0
- pymodaq/utils/h5modules/utils.py +85 -0
- pymodaq/utils/logger.py +64 -6
- pymodaq/utils/managers/action_manager.py +460 -0
- pymodaq/{daq_utils → utils}/managers/batchscan_manager.py +144 -112
- pymodaq/{daq_utils → utils}/managers/modules_manager.py +188 -114
- pymodaq/{daq_utils → utils}/managers/overshoot_manager.py +3 -3
- pymodaq/utils/managers/parameter_manager.py +110 -0
- pymodaq/{daq_utils → utils}/managers/preset_manager.py +17 -13
- pymodaq/{daq_utils → utils}/managers/preset_manager_utils.py +8 -7
- pymodaq/{daq_utils → utils}/managers/remote_manager.py +7 -6
- pymodaq/{daq_utils → utils}/managers/roi_manager.py +148 -57
- pymodaq/utils/math_utils.py +546 -10
- pymodaq/{daq_utils → utils}/messenger.py +5 -1
- pymodaq/utils/parameter/__init__.py +2 -15
- pymodaq/{daq_utils → utils}/parameter/ioxml.py +12 -6
- pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/__init__.py +1 -3
- pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/filedir.py +1 -1
- pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/itemselect.py +3 -0
- pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/led.py +1 -1
- pymodaq/utils/parameter/pymodaq_ptypes/pixmap.py +161 -0
- pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/slide.py +1 -1
- pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/table.py +1 -1
- pymodaq/utils/parameter/utils.py +206 -11
- pymodaq/utils/plotting/data_viewers/__init__.py +6 -0
- pymodaq/utils/plotting/data_viewers/viewer.py +393 -0
- pymodaq/utils/plotting/data_viewers/viewer0D.py +251 -0
- pymodaq/utils/plotting/data_viewers/viewer1D.py +574 -0
- pymodaq/{daq_utils → utils}/plotting/data_viewers/viewer1Dbasic.py +8 -3
- pymodaq/{daq_utils → utils}/plotting/data_viewers/viewer2D.py +292 -357
- pymodaq/{daq_utils → utils}/plotting/data_viewers/viewer2D_basic.py +58 -75
- pymodaq/utils/plotting/data_viewers/viewerND.py +738 -0
- pymodaq/{daq_utils → utils}/plotting/gant_chart.py +2 -2
- pymodaq/{daq_utils → utils}/plotting/items/axis_scaled.py +4 -2
- pymodaq/{daq_utils → utils}/plotting/items/image.py +8 -6
- pymodaq/utils/plotting/navigator.py +355 -0
- pymodaq/utils/plotting/scan_selector.py +480 -0
- pymodaq/utils/plotting/utils/axes_viewer.py +88 -0
- pymodaq/utils/plotting/utils/filter.py +538 -0
- pymodaq/utils/plotting/utils/lineout.py +224 -0
- pymodaq/{daq_utils → utils}/plotting/utils/plot_utils.py +196 -84
- pymodaq/{daq_utils → utils}/plotting/utils/signalND.py +21 -13
- pymodaq/utils/plotting/widgets.py +76 -0
- pymodaq/utils/scanner/__init__.py +10 -0
- pymodaq/utils/scanner/scan_factory.py +204 -0
- pymodaq/utils/scanner/scanner.py +271 -0
- pymodaq/utils/scanner/scanners/_1d_scanners.py +117 -0
- pymodaq/utils/scanner/scanners/_2d_scanners.py +293 -0
- pymodaq/utils/scanner/scanners/sequential.py +192 -0
- pymodaq/utils/scanner/scanners/tabular.py +294 -0
- pymodaq/utils/scanner/utils.py +83 -0
- pymodaq/utils/slicing.py +47 -0
- pymodaq/utils/svg/__init__.py +6 -0
- pymodaq/utils/svg/svg_renderer.py +20 -0
- pymodaq/utils/svg/svg_view.py +35 -0
- pymodaq/utils/svg/svg_viewer2D.py +51 -0
- pymodaq/{daq_utils → utils}/tcp_server_client.py +36 -37
- pymodaq/{daq_utils → utils}/tree_layout/tree_layout_main.py +50 -35
- pymodaq/utils/units.py +216 -0
- pymodaq-4.0.1.dist-info/METADATA +159 -0
- {pymodaq-3.6.13.dist-info → pymodaq-4.0.1.dist-info}/RECORD +167 -170
- {pymodaq-3.6.13.dist-info → pymodaq-4.0.1.dist-info}/WHEEL +1 -2
- pymodaq-4.0.1.dist-info/entry_points.txt +8 -0
- pymodaq/daq_move/daq_move_gui.py +0 -279
- pymodaq/daq_move/daq_move_gui.ui +0 -534
- pymodaq/daq_move/daq_move_main.py +0 -1042
- pymodaq/daq_move/process_from_QtDesigner_DAQ_Move_GUI.bat +0 -2
- pymodaq/daq_move/utility_classes.py +0 -686
- pymodaq/daq_scan.py +0 -2160
- pymodaq/daq_utils/array_manipulation.py +0 -386
- pymodaq/daq_utils/config.py +0 -273
- pymodaq/daq_utils/conftests.py +0 -7
- pymodaq/daq_utils/custom_parameter_tree.py +0 -9
- pymodaq/daq_utils/daq_enums.py +0 -133
- pymodaq/daq_utils/daq_utils.py +0 -1402
- pymodaq/daq_utils/exceptions.py +0 -71
- pymodaq/daq_utils/gui_utils/custom_app.py +0 -103
- pymodaq/daq_utils/gui_utils/file_io.py +0 -75
- pymodaq/daq_utils/gui_utils/widgets/spinbox.py +0 -9
- pymodaq/daq_utils/h5exporter_hyperspy.py +0 -115
- pymodaq/daq_utils/h5exporters.py +0 -242
- pymodaq/daq_utils/h5modules.py +0 -1559
- pymodaq/daq_utils/h5utils.py +0 -241
- pymodaq/daq_utils/managers/action_manager.py +0 -236
- pymodaq/daq_utils/managers/parameter_manager.py +0 -57
- pymodaq/daq_utils/math_utils.py +0 -705
- pymodaq/daq_utils/parameter/__init__.py +0 -1
- pymodaq/daq_utils/parameter/oldpymodaq_ptypes.py +0 -1626
- pymodaq/daq_utils/parameter/pymodaq_ptypes/pixmap.py +0 -85
- pymodaq/daq_utils/parameter/utils.py +0 -136
- pymodaq/daq_utils/plotting/data_viewers/__init__.py +0 -0
- pymodaq/daq_utils/plotting/data_viewers/process_from_QtDesigner_0DViewer_GUI.bat +0 -2
- pymodaq/daq_utils/plotting/data_viewers/viewer0D.py +0 -204
- pymodaq/daq_utils/plotting/data_viewers/viewer0D_GUI.py +0 -89
- pymodaq/daq_utils/plotting/data_viewers/viewer0D_GUI.ui +0 -131
- pymodaq/daq_utils/plotting/data_viewers/viewer1D.py +0 -781
- pymodaq/daq_utils/plotting/data_viewers/viewerND.py +0 -894
- pymodaq/daq_utils/plotting/data_viewers/viewerbase.py +0 -64
- pymodaq/daq_utils/plotting/items/__init__.py +0 -0
- pymodaq/daq_utils/plotting/navigator.py +0 -500
- pymodaq/daq_utils/plotting/scan_selector.py +0 -289
- pymodaq/daq_utils/plotting/utils/__init__.py +0 -0
- pymodaq/daq_utils/plotting/utils/filter.py +0 -236
- pymodaq/daq_utils/plotting/viewer0D/__init__.py +0 -0
- pymodaq/daq_utils/plotting/viewer0D/viewer0D_main.py +0 -4
- pymodaq/daq_utils/plotting/viewer1D/__init__.py +0 -0
- pymodaq/daq_utils/plotting/viewer1D/viewer1D_main.py +0 -4
- pymodaq/daq_utils/plotting/viewer1D/viewer1Dbasic.py +0 -4
- pymodaq/daq_utils/plotting/viewer2D/viewer_2D_basic.py +0 -4
- pymodaq/daq_utils/plotting/viewer2D/viewer_2D_main.py +0 -4
- pymodaq/daq_utils/plotting/viewerND/__init__.py +0 -0
- pymodaq/daq_utils/plotting/viewerND/viewerND_main.py +0 -4
- pymodaq/daq_utils/scanner.py +0 -1289
- pymodaq/daq_utils/tree_layout/__init__.py +0 -0
- pymodaq/daq_viewer/__init__.py +0 -0
- pymodaq/daq_viewer/daq_gui_settings.py +0 -237
- pymodaq/daq_viewer/daq_gui_settings.ui +0 -441
- pymodaq/daq_viewer/daq_viewer_main.py +0 -2225
- pymodaq/daq_viewer/process_from_QtDesigner_DAQ_GUI_settings.bat +0 -2
- pymodaq/daq_viewer/utility_classes.py +0 -673
- pymodaq/examples/logger_image/__init__.py +0 -0
- pymodaq/examples/logger_image/logger_displayer.py +0 -121
- pymodaq/examples/logger_image/setup.svg +0 -3119
- pymodaq/examples/logger_image/setup_svg.py +0 -114
- pymodaq/h5browser.py +0 -39
- pymodaq/utils/scanner.py +0 -15
- pymodaq-3.6.13.dist-info/METADATA +0 -39
- pymodaq-3.6.13.dist-info/entry_points.txt +0 -8
- pymodaq-3.6.13.dist-info/top_level.txt +0 -1
- /pymodaq/{daq_analysis → post_treatment/daq_analysis}/__init__.py +0 -0
- /pymodaq/{daq_measurement → post_treatment/daq_measurement}/__init__.py +0 -0
- /pymodaq/{daq_measurement → post_treatment/daq_measurement}/daq_measurement_GUI.py +0 -0
- /pymodaq/{daq_measurement → post_treatment/daq_measurement}/daq_measurement_GUI.ui +0 -0
- /pymodaq/{daq_measurement → post_treatment/daq_measurement}/process_from_QtDesigner_DAQ_Measurement_GUI.bat +0 -0
- /pymodaq/{daq_utils → utils}/Tuto innosetup/Tuto innosetup.odt +0 -0
- /pymodaq/{daq_utils → utils}/Tuto innosetup/Tuto innosetup.pdf +0 -0
- /pymodaq/{daq_move → utils/db}/__init__.py +0 -0
- /pymodaq/{daq_utils → utils/db/db_logger}/__init__.py +0 -0
- /pymodaq/{daq_utils → utils}/gui_utils/dock.py +0 -0
- /pymodaq/{daq_utils → utils}/gui_utils/list_picker.py +0 -0
- /pymodaq/{daq_utils/abstract → utils/managers}/__init__.py +0 -0
- /pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/bool.py +0 -0
- /pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/date.py +0 -0
- /pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/list.py +0 -0
- /pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/numeric.py +0 -0
- /pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/tableview.py +0 -0
- /pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/text.py +0 -0
- /pymodaq/{daq_utils/db → utils/plotting}/__init__.py +0 -0
- /pymodaq/{daq_utils → utils}/plotting/image_viewer.py +0 -0
- /pymodaq/{daq_utils/db/db_logger → utils/plotting/items}/__init__.py +0 -0
- /pymodaq/{daq_utils → utils}/plotting/items/crosshair.py +0 -0
- /pymodaq/{daq_utils/managers → utils/plotting/utils}/__init__.py +0 -0
- /pymodaq/{daq_utils → utils}/qvariant.py +0 -0
- /pymodaq/{daq_utils/plotting/viewer2D → utils/scanner/scanners}/__init__.py +0 -0
- /pymodaq/{daq_utils/plotting → utils/tree_layout}/__init__.py +0 -0
- {pymodaq-3.6.13.dist-info → pymodaq-4.0.1.dist-info/licenses}/LICENSE +0 -0
pymodaq/utils/h5modules/data_saving.py (new file)
@@ -0,0 +1,839 @@
+# -*- coding: utf-8 -*-
+"""
+Created the 21/11/2022
+
+@author: Sebastien Weber
+"""
+from time import time
+from typing import Union, List, Tuple
+
+import numpy as np
+
+from pymodaq.utils.abstract import ABCMeta, abstract_attribute
+from pymodaq.utils.enums import enum_checker
+from pymodaq.utils.data import Axis, DataDim, DataWithAxes, DataToExport, DataDistribution
+from .saving import DataType, H5Saver
+from .backends import GROUP, CARRAY, Node, EARRAY
+from pymodaq.utils.daq_utils import capitalize
+from pymodaq.utils.scanner.utils import ScanType
+
+SPECIAL_GROUP_NAMES = dict(nav_axes='NavAxes')
+
+
+class AxisError(Exception):
+    pass
+
+
+class DataManagement(metaclass=ABCMeta):
+    """Base abstract class to be used for all specialized object saving and loading data to/from a h5file
+
+    Attributes
+    ----------
+    data_type: DataType
+        The enum for this type of data, here abstract and should be redefined
+    """
+    data_type: DataType = abstract_attribute()
+    _h5saver: H5Saver = abstract_attribute()
+
+    @classmethod
+    def _format_node_name(cls, ind: int) -> str:
+        """ Format the saved node following the data_type attribute and an integer index
+
+        Parameters
+        ----------
+        ind: int
+
+        Returns
+        -------
+        str: the future name of the node
+        """
+        return f'{capitalize(cls.data_type.value)}{ind:02d}'
+
+    def _get_next_node_name(self, where) -> str:
+        """Get the formatted next node name given the ones already saved
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+
+        Returns
+        -------
+        str: the future name of the node
+        """
+        return self._format_node_name(self._get_next_data_type_index_in_group(where))
+
+    def get_last_node_name(self, where) -> Union[str, None]:
+        """Get the last node name among the ones already saved
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+
+        Returns
+        -------
+        str: the name of the last saved node or None if none saved
+        """
+        index = self._get_next_data_type_index_in_group(where) - 1
+        if index == -1:
+            return None
+        else:
+            return self._format_node_name(index)
+
+    def get_node_from_index(self, where, index):
+        return self._h5saver.get_node(where, self._format_node_name(index))
+
+    def _get_next_data_type_index_in_group(self, where: Union[Node, str]) -> int:
+        """Check how much node with a given data_type are already present within the GROUP where
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+
+        Returns
+        -------
+        int: the next available integer to index the node name
+        """
+        ind = 0
+        for node in self._h5saver.walk_nodes(where):
+            if 'data_type' in node.attrs:
+                if node.attrs['data_type'] == self.data_type.name:
+                    ind += 1
+        return ind
+
+    def _is_node_of_data_type(self, where):
+        """Check if a given node is of the data_type of the real class implementation
+
+        eg 'axis' for the AxisSaverLoader
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+
+        Returns
+        -------
+        bool
+        """
+        node = self._get_node(where)
+        return 'data_type' in node.attrs and node.attrs['data_type'] == self.data_type
+
+    def _get_node(self, where: Union[str, Node]) -> Node:
+        """Utility method to get a node from a node or a string"""
+        return self._h5saver.get_node(where)
+
+    def _get_nodes(self, where: Union[str, Node]) -> List[Node]:
+        """Get Nodes hanging from where including where
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+
+        Returns
+        -------
+        List[Node]
+        """
+        node = self._get_node(where)
+        if isinstance(node, GROUP):
+            return [child_node for child_node in self._h5saver.walk_nodes(node)]
+        else:
+            return [node]
+
+    def _get_nodes_from_data_type(self, where):
+        """Get the node list hanging from a parent and having the same data type as self
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node
+
+        Returns
+        -------
+        list of Nodes
+        """
+        node = self._get_node(where)
+        if isinstance(node, GROUP):
+            parent_node = node
+        else:
+            parent_node = node.parent_node
+
+        nodes = []
+        for child_node in self._h5saver.walk_nodes(parent_node):
+            if self._is_node_of_data_type(child_node):
+                nodes.append(child_node)
+        return nodes
+
+
+class AxisSaverLoader(DataManagement):
+    """Specialized Object to save and load Axis object to and from a h5file
+
+    Parameters
+    ----------
+    h5saver: H5Saver
+
+    Attributes
+    ----------
+    data_type: DataType
+        The enum for this type of data, here 'axis'
+    """
+    data_type = DataType['axis']
+
+    def __init__(self, h5saver: H5Saver):
+        self._h5saver = h5saver
+        self.data_type = enum_checker(DataType, self.data_type)
+
+    def add_axis(self, where: Union[Node, str], axis: Axis, enlargeable=False):
+        """Write Axis info at a given position within a h5 file
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+        axis: Axis
+            the Axis object to add as a node in the h5file
+        """
+        if axis.data is None:
+            axis.create_linear_data(axis.size)
+
+        array = self._h5saver.add_array(where, self._get_next_node_name(where), self.data_type, title=axis.label,
+                                        array_to_save=axis.data, data_dimension=DataDim['Data1D'],
+                                        enlargeable=enlargeable,
+                                        metadata=dict(size=axis.size, label=axis.label, units=axis.units,
+                                                      index=axis.index, offset=axis.offset, scaling=axis.scaling,
+                                                      distribution='uniform' if axis.is_axis_linear() else 'spread',
+                                                      spread_order=axis.spread_order))
+        return array
+
+    def load_axis(self, where: Union[Node, str]) -> Axis:
+        """create an Axis object from the data and metadata at a given node if of data_type: 'axis
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+
+        Returns
+        -------
+        Axis
+        """
+        axis_node = self._get_node(where)
+        if not self._is_node_of_data_type(axis_node):
+            raise AxisError(f'Could not create an Axis object from this node: {axis_node}')
+        return Axis(label=axis_node.attrs['label'], units=axis_node.attrs['units'],
+                    data=axis_node.read(), index=axis_node.attrs['index'], spread_order=axis_node.attrs['spread_order'])
+
+    def get_axes(self, where: Union[Node, str]) -> List[Axis]:
+        """Return a list of Axis objects from the Axis Nodes hanging from (or among) a given Node
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+
+        Returns
+        -------
+        List[Axis]: the list of all Axis object
+        """
+        return [self.load_axis(node) for node in self._get_nodes_from_data_type(where)]
+
+
+class DataSaverLoader(DataManagement):
+    """Specialized Object to save and load DataWithAxes object to and from a h5file
+
+    Parameters
+    ----------
+    h5saver: H5Saver
+
+    Attributes
+    ----------
+    data_type: DataType
+        The enum for this type of data, here 'data'
+    """
+    data_type = DataType['data']
+
+    def __init__(self, h5saver: H5Saver):
+        self.data_type = enum_checker(DataType, self.data_type)
+        self._h5saver = h5saver
+        self._axis_saver = AxisSaverLoader(h5saver)
+
+    def add_data(self, where: Union[Node, str], data: DataWithAxes, save_axes=True, **kwargs):
+        """Adds Array nodes to a given location adding eventually axes as others nodes and metadata
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+        data: DataWithAxes
+        save_axes: bool
+        """
+
+        for ind_data in range(len(data)):
+            metadata = dict(timestamp=data.timestamp, label=data.labels[ind_data],
+                            source=data.source.name, distribution=data.distribution.name,
+                            origin=data.origin,
+                            nav_indexes=tuple(data.nav_indexes)
+                            if data.nav_indexes is not None else None)
+            for name in data.extra_attributes:
+                metadata[name] = getattr(data, name)
+            self._h5saver.add_array(where, self._get_next_node_name(where), self.data_type, title=data.name,
+                                    array_to_save=data[ind_data], data_dimension=data.dim.name, metadata=metadata)
+        if save_axes:
+            for axis in data.axes:
+                self._axis_saver.add_axis(where, axis)
+
+    def get_axes(self, where: Union[Node, str]) -> List[Axis]:
+        """
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+
+        Returns
+        -------
+
+        """
+        return self._axis_saver.get_axes(where)
+
+    def get_bkg_nodes(self, where: Union[Node, str]):
+        bkg_nodes = []
+        for node in self._h5saver.walk_nodes(where):
+            if 'data_type' in node.attrs and node.attrs['data_type'] == 'bkg':
+                bkg_nodes.append(node)
+        return bkg_nodes
+
+    def get_data_arrays(self, where: Union[Node, str], with_bkg=False, load_all=False) -> List[np.ndarray]:
+        """
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+        with_bkg: bool
+            If True try to load background node and return the array with background subtraction
+        load_all: bool
+            If True load all similar nodes hanging from a parent
+
+        Returns
+        -------
+        list of ndarray
+        """
+        where = self._get_node(where)
+        if with_bkg:
+            bkg_nodes = []
+        if with_bkg:
+            bkg_nodes = self.get_bkg_nodes(where.parent_node)
+            if len(bkg_nodes) == 0:
+                with_bkg = False
+
+        if load_all:
+            getter = self._get_nodes_from_data_type
+        else:
+            getter = self._get_nodes
+
+        if with_bkg:
+            return [array.read()-bkg.read() for array, bkg in zip(getter(where), bkg_nodes)]
+        else:
+            return [array.read() for array in getter(where)]
+
+    def load_data(self, where, with_bkg=False, load_all=False) -> DataWithAxes:
+        """Return a DataWithAxes object from the Data and Axis Nodes hanging from (or among) a given Node
+
+        Does not include navigation axes stored elsewhere in the h5file. The node path is stored in the DatWithAxis
+        using the attribute path
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+        with_bkg: bool
+            If True try to load background node and return the data with background subtraction
+        load_all: bool
+            If True, will load all data hanging from the same parent node
+
+        See Also
+        --------
+        load_data
+        """
+
+        data_node = self._get_node(where)
+
+        if load_all:
+            parent_node = data_node.parent_node
+            data_nodes = self._get_nodes_from_data_type(parent_node)
+            data_node = data_nodes[0]
+        else:
+            parent_node = data_node.parent_node
+            if not isinstance(data_node, CARRAY):
+                return
+            data_nodes = [data_node]
+
+        if 'axis' in self.data_type.name:
+            ndarrays = [data_node.read() for data_node in data_nodes]
+            axes = [Axis(label=data_node.attrs['label'], units=data_node.attrs['units'],
+                         data=np.linspace(0, ndarrays[0].size-1, ndarrays[0].size-1))]
+        else:
+            ndarrays = self.get_data_arrays(data_node, with_bkg=with_bkg, load_all=load_all)
+            axes = self.get_axes(parent_node)
+
+        extra_attributes = data_node.attrs.to_dict()
+        for name in ['TITLE', 'CLASS', 'VERSION', 'backend', 'source', 'data_dimension', 'distribution', 'label',
+                     'origin', 'nav_indexes', 'dtype', 'data_type', 'subdtype', 'shape', 'size']:
+            extra_attributes.pop(name, None)
+
+        data = DataWithAxes(data_node.attrs['TITLE'],
+                            source=data_node.attrs['source'] if 'source' in data_node.attrs else 'raw',
+                            dim=data_node.attrs['data_dimension'],
+                            distribution=data_node.attrs['distribution'],
+                            data=ndarrays,
+                            labels=[node.attrs['label'] for node in data_nodes],
+                            origin=data_node.attrs['origin'] if 'origin' in data_node.attrs else '',
+                            nav_indexes=data_node.attrs['nav_indexes'] if 'nav_indexes' in data_node.attrs else (),
+                            axes=axes,
+                            path=data_node.path,
+                            **extra_attributes)
+        return data
+
+
+class BkgSaver(DataSaverLoader):
+    """Specialized Object to save and load DataWithAxes background object to and from a h5file
+
+    Parameters
+    ----------
+    hsaver: H5Saver
+
+    Attributes
+    ----------
+    data_type: DataType
+        The enum for this type of data, here 'data'
+    """
+    data_type = DataType['bkg']
+
+    def __init__(self, h5saver: H5Saver):
+        super().__init__(h5saver)
+
+
+class DataEnlargeableSaver(DataSaverLoader):
+    """Specialized Object to save and load enlargeable DataWithAxes saved object to and from a h5file
+
+    Parameters
+    ----------
+    h5saver: H5Saver
+
+    Attributes
+    ----------
+    data_type: DataType
+        The enum for this type of data, here 'data_enlargeable'
+
+    Notes
+    -----
+    To be used to save data from a timed logger (DAQViewer continuous saving or DAQLogger extension) or from an
+    adaptive scan where the final shape is unknown
+    """
+    data_type = DataType['data_enlargeable']
+
+    def __init__(self, h5saver: H5Saver):
+        super().__init__(h5saver)
+
+    def _create_data_arrays(self, where: Union[Node, str], data: DataWithAxes, save_axes=True):
+        """ Create enlargeable array to store data
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+        data: DataWithAxes
+        save_axes: bool
+
+        Notes
+        -----
+        Because data will be saved at a given index in the enlargeable array, related axes will have their index
+        increased by one unity
+        """
+
+        if self.get_last_node_name(where) is None:
+            for ind_data in range(len(data)):
+                nav_indexes = list(data.nav_indexes)
+                nav_indexes = [0] + list(np.array(nav_indexes, dtype=int) + 1)
+
+                self._h5saver.add_array(where, self._get_next_node_name(where), self.data_type, title=data.name,
+                                        array_to_save=data[ind_data],
+                                        data_shape=data[ind_data].shape,
+                                        array_type=data[ind_data].dtype,
+                                        enlargeable=True,
+                                        data_dimension=data.dim.name,
+                                        metadata=dict(timestamp=data.timestamp, label=data.labels[ind_data],
+                                                      source=data.source.name, distribution=data.distribution.name,
+                                                      origin=data.origin,
+                                                      nav_indexes=tuple(nav_indexes)))
+            if save_axes:
+                for axis in data.axes:
+                    axis.index += 1  # because of enlargeable data will have an extra shape
+                    self._axis_saver.add_axis(where, axis)
+
+    def add_data(self, where: Union[Node, str], data: DataWithAxes):
+        if self.get_last_node_name(where) is None:
+            self._create_data_arrays(where, data, save_axes=True)
+
+        for ind_data in range(len(data)):
+            array: EARRAY = self.get_node_from_index(where, ind_data)
+            array.append(data[ind_data])
+
+
+class DataExtendedSaver(DataSaverLoader):
+    """Specialized Object to save and load DataWithAxes saved object to and from a h5file in extended arrays
+
+    Parameters
+    ----------
+    h5saver: H5Saver
+    extended_shape: Tuple[int]
+        the extra shape compared to the data the h5array will have
+
+    Attributes
+    ----------
+    data_type: DataType
+        The enum for this type of data, here 'data'
+    """
+    data_type = DataType['data']
+
+    def __init__(self, h5saver: H5Saver, extended_shape: Tuple[int]):
+        super().__init__(h5saver)
+        self.extended_shape = extended_shape
+
+    def _create_data_arrays(self, where: Union[Node, str], data: DataWithAxes, save_axes=True,
+                            distribution=DataDistribution['uniform']):
+        """ Create array with extra dimensions (from scan) to store data
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+        data: DataWithAxes
+        save_axes: bool
+
+        Notes
+        -----
+        Because data will be saved at a given index in the "scan" array, related axes will have their index
+        increased by the length of the scan dim (1 for scan1D, 2 for scan2D, ...)
+        """
+        if self.get_last_node_name(where) is None:
+            for ind_data in range(len(data)):
+                nav_indexes = list(data.nav_indexes)
+                nav_indexes = [ind for ind in range(len(self.extended_shape))] +\
+                    list(np.array(nav_indexes, dtype=int) + len(self.extended_shape))
+
+                self._h5saver.add_array(where, self._get_next_node_name(where), self.data_type, title=data.name,
+                                        data_shape=data[ind_data].shape,
+                                        array_type=data[ind_data].dtype,
+                                        scan_shape=self.extended_shape,
+                                        add_scan_dim=True,
+                                        data_dimension=data.dim.name,
+                                        metadata=dict(timestamp=data.timestamp, label=data.labels[ind_data],
+                                                      source=data.source.name, distribution=distribution.name,
+                                                      origin=data.origin,
+                                                      nav_indexes=tuple(nav_indexes)))
+
+            if save_axes:
+                for axis in data.axes:
+                    axis.index += len(self.extended_shape)
+                    # because there will be len(self.extended_shape) extra navigation axes
+                    self._axis_saver.add_axis(where, axis)
+
+    def add_data(self, where: Union[Node, str], data: DataWithAxes, indexes: List[int],
+                 distribution=DataDistribution['uniform']):
+        """Adds given DataWithAxes at a location within the initialized h5 array
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+        data: DataWithAxes
+        indexes: List[int]
+            indexes where to save data in the init h5array (should have the same length as extended_shape and with values
+            coherent with this shape
+        """
+        if len(indexes) != len(self.extended_shape):
+            raise IndexError(f'Cannot put data into the h5array with extended indexes {indexes}')
+        for ind in range(len(indexes)):
+            if indexes[ind] > self.extended_shape[ind]:
+                raise IndexError(f'Indexes cannot be higher than the array shape')
+
+        if self.get_last_node_name(where) is None:
+            self._create_data_arrays(where, data, save_axes=True, distribution=distribution)
+
+        for ind_data in range(len(data)):
+            #todo check that getting with index is safe...
+            array: CARRAY = self.get_node_from_index(where, ind_data)
+            array[tuple(indexes)] = data[ind_data]  # maybe use array.__setitem__(indexes, data[ind_data]) if it's not working
+
+
+class DataToExportSaver:
+    """Object used to save DataToExport object into a h5file following the PyMoDAQ convention
+
+    Parameters
+    ----------
+    h5saver: H5Saver
+
+    """
+    def __init__(self, h5saver: H5Saver):
+        self._h5saver = h5saver
+        self._data_saver = DataSaverLoader(h5saver)
+        self._bkg_saver = BkgSaver(h5saver)
+
+    def _get_node(self, where: Union[Node, str]) -> Node:
+        return self._h5saver.get_node(where)
+
+    @staticmethod
+    def channel_formatter(ind: int):
+        """All DataWithAxes included in the DataToExport will be saved into a channel group indexed and
+        formatted as below"""
+        return f'CH{ind:02d}'
+
+    def add_data(self, where: Union[Node, str], data: DataToExport, settings_as_xml='', metadata={}):
+        """
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+        data: DataToExport
+        settings_as_xml: str
+            The settings parameter as an XML string
+        metadata: dict
+            all extra metadata to be saved in the group node where data will be saved
+
+        """
+        dims = data.get_dim_presents()
+        for dim in dims:
+            dim_group = self._h5saver.get_set_group(where, dim)
+            for ind, dwa in enumerate(data.get_data_from_dim(dim)):  # dwa: DataWithAxes filtered by dim
+                dwa_group = self._h5saver.get_set_group(dim_group, self.channel_formatter(ind), dwa.name)
+                # dwa_group = self._h5saver.add_ch_group(dim_group, dwa.name)
+                self._data_saver.add_data(dwa_group, dwa)
+
+    def add_bkg(self, where: Union[Node, str], data: DataToExport):
+        dims = data.get_dim_presents()
+        for dim in dims:
+            dim_group = self._h5saver.get_set_group(where, dim)
+            for ind, dwa in enumerate(data.get_data_from_dim(dim)):  # dwa: DataWithAxes filtered by dim
+                dwa_group = self._h5saver.get_set_group(dim_group, self.channel_formatter(ind), dwa.name)
+                # dwa_group = self._get_node_from_title(dim_group, dwa.name)
+                if dwa_group is not None:
+                    self._bkg_saver.add_data(dwa_group, dwa, save_axes=False)
+
+
+class DataToExportEnlargeableSaver(DataToExportSaver):
+    """Generic object to save DataToExport objects in an enlargeable h5 array
+
+    The next enlarged value should be specified in the add_data method
+
+    Parameters
+    ----------
+    h5saver: H5Saver
+    axis_name: str
+        the name of the enlarged axis array
+    axis_units: str
+        the units of the enlarged axis array
+    """
+    def __init__(self, h5saver: H5Saver, axis_name: str = 'nav axis', axis_units: str = ''):
+
+        super().__init__(h5saver)
+        self._data_saver = DataEnlargeableSaver(h5saver)
+        self._nav_axis_saver = AxisSaverLoader(h5saver)
+        self._axis_name = axis_name
+        self._axis_units = axis_units
+
+    def add_data(self, where: Union[Node, str], data: DataToExport, axis_value: float, settings_as_xml='', metadata={}):
+        """
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+        data: DataToExport
+            The data to be saved into an enlargeable array
+        axis_value: float
+            The next value of the enlarged axis
+        settings_as_xml: str
+            The settings parameter as an XML string
+        metadata: dict
+            all extra metadata to be saved in the group node where data will be saved
+        """
+        super().add_data(where, data, settings_as_xml, metadata)
+        where = self._get_node(where)
+        nav_group = self._h5saver.get_set_group(where, SPECIAL_GROUP_NAMES['nav_axes'])
+        if self._nav_axis_saver.get_last_node_name(nav_group) is None:
+            axis = Axis(label=self._axis_name, units=self._axis_units, data=np.array([0., 1.]), index=0)
+            axis_array = self._nav_axis_saver.add_axis(nav_group, axis, enlargeable=True)
+            axis_array.attrs['size'] = 0
+
+        axis_array = self._nav_axis_saver.get_node_from_index(nav_group, 0)
+        axis_array.append(np.array([axis_value]))
+        axis_array.attrs['size'] += 1
+
+
+class DataToExportTimedSaver(DataToExportEnlargeableSaver):
+    """Specialized DataToExportEnlargeableSaver to save data as a function of a time axis
+
+    Notes
+    -----
+    This object is made for continuous saving mode of DAQViewer and logging to h5file for DAQLogger
+    """
+    def __init__(self, h5saver: H5Saver):
+        super().__init__(h5saver, 'time', 's')
+
+    def add_data(self, where: Union[Node, str], data: DataToExport, settings_as_xml='', metadata={}):
+        super().add_data(where, data, axis_value=time(), settings_as_xml=settings_as_xml, metadata=metadata)
+
+
+class DataToExportExtendedSaver(DataToExportSaver):
+    """Object to save DataToExport at given indexes within arrays including extended shape
+
+    Mostly used for data generated from the DAQScan
+
+    Parameters
+    ----------
+    h5saver: H5Saver
+    extended_shape: Tuple[int]
+        the extra shape compared to the data the h5array will have
+    """
+
+    def __init__(self, h5saver: H5Saver, extended_shape: Tuple[int]):
+        super().__init__(h5saver)
+        self._data_saver = DataExtendedSaver(h5saver, extended_shape)
+        self._nav_axis_saver = AxisSaverLoader(h5saver)
+
+    def add_nav_axes(self, where: Union[Node, str], axes: List[Axis]):
+        """Used to add navigation axes related to the extended array
+
+        Notes
+        -----
+        For instance the scan axes in the DAQScan
+        """
+        where = self._get_node(where)
+        nav_group = self._h5saver.get_set_group(where, SPECIAL_GROUP_NAMES['nav_axes'])
+        if self._nav_axis_saver.get_last_node_name(nav_group) is None:
+            for axis in axes:
+                self._nav_axis_saver.add_axis(nav_group, axis)
+
+    def add_data(self, where: Union[Node, str], data: DataToExport, indexes: List[int],
+                 distribution=DataDistribution['uniform'],
+                 settings_as_xml='', metadata={}):
+        """
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+        data: DataToExport
+        indexes: List[int]
+            indexes where to save data in the init h5array (should have the same length as extended_shape and with values
+            coherent with this shape
+        settings_as_xml: str
+            The settings parameter as an XML string
+        metadata: dict
+            all extra metadata to be saved in the group node where data will be saved
+
+        """
+        dims = data.get_dim_presents()
+        for dim in dims:
+            dim_group = self._h5saver.get_set_group(where, dim)
+            for ind, dwa in enumerate(data.get_data_from_dim(dim)):  # dwa: DataWithAxes filtered by dim
+                dwa_group = self._h5saver.get_set_group(dim_group, self.channel_formatter(ind), dwa.name)
+                self._data_saver.add_data(dwa_group, dwa, indexes=indexes, distribution=distribution)
+
+
+class DataLoader:
+    """Specialized Object to load DataWithAxes object from a h5file
+
+    Parameters
+    ----------
+    h5saver: H5Saver
+    """
+
+    def __init__(self, h5saver: H5Saver):
+        self._axis_loader: AxisSaverLoader = None
+        self._data_loader: DataSaverLoader = None
+
+        self.h5saver = h5saver
+
+    @property
+    def h5saver(self):
+        return self._h5saver
+
+    @h5saver.setter
+    def h5saver(self, h5saver: H5Saver):
+        self._h5saver = h5saver
+        self._axis_loader = AxisSaverLoader(h5saver)
+        self._data_loader = DataSaverLoader(h5saver)
+
+    def get_nav_group(self, where: Union[Node, str]) -> Union[Node, None]:
+        """
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+
+        Returns
+        -------
+        GROUP: returns the group named SPECIAL_GROUP_NAMES['nav_axes'] holding all NavAxis for those data
+
+        See Also
+        --------
+        SPECIAL_GROUP_NAMES
+        """
+        node = self._h5saver.get_node(where)
+        while node is not None:  # means we reached the root level
+            if isinstance(node, GROUP):
+                if self._h5saver.is_node_in_group(node, SPECIAL_GROUP_NAMES['nav_axes']):
+                    return self._h5saver.get_node(node, SPECIAL_GROUP_NAMES['nav_axes'])
+            node = node.parent_node
+
+    def load_data(self, where: Union[Node, str], with_bkg=False, load_all=False) -> DataWithAxes:
+        """Load data from a node (or channel node)
+
+        Loaded data contains also nav_axes if any and with optional background subtraction
+
+        Parameters
+        ----------
+        where: Union[Node, str]
+            the path of a given node or the node itself
+        with_bkg: bool
+            If True will attempt to substract a background data node before loading
+        load_all: bool
+            If True, will load all data hanging from the same parent node
+
+        Returns
+        -------
+
+        """
+        node_data_type = DataType[self._h5saver.get_node(where).attrs['data_type']]
+        self._data_loader.data_type = node_data_type
+        data = self._data_loader.load_data(where, with_bkg=with_bkg, load_all=load_all)
+        if 'axis' not in node_data_type.name:
+            nav_group = self.get_nav_group(where)
+            if nav_group is not None:
+                nav_axes = self._axis_loader.get_axes(nav_group)
+                data.axes.extend(nav_axes)
+                data.get_dim_from_data_axes()
+                data.create_missing_axes()
+        return data
+
+    def load_all(self, where: GROUP, data: DataToExport, with_bkg=False) -> DataToExport:
+
+        where = self._h5saver.get_node(where)
+        children_dict = where.children()
+        data_list = []
+        for child in children_dict:
+            if isinstance(children_dict[child], GROUP):
+                self.load_all(children_dict[child], data, with_bkg=with_bkg)
+            elif 'data_type' in children_dict[child].attrs and 'data' in children_dict[child].attrs['data_type']:
+
+                data_list.append(self.load_data(children_dict[child].path, with_bkg=with_bkg, load_all=True))
+                break
+        data_tmp = DataToExport(name=where.name, data=data_list)
+        data.append(data_tmp)
+
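
For orientation, the following is a minimal usage sketch of the new data_saving module shown above; it is an illustration only and not part of the wheel diff. It assumes that H5Saver (imported here from pymodaq.utils.h5modules.saving) exposes init_file(), raw_group and close_file(), which are not visible in this hunk; everything else uses only the DataSaverLoader calls defined in the file.

import numpy as np

from pymodaq.utils.data import Axis, DataWithAxes
from pymodaq.utils.h5modules.saving import H5Saver
from pymodaq.utils.h5modules.data_saving import DataSaverLoader

h5saver = H5Saver()
h5saver.init_file(update_h5=True)   # assumed H5Saver call: creates/opens the target .h5 file
saver = DataSaverLoader(h5saver)

# One 1D channel with its spectral axis
axis = Axis(label='wavelength', units='nm', data=np.linspace(500., 600., 100), index=0)
dwa = DataWithAxes('spectrum', source='raw', dim='Data1D',
                   data=[np.random.rand(100)], axes=[axis])

# Creates a CARRAY node named Data00 (plus Axis00 for the axis) under the raw group
saver.add_data(h5saver.raw_group, dwa)

# Round trip: fetch the first 'data' node and rebuild a DataWithAxes from it
dwa_loaded = saver.load_data(saver.get_node_from_index(h5saver.raw_group, 0))

h5saver.close_file()                # assumed H5Saver call

The higher-level DataToExportSaver, DataToExportTimedSaver and DataToExportExtendedSaver classes wrap this same pattern, adding the Data1D/CH00 group hierarchy and, for scans and logging, the enlargeable or extended arrays described in their docstrings.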