pymodaq 4.4.6__py3-none-any.whl → 4.4.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pymodaq might be problematic.
- pymodaq/control_modules/daq_move.py +5 -2
- pymodaq/control_modules/daq_move_ui.py +12 -5
- pymodaq/control_modules/move_utility_classes.py +9 -5
- pymodaq/control_modules/utils.py +0 -1
- pymodaq/extensions/daq_scan.py +26 -4
- pymodaq/extensions/daq_scan_ui.py +1 -1
- pymodaq/extensions/pid/daq_move_PID.py +1 -1
- pymodaq/post_treatment/load_and_plot.py +43 -10
- pymodaq/resources/QtDesigner_Ressources/QtDesigner_ressources_rc.py +1 -1
- pymodaq/resources/VERSION +1 -1
- pymodaq/resources/config_template.toml +1 -0
- pymodaq/utils/data.py +59 -11
- pymodaq/utils/gui_utils/widgets/__init__.py +1 -1
- pymodaq/utils/gui_utils/widgets/spinbox.py +15 -2
- pymodaq/utils/h5modules/data_saving.py +23 -5
- pymodaq/utils/h5modules/module_saving.py +9 -4
- pymodaq/utils/managers/modules_manager.py +2 -2
- pymodaq/utils/managers/remote_manager.py +1 -1
- pymodaq/utils/parameter/pymodaq_ptypes/slide.py +1 -1
- pymodaq/utils/plotting/data_viewers/viewer2D.py +5 -0
- pymodaq/utils/slicing.py +3 -3
- pymodaq/utils/tcp_ip/serializer.py +73 -40
- {pymodaq-4.4.6.dist-info → pymodaq-4.4.9.dist-info}/METADATA +6 -4
- {pymodaq-4.4.6.dist-info → pymodaq-4.4.9.dist-info}/RECORD +28 -27
- {pymodaq-4.4.6.dist-info → pymodaq-4.4.9.dist-info}/WHEEL +1 -1
- pymodaq-4.4.9.dist-info/licenses/AUTHORS.md +46 -0
- {pymodaq-4.4.6.dist-info → pymodaq-4.4.9.dist-info}/entry_points.txt +0 -0
- {pymodaq-4.4.6.dist-info → pymodaq-4.4.9.dist-info}/licenses/LICENSE +0 -0
pymodaq/control_modules/daq_move.py
CHANGED

```diff
@@ -616,10 +616,12 @@ class DAQ_Move(ParameterControlModule):
         if ('°' in unit or 'degree' in unit) and not '°C' in unit:
             # special cas as pint base unit for angles are radians
             return '°'
-        elif 'W' in unit or 'watt' in unit:
+        elif 'W' in unit or 'watt' in unit.lower():
             return 'W'
         elif '°C' in unit or 'Celsius' in unit:
             return '°C'
+        elif 'V' in unit or 'volt' in unit.lower():
+            return 'V'
         else:
             return str(Q_(1, unit).to_base_units().units)
 
```
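The pint fallback in the last branch is why these symbols are special-cased: `to_base_units()` reduces derived units to SI base units, which is rarely what should be displayed next to an actuator value. A small illustrative sketch using pint directly (not pymodaq code):

```python
# Illustrative only (plain pint, not pymodaq): the fallback branch reduces any
# unit to SI base units, which gives long composite strings for common units,
# hence the special cases for '°', 'W', '°C' and now 'V'.
import pint

ureg = pint.UnitRegistry()
Q_ = ureg.Quantity

print(str(Q_(1, 'W').to_base_units().units))       # e.g. 'kilogram * meter ** 2 / second ** 3'
print(str(Q_(1, 'V').to_base_units().units))       # e.g. 'kilogram * meter ** 2 / ampere / second ** 3'
print(str(Q_(1, 'degree').to_base_units().units))  # 'radian'
```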
```diff
@@ -711,7 +713,8 @@ class DAQ_Move_Hardware(QObject):
         Uninitialize the stage closing the hardware.
 
         """
-        self.hardware.close()
+        if self.hardware is not None:
+            self.hardware.close()
 
         return "Stage uninitialized"
 
```
pymodaq/control_modules/daq_move_ui.py
CHANGED

```diff
@@ -15,7 +15,7 @@ from qtpy.QtWidgets import QHBoxLayout, QVBoxLayout, QGridLayout, QWidget, QTool
 
 from pymodaq.utils.daq_utils import ThreadCommand
 from pymodaq.utils.gui_utils.custom_app import CustomApp
-from pymodaq.utils.gui_utils.widgets import PushButtonIcon, LabelWithFont, SpinBox, QSpinBox_ro, QLED
+from pymodaq.utils.gui_utils.widgets import PushButtonIcon, LabelWithFont, SpinBox, QSpinBox_ro, QLED, QSpinBoxWithShortcut
 from pymodaq.control_modules.utils import ControlModuleUI
 from pymodaq.utils.gui_utils import DockArea
 from pymodaq.utils.plotting.data_viewers.viewer import ViewerDispatcher
@@ -199,9 +199,9 @@ class DAQ_Move_UI(ControlModuleUI):
         self.main_ui.layout().addWidget(self.toolbar, 0, 0, 1, 2)
         self.main_ui.layout().addWidget(self.move_toolbar, 1, 0, 1, 2)
 
-        self.abs_value_sb = SpinBox(step=0.1, dec=True, siPrefix=config('actuator', 'siprefix'))
+        self.abs_value_sb = QSpinBoxWithShortcut(step=0.1, dec=True, siPrefix=config('actuator', 'siprefix'))
         self.abs_value_sb.setStyleSheet("background-color : lightgreen; color: black")
-        self.abs_value_sb_2 = SpinBox(step=0.1, dec=True, siPrefix=config('actuator', 'siprefix'))
+        self.abs_value_sb_2 = QSpinBoxWithShortcut(step=0.1, dec=True, siPrefix=config('actuator', 'siprefix'))
         self.abs_value_sb_2.setStyleSheet("background-color : lightcoral; color: black")
         self.move_toolbar.addWidget(self.abs_value_sb)
         self.move_toolbar.addWidget(self.abs_value_sb_2)
@@ -227,7 +227,7 @@ class DAQ_Move_UI(ControlModuleUI):
         self.control_ui.layout().addWidget(LabelWithFont('Abs. Value'), 0, 0)
         self.find_home_pb = PushButtonIcon('home2', 'Find Home')
         self.control_ui.layout().addWidget(self.find_home_pb, 0, 1)
-        self.abs_value_sb_bis = SpinBox(step=0.1, dec=True, siPrefix=config('actuator', 'siprefix'))
+        self.abs_value_sb_bis = QSpinBoxWithShortcut(step=0.1, dec=True, siPrefix=config('actuator', 'siprefix'))
         self.control_ui.layout().addWidget(self.abs_value_sb_bis, 1, 0)
         self.move_abs_pb = PushButtonIcon('Move', 'Set Abs.',
                                           tip='Set the value of the actuator to the set absolute value')
@@ -236,7 +236,7 @@ class DAQ_Move_UI(ControlModuleUI):
         self.move_rel_plus_pb = PushButtonIcon('MoveUp', 'Set Rel. (+)')
         self.control_ui.layout().addWidget(self.move_rel_plus_pb, 2, 1)
 
-        self.rel_value_sb = SpinBox(step=0.1, dec=True, siPrefix=config('actuator', 'siprefix'))
+        self.rel_value_sb = QSpinBoxWithShortcut(step=0.1, dec=True, siPrefix=config('actuator', 'siprefix'), key_sequences=("Ctrl+Enter","Ctrl+Shift+Enter"),)
         self.control_ui.layout().addWidget(self.rel_value_sb, 3, 0)
         self.move_rel_minus_pb = PushButtonIcon('MoveDown', 'Set Rel. (-)')
         self.control_ui.layout().addWidget(self.move_rel_minus_pb, 3, 1)
@@ -302,9 +302,16 @@ class DAQ_Move_UI(ControlModuleUI):
         self.connect_action('show_config', lambda: self.command_sig.emit(ThreadCommand('show_config', )))
 
         self.move_abs_pb.clicked.connect(lambda: self.emit_move_abs(self.abs_value_sb_bis))
+        self.abs_value_sb.shortcut["Ctrl+Enter"].activated.connect(lambda: self.emit_move_abs(self.abs_value_sb))
+        self.abs_value_sb_2.shortcut["Ctrl+Enter"].activated.connect(lambda: self.emit_move_abs(self.abs_value_sb_2))
+        self.abs_value_sb_bis.shortcut["Ctrl+Enter"].activated.connect(lambda: self.emit_move_abs(self.abs_value_sb_bis))
 
         self.rel_value_sb.valueChanged.connect(lambda: self.command_sig.emit(
             ThreadCommand('rel_value', self.rel_value_sb.value())))
+
+        self.rel_value_sb.shortcut["Ctrl+Enter"].activated.connect(lambda: self.emit_move_rel('+'))
+        self.rel_value_sb.shortcut["Ctrl+Shift+Enter"].activated.connect(lambda: self.emit_move_rel('-'))
+
         self.move_rel_plus_pb.clicked.connect(lambda: self.emit_move_rel('+'))
         self.move_rel_minus_pb.clicked.connect(lambda: self.emit_move_rel('-'))
 
```
pymodaq/control_modules/move_utility_classes.py
CHANGED

```diff
@@ -67,11 +67,11 @@ class DataActuatorType(BaseEnum):
 def comon_parameters(epsilon=config('actuator', 'epsilon_default'),
                      epsilons=None):
     if epsilons is not None:
-        epsilon=epsilons
+        epsilon = epsilons
     if isinstance(epsilon, list):
-        epsilon=epsilon[0]
+        epsilon = epsilon[0]
     elif isinstance(epsilon, dict):
-        epsilon=epsilon[list[epsilon.keys()][0]]
+        epsilon = epsilon[list[epsilon.keys()][0]]
 
     return [{'title': 'Units:', 'name': 'units', 'type': 'str', 'value': '', 'readonly': True},
             {'title': 'Epsilon:', 'name': 'epsilon', 'type': 'float',
@@ -124,7 +124,7 @@ def comon_parameters_fun(is_multiaxes=False, axes_names=None,
 
     Parameters
     ----------
-    is_multiaxes: bool
+    is_multiaxes: bool
        If True, display the particular settings to define which axis the controller is driving
     axes_names: deprecated, use axis_names
     axis_names: list of str or dictionnary of string as key and integer as value
@@ -140,7 +140,7 @@ def comon_parameters_fun(is_multiaxes=False, axes_names=None,
        axes_names = ['']
        axis_names = axes_names
 
-    is_multiaxes = len(axis_names) > 1
+    is_multiaxes = len(axis_names) > 1 or is_multiaxes
    if isinstance(axis_names, list):
        if len(axis_names) > 0:
            axis_name = axis_names[0]
@@ -617,6 +617,10 @@ class DAQ_Move_base(QObject):
         else:
             raise NotImplementedError
 
+
+    def close(self):
+        raise NotImplementedError
+
     def move_abs(self, value: Union[float, DataActuator]):
         if hasattr(self, 'move_Abs'):
             deprecation_msg('move_Abs method in plugins is deprecated, use move_abs', 3)
```
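The new `close()` stub in `DAQ_Move_base` makes explicit that actuator plugins are expected to provide their own `close()`, which `DAQ_Move_Hardware` now calls behind the `if self.hardware is not None` guard shown above. A minimal, hypothetical plugin sketch (class and attribute names are illustrative only):

```python
# Hypothetical actuator plugin sketch (class and attribute names are made up):
# the point is simply that a plugin now overrides close() explicitly instead of
# relying on the base class, whose new default raises NotImplementedError.
from pymodaq.control_modules.move_utility_classes import DAQ_Move_base


class DAQ_Move_MyStage(DAQ_Move_base):

    def close(self):
        # release the hardware resource held by the plugin
        if getattr(self, 'controller', None) is not None:
            self.controller = None
```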
pymodaq/control_modules/utils.py
CHANGED

```diff
@@ -358,7 +358,6 @@ class ParameterControlModule(ParameterManager, ControlModule):
     listener_class: Type[ActorListener] = ActorListener
 
     def __init__(self, **kwargs):
-        QObject.__init__(self)
         ParameterManager.__init__(self, action_list=('save', 'update'))
         ControlModule.__init__(self)
 
```
pymodaq/extensions/daq_scan.py
CHANGED

```diff
@@ -82,7 +82,12 @@ class DAQScan(QObject, ParameterManager):
         {'title': 'Timeout (ms)', 'name': 'timeout', 'type': 'int', 'value': 10000},
         ]},
         {'title': 'Scan options', 'name': 'scan_options', 'type': 'group', 'children': [
-            {'title': 'Naverage:', 'name': 'scan_average', 'type': 'int',
+            {'title': 'Naverage:', 'name': 'scan_average', 'type': 'int',
+             'value': config('scan', 'Naverage'), 'min': 1},
+            {'title': 'Plot on top:', 'name': 'average_on_top', 'type': 'bool',
+             'value': config('scan', 'average_on_top'),
+             'tip': 'At the second iteration will plot the averaged scan on top (True) of the current one'
+                    'or in a second panel (False)'},
         ]},
 
         {'title': 'Plotting options', 'name': 'plot_options', 'type': 'group', 'children': [
@@ -629,6 +634,10 @@ class DAQScan(QObject, ParameterManager):
             viewers_enum.extend([ViewersEnum('Data1D').increase_dim(self.scanner.n_axes)
                                  for _ in range(len(self.settings['plot_options', 'plot_1d']['selected']))])
             data_names.extend(self.settings['plot_options', 'plot_1d']['selected'][:])
+        if not self.settings['scan_options', 'average_on_top']:
+
+            viewers_enum = viewers_enum + viewers_enum
+            data_names = data_names + [f'{data_name}_averaged' for data_name in data_names]
         self.live_plotter.prepare_viewers(viewers_enum, viewers_name=data_names)
 
     def update_status(self, txt: str, wait_time=0):
@@ -668,6 +677,7 @@ class DAQScan(QObject, ParameterManager):
                 self.ui.set_scan_step_average(status.attribute[1] + 1)
 
         elif status.command == "Scan_done":
+
             self.modules_manager.reset_signals()
             self.live_timer.stop()
             self.ui.set_scan_done()
@@ -732,6 +742,7 @@ class DAQScan(QObject, ParameterManager):
             self.live_plotter.load_plot_data(group_0D=self.settings['plot_options', 'group0D'],
                                              average_axis=average_axis,
                                              average_index=self.ind_average,
+                                             separate_average= not self.settings['scan_options', 'average_on_top'],
                                              target_at=self.scanner.positions[self.ind_scan],
                                              last_step=(self.ind_scan ==
                                                         self.scanner.positions.size - 1 and
@@ -765,6 +776,8 @@ class DAQScan(QObject, ParameterManager):
             messagebox(text="There are not enough or too much selected move modules for this scan")
             return False
 
+        ## TODO the stuff about adaptive scans have to be moved into a dedicated extension. M
+        ## Most similat to the Bayesian one!
         if self.scanner.scan_sub_type == 'Adaptive':
             #todo include this in scanners objects for the adaptive scanners
             if len(self.modules_manager.get_selected_probed_data('0D')) == 0:
@@ -852,9 +865,15 @@ class DAQScan(QObject, ParameterManager):
         self.save_metadata(scan_node, 'scan_info')
 
         self._init_live()
+        Naverage = self.settings['scan_options', 'scan_average']
+        if Naverage > 1:
+            scan_shape = [Naverage]
+            scan_shape.extend(self.scanner.get_scan_shape())
+        else:
+            scan_shape = self.scanner.get_scan_shape()
         for det in self.modules_manager.detectors:
             det.module_and_data_saver = (
-                module_saving.DetectorExtendedSaver(det,
+                module_saving.DetectorExtendedSaver(det, scan_shape))
             self.module_and_data_saver.h5saver = self.h5saver # force the update as the h5saver ill also be set on each detectors
 
         # mandatory to deal with multithreads
@@ -1034,7 +1053,7 @@ class DAQScanAcquisition(QObject):
 
     def start_acquisition(self):
         try:
-            #todo hoaw to apply newlayout to adaptive mode?
+            #todo hoaw to apply newlayout to adaptive mode? => cannot has to be a new extension
 
             self.modules_manager.connect_actuators()
             self.modules_manager.connect_detectors()
@@ -1106,6 +1125,7 @@ class DAQScanAcquisition(QObject):
             # daq_scan wait time
             QThread.msleep(self.scan_settings.child('time_flow', 'wait_time').value())
 
+            self.modules_manager.timeout_signal.disconnect()
             self.modules_manager.connect_actuators(False)
             self.modules_manager.connect_detectors(False)
 
@@ -1113,6 +1133,7 @@ class DAQScanAcquisition(QObject):
                                                    attribute="Acquisition has finished"))
             self.status_sig.emit(utils.ThreadCommand("Scan_done"))
 
+
         except Exception as e:
             logger.exception(str(e))
 
@@ -1148,7 +1169,8 @@ class DAQScanAcquisition(QObject):
                 full_names: list = self.scan_settings['plot_options', 'plot_0d']['selected'][:]
                 full_names.extend(self.scan_settings['plot_options', 'plot_1d']['selected'][:])
                 data_temp = det_done_datas.get_data_from_full_names(full_names, deepcopy=False)
-
+                n_nav_axis_selection = 2-len(indexes) + 1 if self.Naverage > 1 else 2-len(indexes)
+                data_temp = data_temp.get_data_with_naxes_lower_than(n_nav_axis_selection) # maximum Data2D included nav indexes
 
                 self.scan_data_tmp.emit(ScanDataTemp(self.ind_scan, indexes, data_temp))
 
```
pymodaq/extensions/daq_scan_ui.py
CHANGED

```diff
@@ -114,7 +114,7 @@ class DAQScanUI(CustomApp, ViewerDispatcher):
 
         settings_widget = QtWidgets.QWidget()
         settings_widget.setLayout(QtWidgets.QVBoxLayout())
-        settings_widget.setMinimumWidth(220)
+        #settings_widget.setMinimumWidth(220)
 
         splitter_v_widget.addWidget(self.module_widget)
         splitter_v_widget.addWidget(self.plotting_widget)
```
pymodaq/post_treatment/load_and_plot.py
CHANGED

```diff
@@ -67,7 +67,7 @@ class LoaderPlotter:
     def load_data(self, filter_dims: List[Union[DataDim, str]] = None,
                   filter_full_names: List[str] = None, remove_navigation: bool = True,
                   group_0D=False, average_axis: int=None, average_index: int = 0,
-                  last_step=False):
+                  last_step=False, separate_average=False):
         """Load Data from the h5 node of the dataloader and apply some filtering/manipulation before
         plotting
 
@@ -89,6 +89,8 @@ class LoaderPlotter:
             which step in the averaging process are we in.
         last_step: bool
             tells if this is the very last step of the (averaged) scan
+        separate_average: bool
+            Tells if the averaged data are to be plotted on the same data viewer panel or another one
 
         Returns
         -------
@@ -99,7 +101,8 @@ class LoaderPlotter:
         self.dataloader.load_all('/', self._data)
 
         if average_axis is not None:
-            self.average_axis(average_axis, average_index, last_step=last_step)
+            self.average_axis(average_axis, average_index, last_step=last_step,
+                              separate_average=separate_average)
 
         if filter_dims is not None:
             filter_dims[:] = [enum_checker(DataDim, dim) for dim in filter_dims]
@@ -110,14 +113,15 @@ class LoaderPlotter:
                                  filter_full_names]
 
         if group_0D: # 0D initial data
-            self.group_0D_data()
+            self.group_0D_data(separate_average=separate_average)
 
         if remove_navigation:
             self.remove_navigation_axes()
 
         return self._data
 
-    def average_axis(self, average_axis, average_index, last_step=False
+    def average_axis(self, average_axis, average_index, last_step=False,
+                     separate_average=False) -> None:
         """ Average the data along their average axis
 
         Parameters
@@ -129,7 +133,12 @@ class LoaderPlotter:
             which step in the averaging process are we in.
         last_step: bool
             tells if this is the very last step of the (averaged) scan
+        separate_average: bool
+            Tells if the averaged data are to be plotted on the same data viewer panel or another one
+
         """
+        if separate_average and average_index > 0:
+            averaged_data = DataToExport('Averaged')
         for ind, data in enumerate(self._data):
             current_data = data.inav[average_index, ...]
             if average_index > 0:
@@ -140,10 +149,16 @@ class LoaderPlotter:
                     data_to_append = data.inav[0, ...]
                 else:
                     data_to_append = data.inav[0:average_index, ...].mean(axis=average_axis)
-
+                data_to_append.name = f'{data_to_append.name}_averaged'
                 data_to_append.labels = [f'{label}_averaged' for label in data_to_append.labels]
-                current_data.append(data_to_append)
+                if not (separate_average and average_index > 0):
+                    current_data.append(data_to_append)
+                else:
+                    averaged_data.append(data_to_append)
             self._data[ind] = current_data
+        if separate_average and average_index > 0:
+            self._data.append(averaged_data.data)
+
 
     def remove_navigation_axes(self):
         """Make the navigation axes as signal axes
```
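The block above maintains a running mean: at iteration `average_index` the "averaged" trace is the mean of the iterations already acquired (`data.inav[0:average_index, ...].mean(axis=average_axis)`). A standalone numpy sketch of the same idea, outside pymodaq's data objects (array names are illustrative):

```python
# Standalone numpy sketch of the running average used for live plotting:
# 'scans' holds one entry per completed scan iteration; at iteration k the
# plotted "averaged" trace is simply the mean of iterations 0..k-1.
import numpy as np

rng = np.random.default_rng(0)
scans = np.stack([np.sin(np.linspace(0, 2 * np.pi, 50)) + 0.1 * rng.normal(size=50)
                  for _ in range(5)])          # shape (Naverage, npts); Naverage is the average axis

average_index = 3                              # currently acquiring iteration #3 (0-based)
current = scans[average_index]                 # equivalent of data.inav[average_index, ...]
averaged = scans[0:average_index].mean(axis=0) # equivalent of data.inav[0:average_index, ...].mean(...)
```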
```diff
@@ -156,18 +171,27 @@ class LoaderPlotter:
                 data.transpose() # because usual ND data should be plotted here as 2D with the nav axes as the minor
                 # (horizontal)
 
-    def group_0D_data(self):
+    def group_0D_data(self, separate_average=False):
         """Group in a single DataFromPlugins all data that are initialy Data0D
 
         """
         data = self._data.get_data_from_sig_axes(0)
         if len(data) > 0:
             data0D_arrays = []
+            data0D_arrays_averaged = []
             labels = []
+            labels_averaged = []
             for dwa in data:
-                data0D_arrays.extend(dwa.data)
-                labels.extend([f'{dwa.get_full_name()}/{label}' for label in dwa.labels])
-                self._data.remove(dwa)
+                if 'averaged' in dwa.name and separate_average:
+                    data0D_arrays_averaged.extend(dwa.data)
+                    labels_averaged.extend([f'{dwa.get_full_name()}/{label}' for label in dwa.labels])
+                    self._data.remove(dwa)
+
+
+                else:
+                    data0D_arrays.extend(dwa.data)
+                    labels.extend([f'{dwa.get_full_name()}/{label}' for label in dwa.labels])
+                    self._data.remove(dwa)
 
             data0D = DataFromPlugins(self.grouped_data0D_fullname.split('/')[1],
                                      data=data0D_arrays, labels=labels,
@@ -176,6 +200,15 @@ class LoaderPlotter:
                                      axes=dwa.axes, nav_indexes=dwa.nav_indexes,
                                      )
             self._data.append(data0D)
+            if 'averaged' in dwa.name and separate_average:
+                data0D_averaged = DataFromPlugins(
+                    f"{self.grouped_data0D_fullname.split('/')[1]}_averaged",
+                    data=data0D_arrays_averaged, labels=labels_averaged,
+                    dim='DataND',
+                    origin=self.grouped_data0D_fullname.split('/')[0],
+                    axes=dwa.axes, nav_indexes=dwa.nav_indexes,
+                    )
+                self._data.append(data0D_averaged)
 
     def load_plot_data(self, **kwargs):
         """Load and plot all data from the current H5Saver
```
pymodaq/resources/VERSION
CHANGED

```diff
@@ -1,2 +1,2 @@
-version = '4.4.6'
+version = '4.4.9'
 
```
pymodaq/utils/data.py
CHANGED

```diff
@@ -20,7 +20,6 @@ from time import time
 import copy
 import pint
 from multipledispatch import dispatch
-import pymodaq
 from pymodaq.utils.enums import BaseEnum, enum_checker
 from pymodaq.utils.messenger import deprecation_msg
 from pymodaq.utils.daq_utils import find_objects_in_list_from_attr_name_val
@@ -98,6 +97,7 @@ class DataUnitError(Exception):
 
 
 class DwaType(BaseEnum):
+    """Different types of `DataWithAxes`."""
     DataWithAxes = 0
     DataRaw = 1
     DataActuator = 2
@@ -156,6 +156,24 @@ class DataDistribution(BaseEnum):
     spread = 1
 
 
+def _compute_slices_from_axis(axis: Axis, _slice, *ignored, is_index=True, **ignored_also):
+    if not is_index:
+        if isinstance(_slice, numbers.Number):
+            if not is_index:
+                _slice = axis.find_index(_slice)
+        elif _slice is Ellipsis:
+            return _slice
+        elif isinstance(_slice, slice):
+            if not (_slice.start is None and
+                    _slice.stop is None and _slice.step is None):
+                start = axis.find_index(
+                    _slice.start if _slice.start is not None else axis.get_data()[0])
+                stop = axis.find_index(
+                    _slice.stop if _slice.stop is not None else axis.get_data()[-1])
+                _slice = slice(start, stop)
+    return _slice
+
+
 class Axis:
     """Object holding info and data about physical axis of some data
 
```
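The new module-level helper `_compute_slices_from_axis` (used below through the `is_index` flag of `_compute_slices` and `_slicer`) turns slices expressed in axis values into index slices via `Axis.find_index`. A standalone numpy sketch of that value-to-index conversion, assuming a monotonic 1D axis rather than pymodaq's actual `Axis` class:

```python
# Standalone sketch of value-based slicing on a monotonic 1D axis:
# values (e.g. positions in mm) are mapped to indexes before slicing the data,
# which is what the new is_index=False path does through Axis.find_index.
import numbers
import numpy as np

def value_slice_to_index_slice(axis_values: np.ndarray, item):
    """Convert a number or slice given in axis units into an index or index slice."""
    def find_index(value):
        return int(np.argmin(np.abs(axis_values - value)))  # closest sample

    if isinstance(item, numbers.Number):
        return find_index(item)
    if isinstance(item, slice):
        start = find_index(item.start) if item.start is not None else 0
        stop = find_index(item.stop) if item.stop is not None else len(axis_values) - 1
        return slice(start, stop)
    return item  # Ellipsis or anything else is passed through unchanged

axis = np.linspace(0., 10., 101)   # axis values: 0.0, 0.1, ..., 10.0
data = np.sin(axis)
print(value_slice_to_index_slice(axis, 2.5))                           # -> 25
print(data[value_slice_to_index_slice(axis, slice(2.0, 5.0))].shape)   # -> (30,)
```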
```diff
@@ -366,8 +384,9 @@ class Axis:
     def __len__(self):
         return self.size
 
-    def _compute_slices(self,
-
+    def _compute_slices(self, _slice, *ignored, is_index=True, **ignored_also):
+        _slice = _compute_slices_from_axis(self, _slice, is_index=is_index)
+        return _slice, _slice
 
     def _slicer(self, _slice, *ignored, **ignored_also):
         ax: Axis = copy.deepcopy(self)
@@ -2187,11 +2206,22 @@ class DataWithAxes(DataBase):
             axes.append(ax)
         self.axes = axes
 
-    def _compute_slices(self, slices, is_navigation=True):
+    def _compute_slices(self, slices, is_navigation=True, is_index=True):
         """Compute the total slice to apply to the data
 
         Filling in Ellipsis when no slicing should be done
+        Parameters
+        ----------
+        slices: List of slice
+        is_navigation: bool
+        is_index: bool
+            if False, the slice are on the values of the underlying axes
+        Returns
+        -------
+        list(slice): the computed slices as index (eventually for all axes)
+        list(slice): a version as index of the input argument
         """
+        _slices_as_index = []
         if isinstance(slices, numbers.Number) or isinstance(slices, slice):
             slices = [slices]
         if is_navigation:
@@ -2202,13 +2232,29 @@ class DataWithAxes(DataBase):
         slices = list(slices)
         for ind in range(len(self.shape)):
             if ind in indexes:
-
+                _slice = slices.pop(0)
+                if not is_index:
+                    axis = self.get_axis_from_index(ind)[0]
+                    _slice = _compute_slices_from_axis(axis, _slice, is_index=is_index)
+                _slices_as_index.append(_slice)
+                total_slices.append(_slice)
             elif len(total_slices) == 0:
                 total_slices.append(Ellipsis)
             elif not (Ellipsis in total_slices and total_slices[-1] is Ellipsis):
                 total_slices.append(slice(None))
+            if len(slices) == 0 and self.distribution == DataDistribution.uniform and is_navigation:
+                if total_slices[-1] is Ellipsis:
+                    for ind in range(len(total_slices), len(indexes)):
+                        _slices_as_index.append(slice(None))
+                else:
+                    for ind in range(len(total_slices), len(indexes)):
+                        _slices_as_index.insert(0, Ellipsis)
+                for ind in range(len(indexes), len(self.shape)):
+                    total_slices.append(slice(None))
+
+                break
         total_slices = tuple(total_slices)
-        return total_slices
+        return total_slices, _slices_as_index
 
     def check_squeeze(self, total_slices: List[slice], is_navigation: bool):
 
@@ -2220,7 +2266,7 @@ class DataWithAxes(DataBase):
             do_squeeze = False
         return do_squeeze
 
-    def _slicer(self, slices, is_navigation=True):
+    def _slicer(self, slices, is_navigation=True, is_index=True):
         """Apply a given slice to the data either navigation or signal dimension
 
         Parameters
@@ -2229,6 +2275,8 @@ class DataWithAxes(DataBase):
             the slices to apply to the data
         is_navigation: bool
             if True apply the slices to the navigation dimension else to the signal ones
+        is_index: bool
+            if True the slices are indexes otherwise the slices are axes values to be indexed first
 
         Returns
         -------
@@ -2236,10 +2284,10 @@ class DataWithAxes(DataBase):
         Object of the same type as the initial data, derived from DataWithAxes. But with lower
         data size due to the slicing and with eventually less axes.
         """
-
         if isinstance(slices, numbers.Number) or isinstance(slices, slice):
             slices = [slices]
-
+
+        total_slices, slices = self._compute_slices(slices, is_navigation, is_index=is_index)
 
         do_squeeze = self.check_squeeze(total_slices, is_navigation)
         new_arrays_data = [squeeze(dat[total_slices], do_squeeze) for dat in self.data]
@@ -2289,11 +2337,11 @@ class DataWithAxes(DataBase):
         if len(nav_indexes) != 0:
             distribution = self.distribution
         else:
-            distribution = DataDistribution
+            distribution = DataDistribution.uniform
 
         data = DataWithAxes(self.name, data=new_arrays_data, nav_indexes=tuple(nav_indexes),
                             axes=axes,
-                            source=
+                            source=DataSource.calculated, origin=self.origin,
                             labels=self.labels[:],
                             distribution=distribution)
         return data
```
pymodaq/utils/gui_utils/widgets/spinbox.py
CHANGED

```diff
@@ -1,4 +1,4 @@
-from qtpy import QtWidgets, QtGui
+from qtpy import QtWidgets, QtGui,QtCore
 from pyqtgraph.widgets.SpinBox import SpinBox
 
 
@@ -15,7 +15,20 @@ class SpinBox(SpinBox):
         self.setFont(font)
         self.setMinimumHeight(min_height)
 
-
+class QSpinBoxWithShortcut(SpinBox):
+    """
+    QSpinBox but which accepts key sequences and store them as attribute
+    For now, it does not apply to regular input such as text or numerics.
+    """
+    def __init__(self, *args, key_sequences=("Ctrl+Enter",), **kwargs):
+        super().__init__(*args, **kwargs)
+
+        self.shortcut = dict() #Store shortcuts in a dictionnary
+        for key_sequence in key_sequences:
+            shortcut = QtWidgets.QShortcut(QtGui.QKeySequence(key_sequence), self)
+            shortcut.setContext(QtCore.Qt.ShortcutContext.WidgetWithChildrenShortcut)
+            self.shortcut[key_sequence] = shortcut
+
 class QSpinBox_ro(SpinBox):
     def __init__(self, *args, readonly=True, **kwargs):
         super().__init__(*args, **kwargs)
```
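The stored shortcuts are plain `QShortcut` objects, so callers connect to their `activated` signal, which is exactly what daq_move_ui.py does above. A minimal standalone usage sketch (the printed actions are illustrative only):

```python
# Minimal usage sketch: each key sequence passed at construction is available in
# the widget's `shortcut` dict as a QShortcut whose `activated` signal can be
# connected; the connected actions below are illustrative only.
import sys
from qtpy import QtWidgets
from pymodaq.utils.gui_utils.widgets import QSpinBoxWithShortcut

app = QtWidgets.QApplication(sys.argv)

sb = QSpinBoxWithShortcut(key_sequences=("Ctrl+Enter", "Ctrl+Shift+Enter"))
sb.shortcut["Ctrl+Enter"].activated.connect(lambda: print('apply value:', sb.value()))
sb.shortcut["Ctrl+Shift+Enter"].activated.connect(lambda: print('apply negated value:', -sb.value()))

sb.show()
sys.exit(app.exec_())
```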
pymodaq/utils/h5modules/data_saving.py
CHANGED

```diff
@@ -58,8 +58,12 @@ class DataManagement(metaclass=ABCMeta):
         self.close_file()
 
     def close_file(self):
+        self._h5saver.flush()
         self._h5saver.close_file()
 
+    def close(self):
+        self.close_file()
+
     def _get_next_node_name(self, where: Union[str, Node]) -> str:
         """Get the formatted next node name given the ones already saved
 
@@ -600,7 +604,7 @@ class DataEnlargeableSaver(DataSaverLoader):
         self._axis_saver.add_axis(where, axis)
 
     def add_data(self, where: Union[Node, str], data: DataWithAxes,
-                 axis_values: Iterable[float] = None):
+                 axis_values: Iterable[float] = None, **kwargs):
         """ Append data to an enlargeable array node
 
         Data of dim (0, 1 or 2) will be just appended to the enlargeable array.
@@ -624,17 +628,31 @@ class DataEnlargeableSaver(DataSaverLoader):
         if self.get_last_node_name(where) is None:
             if len(data.nav_indexes) == 0:
                 data_init = data
+            elif len(data.nav_indexes) == 1: # special case of DataND data
+                data_init = data.inav[0]
+                add_enl_axes = True
+                axis = data.get_axis_from_index(data.nav_indexes[0])[0]
+                axis_values = (axis.get_data(),)
+                self._enl_axis_names = (axis.label,)
+                self._enl_axis_units = (axis.units,)
             else:
                 raise DataDimError('It is not possible to append DataND')
             self._create_data_arrays(where, data_init, save_axes=True, add_enl_axes=add_enl_axes)
+        elif len(data.nav_indexes) == 1: # special case of DataND data
+            add_enl_axes = True
+            axis = data.get_axis_from_index(data.nav_indexes[0])[0]
+            axis_values = (axis.get_data(),)
 
         for ind_data in range(len(data)):
             array: EARRAY = self.get_node_from_index(where, ind_data)
             array.append(data[ind_data])
-        if add_enl_axes
+        if add_enl_axes:
             for ind_axis in range(self._n_enl_axes):
                 axis_array: EARRAY = self._axis_saver.get_node_from_index(where, ind_axis)
-
+                if not isinstance(axis_values[ind_axis], np.ndarray):
+                    axis_array.append(np.array([axis_values[ind_axis]]))
+                else:
+                    axis_array.append(axis_values[ind_axis], expand=False)
                 axis_array.attrs['size'] += 1
 
 
```
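The DataND branch above appends a whole 1D block (the navigation axis of the DataND acquisition) to the enlargeable axis array in one call, whereas scalar steps keep appending one value at a time. A pure-numpy sketch of that growth pattern, not using pymodaq's EARRAY API (buffer names are illustrative):

```python
# Pure-numpy sketch of an "enlargeable" axis buffer: scalars are appended one
# by one, while ndarray values (the DataND navigation axis) extend the buffer
# by a whole block, mirroring the scalar vs expand=False branches above.
import numpy as np

def append_axis_values(axis_buffer: np.ndarray, value) -> np.ndarray:
    if not isinstance(value, np.ndarray):
        return np.concatenate((axis_buffer, np.array([value])))  # one scalar per call
    return np.concatenate((axis_buffer, value))                  # a whole DataND axis at once

buffer = np.empty((0,))
buffer = append_axis_values(buffer, 0.5)                   # scalar step -> size 1
buffer = append_axis_values(buffer, np.linspace(0, 1, 5))  # DataND axis -> size 6
print(buffer.shape)  # (6,)
```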
```diff
@@ -862,7 +880,7 @@ class DataToExportEnlargeableSaver(DataToExportSaver):
     def add_data(self, where: Union[Node, str], data: DataToExport,
                  axis_values: List[Union[float, np.ndarray]] = None,
                  axis_value: Union[float, np.ndarray] = None,
-                 settings_as_xml='', metadata=None,
+                 settings_as_xml='', metadata=None, **kwargs
                  ):
         """
 
@@ -885,7 +903,7 @@ class DataToExportEnlargeableSaver(DataToExportSaver):
         if axis_values is None and axis_value is not None:
             axis_values = [axis_value]
 
-        super().add_data(where, data, settings_as_xml, metadata)
+        super().add_data(where, data, settings_as_xml, metadata, **kwargs)
         # a parent navigation group (same for all data nodes)
 
         where = self._get_node(where)
```