pymodaq 4.1.0__py3-none-any.whl → 4.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pymodaq might be problematic.
- pymodaq/control_modules/daq_move.py +4 -4
- pymodaq/extensions/daq_scan.py +26 -8
- pymodaq/post_treatment/load_and_plot.py +44 -21
- pymodaq/resources/VERSION +1 -1
- pymodaq/utils/data.py +7 -0
- pymodaq/utils/h5modules/data_saving.py +54 -34
- pymodaq/utils/h5modules/h5logging.py +4 -47
- pymodaq/utils/h5modules/module_saving.py +16 -17
- pymodaq/utils/h5modules/saving.py +7 -21
- pymodaq/utils/managers/modules_manager.py +2 -2
- pymodaq/utils/parameter/pymodaq_ptypes/itemselect.py +30 -26
- pymodaq/utils/plotting/data_viewers/viewerND.py +39 -24
- pymodaq/utils/scanner/scanners/_1d_scanners.py +2 -1
- {pymodaq-4.1.0.dist-info → pymodaq-4.1.1.dist-info}/METADATA +1 -1
- {pymodaq-4.1.0.dist-info → pymodaq-4.1.1.dist-info}/RECORD +18 -18
- {pymodaq-4.1.0.dist-info → pymodaq-4.1.1.dist-info}/WHEEL +0 -0
- {pymodaq-4.1.0.dist-info → pymodaq-4.1.1.dist-info}/entry_points.txt +0 -0
- {pymodaq-4.1.0.dist-info → pymodaq-4.1.1.dist-info}/licenses/LICENSE +0 -0
pymodaq/control_modules/daq_move.py CHANGED

@@ -172,7 +172,7 @@ class DAQ_Move(ParameterManager, ControlModule):
         elif cmd.command == 'rel_value':
             self._relative_value = cmd.attribute

-    def append_data(self,
+    def append_data(self, dte: DataToExport = None, where: Union[Node, str] = None):
         """Appends current DataToExport to an ActuatorEnlargeableSaver

         Parameters
@@ -183,9 +183,9 @@ class DAQ_Move(ParameterManager, ControlModule):
         --------
         ActuatorEnlargeableSaver
         """
-        if
-
-        self._add_data_to_saver(
+        if dte is None:
+            dte = DataToExport(name=self.title, data=[self._current_value])
+        self._add_data_to_saver(dte, where=where)
         # todo: test this for logging

     def _add_data_to_saver(self, data: DataToExport, where=None, **kwargs):
pymodaq/extensions/daq_scan.py CHANGED

@@ -70,7 +70,8 @@ class DAQScan(QObject, ParameterManager):
     live_data_1D_signal = Signal(list)

     params = [
-        {'title': 'Time Flow:', 'name': 'time_flow', 'type': 'group', 'expanded': False,
+        {'title': 'Time Flow:', 'name': 'time_flow', 'type': 'group', 'expanded': False,
+         'children': [
             {'title': 'Wait time step (ms)', 'name': 'wait_time', 'type': 'int', 'value': 0,
              'tip': 'Wait time in ms after each step of acquisition (move and grab)'},
             {'title': 'Wait time between (ms)', 'name': 'wait_time_between', 'type': 'int',
@@ -85,11 +86,13 @@
         {'title': 'Plotting options', 'name': 'plot_options', 'type': 'group', 'children': [
             {'title': 'Get data', 'name': 'plot_probe', 'type': 'bool_push'},
             {'title': 'Group 0D data:', 'name': 'group0D', 'type': 'bool', 'value': True},
-            {'title': 'Plot 0Ds:', 'name': 'plot_0d', 'type': 'itemselect'},
-            {'title': 'Plot 1Ds:', 'name': 'plot_1d', 'type': 'itemselect'},
+            {'title': 'Plot 0Ds:', 'name': 'plot_0d', 'type': 'itemselect', 'checkbox': True},
+            {'title': 'Plot 1Ds:', 'name': 'plot_1d', 'type': 'itemselect', 'checkbox': True},
             {'title': 'Prepare Viewers', 'name': 'prepare_viewers', 'type': 'bool_push'},
-            {'title': 'Plot at each step?', 'name': 'plot_at_each_step', 'type': 'bool',
-
+            {'title': 'Plot at each step?', 'name': 'plot_at_each_step', 'type': 'bool',
+             'value': True},
+            {'title': 'Refresh Plots (ms)', 'name': 'refresh_live', 'type': 'int',
+             'value': 1000, 'visible': False},
         ]},
     ]

@@ -679,6 +682,14 @@
     def save_temp_live_data(self, scan_data: ScanDataTemp):
         if scan_data.scan_index == 0:
             nav_axes = self.scanner.get_nav_axes()
+            Naverage = self.settings['scan_options', 'scan_average']
+            if Naverage > 1:
+                for nav_axis in nav_axes:
+                    nav_axis.index += 1
+                nav_axes.append(data_mod.Axis('Average',
+                                              data=np.linspace(0, Naverage - 1, Naverage),
+                                              index=0))
+
             self.extended_saver.add_nav_axes(self.h5temp.raw_group, nav_axes)

         self.extended_saver.add_data(self.h5temp.raw_group, scan_data.data, scan_data.indexes,
@@ -694,8 +705,14 @@
         average_axis = None
         try:
             self.live_plotter.load_plot_data(group_0D=self.settings['plot_options', 'group0D'],
-                                             average_axis=average_axis,
-
+                                             average_axis=average_axis,
+                                             average_index=self.ind_average,
+                                             target_at=self.scanner.positions[self.ind_scan],
+                                             last_step=(self.ind_scan ==
+                                                        self.scanner.positions.size - 1 and
+                                                        self.ind_average ==
+                                                        self.settings[
+                                                            'scan_options', 'scan_average'] - 1))
         except Exception as e:
             logger.exception(str(e))
         #################
@@ -1100,7 +1117,8 @@ class DAQScanAcquisition(QObject):
         if self.Naverage > 1:
             for nav_axis in nav_axes:
                 nav_axis.index += 1
-            nav_axes.append(data_mod.Axis('Average', data=np.linspace(0, self.Naverage - 1,
+            nav_axes.append(data_mod.Axis('Average', data=np.linspace(0, self.Naverage - 1,
+                                                                      self.Naverage),
                                           index=0))
         self.module_and_data_saver.add_nav_axes(nav_axes)

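The averaging bookkeeping added above boils down to prepending an extra 'Average' navigation axis of length Naverage and flagging the very last step of the averaged scan. A minimal numpy-only sketch of that arithmetic (Naverage, n_positions, ind_scan and ind_average are illustrative stand-ins, not the actual DAQScan attributes):

import numpy as np

Naverage = 3      # number of scan averages (illustrative)
n_positions = 5   # number of scan positions (illustrative)

# extra navigation axis inserted at index 0 when averaging is enabled
average_axis_data = np.linspace(0, Naverage - 1, Naverage)  # -> [0., 1., 2.]

for ind_average in range(Naverage):
    for ind_scan in range(n_positions):
        # the scan only finishes on the last position of the last average
        last_step = (ind_scan == n_positions - 1 and ind_average == Naverage - 1)

print(average_axis_data, last_step)  # [0. 1. 2.] True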
pymodaq/post_treatment/load_and_plot.py CHANGED

@@ -59,9 +59,12 @@ class LoaderPlotter:
     def data(self) -> DataToExport:
         return self._data

-    def load_data(self, filter_dims: List[Union[DataDim, str]] = None,
-
-
+    def load_data(self, filter_dims: List[Union[DataDim, str]] = None,
+                  filter_full_names: List[str] = None, remove_navigation: bool = True,
+                  group_0D=False, average_axis: int=None, average_index: int = 0,
+                  last_step=False):
+        """Load Data from the h5 node of the dataloader and apply some filtering/manipulation before
+        plotting

         Parameters
         ----------
@@ -70,14 +73,17 @@
         filter_full_names: List[str]
             load only data matching these names
         remove_navigation: bool
-            if True, make navigation axes as signal axes (means DataND could be plotted on Viewer1D
-            Viewer2D by concatenation)
+            if True, make navigation axes as signal axes (means DataND could be plotted on Viewer1D
+            or Viewer2D by concatenation)
         group_0D: bool
             if True, group all (initial) Data0D into one DataFromPlugins
         average_axis: int or None
-            which axis in the data shapes should be interpereted as the average (in general it is 0
+            which axis in the data shapes should be interpereted as the average (in general it is 0
+            or None)
         average_index: int
             which step in the averaging process are we in.
+        last_step: bool
+            tells if this is the very last step of the (averaged) scan

         Returns
         -------
@@ -88,14 +94,15 @@
         self.dataloader.load_all('/', self._data)

         if average_axis is not None:
-            self.average_axis(average_axis, average_index)
+            self.average_axis(average_axis, average_index, last_step=last_step)

         if filter_dims is not None:
             filter_dims[:] = [enum_checker(DataDim, dim) for dim in filter_dims]
             self._data.data[:] = [data for data in self._data if data.dim in filter_dims]

         if filter_full_names is not None:
-            self._data.data[:] = [data for data in self._data if data.get_full_name() in
+            self._data.data[:] = [data for data in self._data if data.get_full_name() in
+                                  filter_full_names]

         if group_0D:  # 0D initial data
             self.group_0D_data()
@@ -105,31 +112,39 @@

         return self._data

-    def average_axis(self, average_axis, average_index) -> None:
+    def average_axis(self, average_axis, average_index, last_step=False) -> None:
         """ Average the data along their average axis

         Parameters
         ----------
         average_axis: int or None
-            which axis in the data shapes should be
+            which axis in the data shapes should be interpreted as the average
+            (in general it is 0 or None)
         average_index: int
             which step in the averaging process are we in.
+        last_step: bool
+            tells if this is the very last step of the (averaged) scan
         """
         for ind, data in enumerate(self._data):
             current_data = data.inav[average_index, ...]
-            if average_index
-
-
-
-
-
+            if average_index > 0:
+                if last_step:
+                    data_to_append = data.inav[0:, ...].mean(axis=average_axis)
+                else:
+                    if average_index == 1:
+                        data_to_append = data.inav[0, ...]
+                    else:
+                        data_to_append = data.inav[0:average_index, ...].mean(axis=average_axis)
+
+                data_to_append.labels = [f'{label}_averaged' for label in data_to_append.labels]
+                current_data.append(data_to_append)
             self._data[ind] = current_data

     def remove_navigation_axes(self):
         """Make the navigation axes as signal axes

-        transforms DataND into Data1D or Data2D or error... depending the exact shape of the data
-        navigation axes
+        transforms DataND into Data1D or Data2D or error... depending the exact shape of the data
+        and the number of navigation axes
         """
         for data in self._data:
             data.nav_indexes = ()
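The branching added to average_axis amounts to displaying, next to the sweep currently being acquired, the running mean of the sweeps already stored, and the mean over every sweep once last_step is reached. A self-contained numpy sketch of that logic with the average axis along axis 0 (plain arrays stand in for the pymodaq data objects and their inav indexer):

import numpy as np

rng = np.random.default_rng(0)
sweeps = rng.normal(size=(4, 100))   # 4 averages of a 100-point scan, average axis = 0
average_axis = 0

def averaged_trace(average_index: int, last_step: bool) -> np.ndarray:
    """Trace plotted alongside sweep number average_index."""
    if last_step:
        return sweeps[0:, ...].mean(axis=average_axis)           # mean over every sweep
    if average_index == 1:
        return sweeps[0, ...]                                    # only one previous sweep
    return sweeps[0:average_index, ...].mean(axis=average_axis)  # mean of previous sweeps

partial = averaged_trace(average_index=2, last_step=False)  # mean of sweeps 0 and 1
final = averaged_trace(average_index=3, last_step=True)     # mean of all four sweeps
assert np.allclose(final, sweeps.mean(axis=0))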
@@ -164,10 +179,17 @@ class LoaderPlotter:
         -----
         load_data
         """
-
-
+
+        target_at = kwargs.pop('target_at') if 'target_at' in kwargs else None
+        last_step = kwargs.pop('last_step') if 'last_step' in kwargs else False
+
         self.load_data(**kwargs)
         self.show_data(target_at=target_at)
+        if (last_step and 'average_index' in kwargs and kwargs['average_index']
+                is not None):
+            kwargs['last_step'] = last_step
+            self.load_data(**kwargs)
+            self.show_data(target_at=target_at)

     def show_data(self, **kwargs):
         """Send data to their dedicated viewers
@@ -197,7 +219,8 @@ class LoaderPlotter:
         self._viewers = dict(zip(viewers_name, self.dispatcher.viewers))
         self._viewer_docks = dict(zip(viewers_name, self.dispatcher.viewer_docks))

-    def set_data_to_viewers(self, data: DataToExport, temp=False,
+    def set_data_to_viewers(self, data: DataToExport, temp=False,
+                            target_at: Iterable[float] = None):
         """Process data dimensionality and send appropriate data to their data viewers

         Parameters
pymodaq/resources/VERSION CHANGED

@@ -1 +1 @@
-version = '4.1.0'
+version = '4.1.1'
pymodaq/utils/data.py CHANGED

@@ -1588,6 +1588,13 @@ class DataWithAxes(DataBase):
         """convenience property to set attribute from axis_manager"""
         self.set_axes_manager(self.shape, axes=axes, nav_indexes=self.nav_indexes)

+    def axes_limits(self, axes_indexes: List[int] = None) -> List[Tuple[float, float]]:
+        """Get the limits of specified axes (all if axes_indexes is None)"""
+        if axes_indexes is None:
+            return [(axis.min(), axis.max()) for axis in self.axes]
+        else:
+            return [(axis.min(), axis.max()) for axis in self.axes if axis.index in axes_indexes]
+
     @property
     def sig_indexes(self):
         """convenience property to fetch attribute from axis_manager"""
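The new axes_limits helper returns one (min, max) tuple per axis, optionally restricted to a set of axis indexes; ViewerND uses it further below as data.axes_limits(data.nav_indexes). A short illustrative sketch, assuming a pymodaq 4.1.1 install and reusing the DataRaw/Axis construction shown in the set_data_test hunk of this diff:

import numpy as np
from pymodaq.utils.data import Axis, DataRaw

y = np.linspace(-5, 5, 11)    # navigation axis values
x = np.linspace(0, 20, 21)    # signal axis values
dwa = DataRaw('NDdata', data=[np.random.rand(11, 21)], dim='DataND', nav_indexes=[0],
              axes=[Axis(data=y, index=0, label='y_axis', units='yunits'),
                    Axis(data=x, index=1, label='x_axis', units='xunits')])

print(dwa.axes_limits())                 # limits of both axes: [(-5.0, 5.0), (0.0, 20.0)]
print(dwa.axes_limits(dwa.nav_indexes))  # navigation axis only
print(dwa.axes_limits(dwa.sig_indexes))  # signal axis only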
pymodaq/utils/h5modules/data_saving.py CHANGED

@@ -5,7 +5,7 @@ Created the 21/11/2022
 @author: Sebastien Weber
 """
 from time import time
-from typing import Union, List, Tuple
+from typing import Union, List, Tuple, Iterable

 import numpy as np

@@ -308,7 +308,8 @@ class DataSaverLoader(DataManagement):
             bkg_nodes.append(node)
         return bkg_nodes

-    def get_data_arrays(self, where: Union[Node, str], with_bkg=False,
+    def get_data_arrays(self, where: Union[Node, str], with_bkg=False,
+                        load_all=False) -> List[np.ndarray]:
         """

         Parameters
@@ -338,15 +339,17 @@ class DataSaverLoader(DataManagement):
             getter = self._get_nodes

         if with_bkg:
-            return [np.atleast_1d(np.squeeze(array.read()-bkg.read()))
+            return [np.atleast_1d(np.squeeze(array.read()-bkg.read()))
+                    for array, bkg in zip(getter(where), bkg_nodes)]
         else:
             return [np.atleast_1d(np.squeeze(array.read())) for array in getter(where)]

     def load_data(self, where, with_bkg=False, load_all=False) -> DataWithAxes:
-        """Return a DataWithAxes object from the Data and Axis Nodes hanging from (or among) a
+        """Return a DataWithAxes object from the Data and Axis Nodes hanging from (or among) a
+        given Node

-        Does not include navigation axes stored elsewhere in the h5file. The node path is stored in
-        using the attribute path
+        Does not include navigation axes stored elsewhere in the h5file. The node path is stored in
+        the DatWithAxis using the attribute path

         Parameters
         ----------
@@ -383,18 +386,21 @@ class DataSaverLoader(DataManagement):
         axes = self.get_axes(parent_node)

         extra_attributes = data_node.attrs.to_dict()
-        for name in ['TITLE', 'CLASS', 'VERSION', 'backend', 'source', 'data_dimension',
-                     '
+        for name in ['TITLE', 'CLASS', 'VERSION', 'backend', 'source', 'data_dimension',
+                     'distribution', 'label', 'origin', 'nav_indexes', 'dtype', 'data_type',
+                     'subdtype', 'shape', 'size', 'EXTDIM', 'path']:
             extra_attributes.pop(name, None)

         data = DataWithAxes(data_node.attrs['TITLE'],
-                            source=data_node.attrs['source'] if 'source' in data_node.attrs
+                            source=data_node.attrs['source'] if 'source' in data_node.attrs
+                            else 'raw',
                             dim=data_node.attrs['data_dimension'],
                             distribution=data_node.attrs['distribution'],
                             data=ndarrays,
                             labels=[node.attrs['label'] for node in data_nodes],
                             origin=data_node.attrs['origin'] if 'origin' in data_node.attrs else '',
-                            nav_indexes=data_node.attrs['nav_indexes'] if 'nav_indexes' in
+                            nav_indexes=data_node.attrs['nav_indexes'] if 'nav_indexes' in
+                            data_node.attrs else (),
                             axes=axes,
                             path=data_node.path,
                             **extra_attributes)
@@ -575,7 +581,7 @@ class DataExtendedSaver(DataSaverLoader):
         where: Union[Node, str]
             the path of a given node or the node itself
         data: DataWithAxes
-        indexes:
+        indexes: Iterable[int]
            indexes where to save data in the init h5array (should have the same length as extended_shape and with values
            coherent with this shape
        """
@@ -591,7 +597,8 @@ class DataExtendedSaver(DataSaverLoader):
         for ind_data in range(len(data)):
             #todo check that getting with index is safe...
             array: CARRAY = self.get_node_from_index(where, ind_data)
-            array[tuple(indexes)] = data[ind_data]
+            array[tuple(indexes)] = data[ind_data]
+            # maybe use array.__setitem__(indexes, data[ind_data]) if it's not working


 class DataToExportSaver:
@@ -619,11 +626,12 @@ class DataToExportSaver:

     @staticmethod
     def channel_formatter(ind: int):
-        """All DataWithAxes included in the DataToExport will be saved into a channel group indexed
-        formatted as below"""
+        """All DataWithAxes included in the DataToExport will be saved into a channel group indexed
+        and formatted as below"""
         return f'CH{ind:02d}'

-    def add_data(self, where: Union[Node, str], data: DataToExport, settings_as_xml='',
+    def add_data(self, where: Union[Node, str], data: DataToExport, settings_as_xml='',
+                 metadata=None):
         """

         Parameters
@@ -642,8 +650,10 @@ class DataToExportSaver:
         dims = data.get_dim_presents()
         for dim in dims:
             dim_group = self._h5saver.get_set_group(where, dim)
-            for ind, dwa in enumerate(data.get_data_from_dim(dim)):
-
+            for ind, dwa in enumerate(data.get_data_from_dim(dim)):
+                # dwa: DataWithAxes filtered by dim
+                dwa_group = self._h5saver.get_set_group(dim_group, self.channel_formatter(ind),
+                                                        dwa.name)
                 # dwa_group = self._h5saver.add_ch_group(dim_group, dwa.name)
                 self._data_saver.add_data(dwa_group, dwa)

@@ -651,8 +661,10 @@ class DataToExportSaver:
         dims = data.get_dim_presents()
         for dim in dims:
             dim_group = self._h5saver.get_set_group(where, dim)
-            for ind, dwa in enumerate(data.get_data_from_dim(dim)):
-
+            for ind, dwa in enumerate(data.get_data_from_dim(dim)):
+                # dwa: DataWithAxes filtered by dim
+                dwa_group = self._h5saver.get_set_group(dim_group,
+                                                        self.channel_formatter(ind), dwa.name)
                 # dwa_group = self._get_node_from_title(dim_group, dwa.name)
                 if dwa_group is not None:
                     self._bkg_saver.add_data(dwa_group, dwa, save_axes=False)
@@ -679,8 +691,8 @@ class DataToExportEnlargeableSaver(DataToExportSaver):
         self._axis_name = axis_name
         self._axis_units = axis_units

-    def add_data(self, where: Union[Node, str], data: DataToExport,
-                 settings_as_xml='', metadata=None):
+    def add_data(self, where: Union[Node, str], data: DataToExport,
+                 axis_value: Union[float, np.ndarray], settings_as_xml='', metadata=None):
         """

         Parameters
@@ -712,7 +724,8 @@ class DataToExportEnlargeableSaver(DataToExportSaver):
 class DataToExportTimedSaver(DataToExportEnlargeableSaver):
     """Specialized DataToExportEnlargeableSaver to save data as a function of a time axis

-    Only one element ca be added at a time, the time axis value are enlarged using the data to be
+    Only one element ca be added at a time, the time axis value are enlarged using the data to be
+    added timestamp

     Notes
     -----
@@ -721,8 +734,10 @@ class DataToExportTimedSaver(DataToExportEnlargeableSaver):
     def __init__(self, h5saver: H5Saver):
         super().__init__(h5saver, 'time', 's')

-    def add_data(self, where: Union[Node, str], data: DataToExport, settings_as_xml='',
-
+    def add_data(self, where: Union[Node, str], data: DataToExport, settings_as_xml='',
+                 metadata=None):
+        super().add_data(where, data, axis_value=data.timestamp, settings_as_xml=settings_as_xml,
+                         metadata=metadata)


 class DataToExportExtendedSaver(DataToExportSaver):
@@ -755,7 +770,7 @@ class DataToExportExtendedSaver(DataToExportSaver):
         for axis in axes:
             self._nav_axis_saver.add_axis(nav_group, axis)

-    def add_data(self, where: Union[Node, str], data: DataToExport, indexes:
+    def add_data(self, where: Union[Node, str], data: DataToExport, indexes: Iterable[int],
                  distribution=DataDistribution['uniform'],
                  settings_as_xml='', metadata={}):
         """
@@ -766,8 +781,8 @@ class DataToExportExtendedSaver(DataToExportSaver):
             the path of a given node or the node itself
         data: DataToExport
         indexes: List[int]
-            indexes where to save data in the init h5array (should have the same length as
-            coherent with this shape
+            indexes where to save data in the init h5array (should have the same length as
+            extended_shape and with values coherent with this shape
         settings_as_xml: str
             The settings parameter as an XML string
         metadata: dict
@@ -777,9 +792,12 @@ class DataToExportExtendedSaver(DataToExportSaver):
         dims = data.get_dim_presents()
         for dim in dims:
             dim_group = self._h5saver.get_set_group(where, dim)
-            for ind, dwa in enumerate(data.get_data_from_dim(dim)):
-
-                self.
+            for ind, dwa in enumerate(data.get_data_from_dim(dim)):
+                # dwa: DataWithAxes filtered by dim
+                dwa_group = self._h5saver.get_set_group(dim_group,
+                                                        self.channel_formatter(ind), dwa.name)
+                self._data_saver.add_data(dwa_group, dwa, indexes=indexes,
+                                          distribution=distribution)


 class DataLoader:
@@ -823,7 +841,8 @@ class DataLoader:

         Returns
         -------
-        GROUP: returns the group named SPECIAL_GROUP_NAMES['nav_axes'] holding all NavAxis for
+        GROUP: returns the group named SPECIAL_GROUP_NAMES['nav_axes'] holding all NavAxis for
+        those data

         See Also
         --------
@@ -874,10 +893,11 @@ class DataLoader:
         for child in children_dict:
             if isinstance(children_dict[child], GROUP):
                 self.load_all(children_dict[child], data, with_bkg=with_bkg)
-            elif 'data_type' in children_dict[child].attrs and 'data' in
+            elif ('data_type' in children_dict[child].attrs and 'data' in
+                  children_dict[child].attrs['data_type']):

-                data_list.append(self.load_data(children_dict[child].path,
+                data_list.append(self.load_data(children_dict[child].path,
+                                                with_bkg=with_bkg, load_all=True))
                 break
         data_tmp = DataToExport(name=where.name, data=data_list)
         data.append(data_tmp)
-
pymodaq/utils/h5modules/h5logging.py CHANGED

@@ -72,56 +72,13 @@ class H5Logger(AbstractLogger):

     def add_detector(self, name, settings):
         pass
-        # if name not in self.h5saver.raw_group.children_name():
-        #     group = self.h5saver.add_det_group(self.h5saver.raw_group, name, settings)
-        #     self.h5saver.add_navigation_axis(np.array([0.0, ]),
-        #                                      group, 'time_axis', enlargeable=True,
-        #                                      title='Time axis',
-        #                                      metadata=dict(label='Time axis', units='s', nav_index=0))

     def add_actuator(self, name, settings):
         pass
-
-
-
-
-        #                                      title='Time axis',
-        #                                      metadata=dict(label='Time axis', units='s', nav_index=0))
-
-    def add_data(self, data: DataToExport):
-        self.module_and_data_saver.add_data()
-
-        # name = data['name']
-        # group = self.h5saver.get_group_by_title(self.h5saver.raw_group, name)
-        # time_array = self.h5saver.get_node(group, 'Logger_time_axis')
-        # time_array.append(np.array([data['acq_time_s']]))
-        #
-        # data_types = ['data0D', 'data1D']
-        # if self.settings['save_2D']:
-        #     data_types.extend(['data2D', 'dataND'])
-        #
-        # for data_type in data_types:
-        #     if data_type in data.keys() and len(data[data_type]) != 0:
-        #         if not self.h5saver.is_node_in_group(group, data_type):
-        #             data_group = self.h5saver.add_data_group(group, data_type, metadata=dict(type='scan'))
-        #         else:
-        #             data_group = self.h5saver.get_node(group, utils.capitalize(data_type))
-        #         for ind_channel, channel in enumerate(data[data_type]):
-        #             channel_group = self.h5saver.get_group_by_title(data_group, channel)
-        #             if channel_group is None:
-        #                 channel_group = self.h5saver.add_CH_group(data_group, title=channel)
-        #                 data_array = self.h5saver.add_data(channel_group, data[data_type][channel],
-        #                                                    scan_type='scan1D', enlargeable=True)
-        #             else:
-        #                 data_array = self.h5saver.get_node(channel_group, 'Data')
-        #             if data_type == 'data0D' and not isinstance(data[data_type][channel]['data'], np.ndarray):
-        #                 #this is a security as accessing an element in an array can be converted
-        #                 # to a scalar... Made some other attempts but found this is the most reliable here.
-        #                 logger.debug('Some data seems to not be properly formated as ndarrays')
-        #                 data_array.append(np.array([data[data_type][channel]['data']]))
-        #             else:
-        #                 data_array.append(data[data_type][channel]['data'])
-        # self.h5saver.flush()
+
+    def add_data(self, dte: DataToExport):
+        self.module_and_data_saver.add_data(dte)
+
         self.settings.child('N_saved').setValue(self.settings.child('N_saved').value() + 1)

     def stop_logger(self):
pymodaq/utils/h5modules/module_saving.py CHANGED

@@ -361,7 +361,8 @@ class ScanSaver(ModuleSaver):
         for detector in self._module.modules_manager.detectors:
             detector.module_and_data_saver.add_nav_axes(self._module_group, axes)

-    def add_data(self, indexes: Tuple[int] = None,
+    def add_data(self, dte: DataToExport = None, indexes: Tuple[int] = None,
+                 distribution=DataDistribution['uniform']):
         for detector in self._module.modules_manager.detectors:
             try:
                 detector.insert_data(indexes, where=self._module_group, distribution=distribution)
@@ -381,20 +382,18 @@ class LoggerSaver(ScanSaver):
     """
     group_type = GroupType['data_logger']

-    def
-
-        self._module: H5Logger = module
-        self._h5saver = None
-
-    def add_data(self):
-        for detector in self._module.modules_manager.detectors:
-            try:
-                detector.append_data(data=None, where=self._module_group)
-            except Exception as e:
-                pass
+    def add_data(self, dte: DataToExport):
+        """Add data to it's corresponding control module

-
-
-
-
-
+        The name of the control module is the DataToExport name attribute
+        """
+        if dte.name in self._module.modules_manager.detectors_name:
+            control_module = self._module.modules_manager.detectors[
+                self._module.modules_manager.detectors_name.index(dte.name)]
+        elif dte.name in self._module.modules_manager.actuators_name:
+            control_module = self._module.modules_manager.actuators[
+                self._module.modules_manager.actuators_name.index(dte.name)]
+        else:
+            return
+
+        control_module.append_data(dte=dte, where=self._module_group)
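The rewritten LoggerSaver.add_data routes each DataToExport to the control module whose name matches the dte name attribute and silently ignores anything it cannot match. A standalone sketch of that name-based routing, with plain lists standing in for the modules_manager collections (the module names are made up):

from typing import Optional

detectors_name = ['Det0D', 'Camera']   # illustrative names
actuators_name = ['MoveX', 'MoveY']
detectors = [f'detector {name}' for name in detectors_name]
actuators = [f'actuator {name}' for name in actuators_name]

def route(dte_name: str) -> Optional[str]:
    """Return the control module matching the DataToExport name, or None."""
    if dte_name in detectors_name:
        return detectors[detectors_name.index(dte_name)]
    elif dte_name in actuators_name:
        return actuators[actuators_name.index(dte_name)]
    return None  # unknown source: dropped, as in the real method

print(route('MoveX'))    # actuator MoveX
print(route('Unknown'))  # None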
pymodaq/utils/h5modules/saving.py CHANGED

@@ -417,19 +417,19 @@ class H5SaverLowLevel(H5Backend):
         return group

     def show_file_content(self):
-
+        win = QtWidgets.QMainWindow()
         if not self.isopen():
             if self.h5_file_path is not None:
                 if self.h5_file_path.exists():
-                    self.analysis_prog = browsing.H5Browser(
+                    self.analysis_prog = browsing.H5Browser(win, h5file_path=self.h5_file_path)
                 else:
                     logger.warning('The h5 file path has not been defined yet')
             else:
                 logger.warning('The h5 file path has not been defined yet')
         else:
             self.flush()
-            self.analysis_prog = browsing.H5Browser(
-
+            self.analysis_prog = browsing.H5Browser(win, h5file=self.h5file)
+        win.show()


 class H5SaverBase(H5SaverLowLevel, ParameterManager):
@@ -806,8 +806,9 @@ class H5SaverBase(H5SaverLowLevel, ParameterManager):

     def value_changed(self, param):
         if param.name() == 'show_file':
-            param.
-
+            if param.value():
+                param.setValue(False)
+                self.show_file_content()

         elif param.name() == 'base_path':
             try:
@@ -824,21 +825,6 @@
     def update_status(self, status):
         logger.warning(status)

-    def show_file_content(self):
-        win = QtWidgets.QMainWindow()
-        if not self.isopen():
-            if self.h5_file_path is not None:
-                if self.h5_file_path.exists():
-                    self.analysis_prog = browsing.H5Browser(win, h5file_path=self.h5_file_path)
-                else:
-                    logger.warning('The h5 file path has not been defined yet')
-            else:
-                logger.warning('The h5 file path has not been defined yet')
-        else:
-            self.flush()
-            self.analysis_prog = browsing.H5Browser(win, h5file=self.h5file)
-        win.show()
-

 class H5Saver(H5SaverBase, QObject):
     """
pymodaq/utils/managers/modules_manager.py CHANGED

@@ -43,8 +43,8 @@ class ModulesManager(QObject, ParameterManager):

     params = [
         {'title': 'Actuators/Detectors Selection', 'name': 'modules', 'type': 'group', 'children': [
-            {'title': 'detectors', 'name': 'detectors', 'type': 'itemselect'},
-            {'title': 'Actuators', 'name': 'actuators', 'type': 'itemselect'},
+            {'title': 'detectors', 'name': 'detectors', 'type': 'itemselect', 'checkbox': True},
+            {'title': 'Actuators', 'name': 'actuators', 'type': 'itemselect', 'checkbox': True},
         ]},

         {'title': "Moves done?", 'name': 'move_done', 'type': 'led', 'value': False},
pymodaq/utils/parameter/pymodaq_ptypes/itemselect.py CHANGED

@@ -1,3 +1,5 @@
+from copy import deepcopy
+
 from qtpy import QtWidgets, QtCore, QtGui
 from pyqtgraph.parametertree.Parameter import ParameterItem
 from pyqtgraph.parametertree.parameterTypes.basetypes import WidgetParameterItem
@@ -41,7 +43,7 @@ class ItemSelect_pb(QtWidgets.QWidget):


 class ItemSelect(QtWidgets.QListWidget):
-    def __init__(self, hasCheckbox=
+    def __init__(self, hasCheckbox=True):
         QtWidgets.QListWidget.__init__(self)
         self.hasCheckbox = hasCheckbox  # Boolean indicating if listwidget item uses checkbox ot not
         self.selItems = []  # Dummy variable to keep track of click order
@@ -69,11 +71,11 @@ class ItemSelect(QtWidgets.QListWidget):
         # Clean up list with non existing entries
         [self.selItems.remove(item) for item in self.selItems if item not in allitems]
         for item in self.all_items():
-            if item.checkState()!=0: # Item is selected
+            if item.checkState() != 0:  # Item is selected
                 if item.text() not in self.selItems:  # if item not in list then add it
                     self.selItems.append(item.text())
             else:  # Item is not selected
-                if item.text() in self.selItems:
+                if item.text() in self.selItems:  # if item in list then remove it
                     self.selItems.remove(item.text())
         selitems = self.selItems.copy()  # need to copy to correctly emit signal when changed

@@ -83,7 +85,7 @@ class ItemSelect(QtWidgets.QListWidget):

         return dict(all_items=allitems, selected=selitems)

-    def all_items(self):
+    def all_items(self) -> list:
         """
         Get the all_items list from the self QtWidget attribute.

@@ -94,16 +96,16 @@ class ItemSelect(QtWidgets.QListWidget):
         """
         return [self.item(ind) for ind in range(self.count())]

-    def select_item(self, item:QtWidgets.QListWidgetItem, doSelect:bool = False):
+    def select_item(self, item: QtWidgets.QListWidgetItem, doSelect:bool = False):
         """
         Function to select item. The selection depends if the item uses checkbox or not.
         """
         if self.hasCheckbox:
-            item.setCheckState(int(2*doSelect))
+            item.setCheckState(int(2*doSelect))  # 2=QtCore.Qt.Checked, 0=QtCore.Qt.Unchecked
         else:
             item.setSelected(doSelect)

-    def set_value(self, values):
+    def set_value(self, values: dict):
         """
         Set values to the all_items attributes filtering values by the 'selected' key.

@@ -113,34 +115,35 @@ class ItemSelect(QtWidgets.QListWidget):
         =============== ============== =======================================
         """
         # Remove values in selected if they do not exist in all
-
+        values = deepcopy(values)
+        [values['selected'].remove(value) for value in values['selected'] if value
+         not in values['all_items']]

         allitems_text = []
         # Check existing items and remove unused ones
         for item in self.all_items():
-            if item.text() not in values['all_items']:
+            if item.text() not in values['all_items']:  # Remove items from list if text not
+                # in values
                 item = self.takeItem(self.row(item))
             else:
-                allitems_text.append(item.text())
+                allitems_text.append(item.text())  # Add items to list
         # Create items if needed
-        for value in values['all_items']:
-            if value not in allitems_text:
+        for value in values['all_items']:  # Loop through all values
+            if value not in allitems_text:  # Test if object already exists
                 item = QtWidgets.QListWidgetItem(value)  # Create object
-                if self.hasCheckbox:
+                if self.hasCheckbox:  # Add checkbox if required
                     item.setFlags(item.flags() | QtCore.Qt.ItemIsUserCheckable)
-                self.select_item(item, doSelect=False)
-
+                self.select_item(item, doSelect=False)
+                # Make sure item is not selected (checkbox not appearing somehow without)
+                self.addItem(item)  # Add object to widget

-        allitems = self.all_items()
-        # Selection process
-        for
-
-
-
-
-            else:
-                self.select_item(item, doSelect=False)
-
+        allitems = self.all_items()  # All selectable items
+        # Selection process
+        for item in allitems:
+            self.select_item(item, doSelect=False)
+        for value in values['selected']:  # Loop through selected to retain selection order
+            item = allitems[[item.text() for item in allitems].index(value)]
+            self.select_item(item, doSelect=True)
         QtWidgets.QApplication.processEvents()


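set_value receives a plain dict with 'all_items' and 'selected' keys; the new code deep-copies it, prunes selected entries that are not part of 'all_items', then re-applies the selection in the order given by 'selected'. A minimal sketch of the pruning step only, without the Qt widget:

from copy import deepcopy

def prune_selection(values: dict) -> dict:
    """Drop 'selected' entries absent from 'all_items' without mutating the
    caller's dict (which is why set_value now deep-copies its argument)."""
    values = deepcopy(values)
    values['selected'] = [v for v in values['selected'] if v in values['all_items']]
    return values

incoming = {'all_items': ['Det0D', 'Camera'], 'selected': ['Camera', 'OldDetector']}
print(prune_selection(incoming))  # {'all_items': ['Det0D', 'Camera'], 'selected': ['Camera']}
print(incoming['selected'])       # ['Camera', 'OldDetector'], the original is left untouched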
@@ -156,12 +159,13 @@ class ItemSelectParameterItem(WidgetParameterItem):
         self.hideWidget = False
         opts = self.param.opts

-        if 'checkbox' in opts and opts['checkbox']:
+        if 'checkbox' in opts and opts['checkbox']:
             w = ItemSelect_pb(checkbox=opts['checkbox'])
             w.sigChanged = w.itemselect.itemChanged
         else:
             w = ItemSelect_pb()
             w.sigChanged = w.itemselect.itemSelectionChanged
+

         if 'dragdrop' in opts and opts['dragdrop']:
             w.itemselect.setDragDropMode(QtWidgets.QAbstractItemView.InternalMove)
pymodaq/utils/plotting/data_viewers/viewerND.py CHANGED

@@ -165,26 +165,34 @@ class UniformDataDisplayer(BaseDataDisplayer):
         processor: DataProcessorFactory = data_processors
         self.update_processor(processor)

-    def init_rois(self,
-
-
-
-
-
-
-        self._navigator2D.set_crosshair_position(
-
-
-
-
-
-
-
-
-
-        self._viewer2D.
-
+    def init_rois(self, nav_axes_limits: List[Tuple[float, float]] = None,
+                  sig_axis_limits: List[Tuple[float, float]] = None):
+
+        if len(nav_axes_limits) == 1:
+            self._navigator1D.crosshair.set_crosshair_position(np.mean(nav_axes_limits[0]))
+
+        if len(nav_axes_limits) == 2:
+            self._navigator2D.crosshair.set_crosshair_position(
+                *self._navigator2D.view.unscale_axis(np.mean(nav_axes_limits[1]),
+                                                     np.mean(nav_axes_limits[0]))
+            )
+        if len(sig_axis_limits) == 1:
+            self._viewer1D.roi.setPos((float(np.mean(sig_axis_limits[0]) -
+                                             np.abs(np.diff(sig_axis_limits[0])) / 3),
+                                       float(np.mean(sig_axis_limits[0]) +
+                                             np.abs(np.diff(sig_axis_limits[0])) / 3))
+                                      )
+        if len(sig_axis_limits) == 2:
+            scaled_axes = np.array(self._viewer2D.view.unscale_axis(np.array(sig_axis_limits[1]),
+                                                                    np.array(sig_axis_limits[0])))
+
+            self._viewer2D.roi.setSize(
+                float(np.diff(scaled_axes[0])) / 3,
+                float(np.diff(scaled_axes[1])) / 3)
+
+            self._viewer2D.roi.setPos(
+                float(np.mean(scaled_axes[0])) - float(np.diff(scaled_axes[0])) / 6,
+                float(np.mean(scaled_axes[1])) - float(np.diff(scaled_axes[1])) / 6)

     def updated_nav_integration(self):
         """ Means the ROI select of the 2D viewer has been moved """
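Numerically, the 1D ROI initialised above spans the middle two thirds of the signal axis (from centre - range/3 to centre + range/3), while the 2D ROI is a box covering one third of each axis range, centred on the axis means. A small numpy sketch of that arithmetic on plain (min, max) tuples (the unscale_axis coordinate conversion done by the real viewers is left out):

import numpy as np

sig_limits_1d = (0.0, 12.0)                       # (min, max) of a 1D signal axis
mean = float(np.mean(sig_limits_1d))
span = float(np.abs(np.diff(sig_limits_1d))[0])
roi_1d = (mean - span / 3, mean + span / 3)       # middle two thirds of the axis
print(roi_1d)                                     # (2.0, 10.0)

x_limits, y_limits = (0.0, 30.0), (-3.0, 3.0)     # 2D signal axes
sizes = (float(np.diff(x_limits)[0]) / 3, float(np.diff(y_limits)[0]) / 3)
pos = (float(np.mean(x_limits)) - sizes[0] / 2, float(np.mean(y_limits)) - sizes[1] / 2)
print(sizes, pos)                                 # (10.0, 2.0) (10.0, -1.0)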
@@ -317,7 +325,8 @@ class SpreadDataDisplayer(BaseDataDisplayer):
         processor = data_processors  # if len(data.axes_manager.sig_shape) > 1 else math_processors1D
         self.update_processor(processor)

-    def init_rois(self,
+    def init_rois(self, nav_axes_limits: List[Tuple[float, float]] = None,
+                  sig_axis_limits: List[Tuple[float, float]] = None):
         pass

     def update_viewer_data(self, posx=0, posy=0):
@@ -520,13 +529,17 @@ class ViewerND(ParameterManager, ActionManager, ViewerBase):
                                          self.axes_viewer)

         self.navigator1D.crosshair.crosshair_dragged.connect(self.data_displayer.update_viewer_data)
+
         self.navigator1D.ROI_select_signal.connect(self.data_displayer.updated_nav_integration)
+
         self.navigator2D.crosshair_dragged.connect(self.data_displayer.update_viewer_data)
+
         self.navigator2D.ROI_select_signal.connect(self.data_displayer.updated_nav_integration)
         self.axes_viewer.navigation_changed.connect(self.data_displayer.update_viewer_data)
         self.data_displayer.data_dim_signal.connect(self.update_data_dim)

         self.viewer1D.roi.sigRegionChanged.connect(self.data_displayer.update_nav_data_from_roi)
+
         self.viewer2D.roi.sigRegionChanged.connect(self.data_displayer.update_nav_data_from_roi)

         self.get_action('filters').currentTextChanged.connect(self.data_displayer.update_filter)
@@ -553,7 +566,8 @@ class ViewerND(ParameterManager, ActionManager, ViewerBase):

         if force_update:
             self.update_widget_visibility(data)
-            self.data_displayer.init_rois(data)
+            self.data_displayer.init_rois(data.axes_limits(data.nav_indexes),
+                                          data.axes_limits(data.sig_indexes))
         self.data_to_export_signal.emit(self.data_to_export)

     def set_data_test(self, data_shape='3D'):
@@ -636,9 +650,10 @@ class ViewerND(ParameterManager, ActionManager, ViewerBase):
                                 Axis(data=t, index=2, label='t_axis', units='tunits')])
         elif data_shape == '2D':
             data = [np.sum(data, axis=(2, 3))]
-            dataraw = DataRaw('NDdata', data=data, dim='DataND', nav_indexes=[0
+            dataraw = DataRaw('NDdata', data=data, dim='DataND', nav_indexes=[0],
                               axes=[Axis(data=y, index=0, label='y_axis', units='yunits'),
-                                    Axis(data=x, index=1, label='x_axis', units='xunits')]
+                                    Axis(data=x, index=1, label='x_axis', units='xunits')],
+                              )
         elif data_shape == '1D':
             data = [np.sum(data, axis=(0, 1, 2))]
             dataraw = DataRaw('NDdata', data=data, dim='DataND', nav_indexes=[],
pymodaq/utils/scanner/scanners/_1d_scanners.py CHANGED

@@ -120,7 +120,8 @@ class Scan1DSparse(Scan1DBase):
         {'title': 'Parsed string:', 'name': 'parsed_string', 'type': 'text', 'value': '0:0.1:1', }
     ]
     n_axes = 1
-    distribution = DataDistribution['
+    distribution = DataDistribution['uniform']  # because in 1D it doesn't matter is spread or
+    # uniform, one can easily plot both types on a regulat 1D plot

     def __init__(self, actuators: List = None, **_ignored):
         super().__init__(actuators=actuators)
{pymodaq-4.1.0.dist-info → pymodaq-4.1.1.dist-info}/RECORD CHANGED

@@ -3,7 +3,7 @@ pymodaq/dashboard.py,sha256=aKG7XhO7e4S_9nv1MdrCFW5xJ2ADJ6evDIgaPy6ih3w,63635
 pymodaq/icon.ico,sha256=hOHHfNDENKphQvG1WDleSEYcHukneR2eRFJu8isIlD4,74359
 pymodaq/splash.png,sha256=ow8IECF3tPRUMA4tf2tMu1aRiMaxx91_Y2ckVxkrmF0,53114
 pymodaq/control_modules/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pymodaq/control_modules/daq_move.py,sha256=
+pymodaq/control_modules/daq_move.py,sha256=Lkq6hVzBulTpU8VnQ71Mo59p61v9mjjH3Qfif0iuCuI,36578
 pymodaq/control_modules/daq_move_ui.py,sha256=sviVOHASH4zjCIfhWzh42v35_n4JaUwUh-t5dFzQBo4,14530
 pymodaq/control_modules/daq_viewer.py,sha256=XJ6PHOg_Emz2-Leg9Fw2k91Mlmz00RFk3xm4NGZnJFI,58421
 pymodaq/control_modules/daq_viewer_ui.py,sha256=EfWnXQYcq4x5qOA2GIuYRBcD0L5Gak44lJ7WqwOrFe4,15646
@@ -36,7 +36,7 @@ pymodaq/examples/Labview_TCP_Client/cmd_types.ctl,sha256=gwuDyGcte11Zqx0C-U8ljRy
 pymodaq/extensions/__init__.py,sha256=EnlA2Pv8B2HSYzHz_QJhxvWBSEaE1BvFQEA_puxuD9U,321
 pymodaq/extensions/console.py,sha256=SOKQCZtOB6sqtI7JoiG7sRfL1KWLGGI_3VGDEva2Ufg,2522
 pymodaq/extensions/daq_logger.py,sha256=7G7LzL_Xu7PEQUk-N-3lQXEochysCmvv_bbf2nGUpmE,19508
-pymodaq/extensions/daq_scan.py,sha256=
+pymodaq/extensions/daq_scan.py,sha256=TGoaJcVgYBJ6hffqVGcM89t5cW3RqOZ6f1FSSaFZQYI,54032
 pymodaq/extensions/daq_scan_ui.py,sha256=zp9dCOnsWdPRe0LuU8hxhTf8L3t2eo-MjBd8xiHX_mk,10134
 pymodaq/extensions/h5browser.py,sha256=udE8v4SLr2w9SaYVynFFkxUuz94qme3vFPf1nng3O1M,736
 pymodaq/extensions/utils.py,sha256=lGyPCnN8PEWuSNbQisHjd4PBMjlnh_Zz2BqJplkvqE4,1881
@@ -45,7 +45,7 @@ pymodaq/extensions/pid/daq_move_PID.py,sha256=EiTJz4fLcjVL1UxnFREu2oZEz2nl-iL5xg
 pymodaq/extensions/pid/pid_controller.py,sha256=-0_Flv4PnF9N3CyJ45LRtVbLmH9P5rhwCSGJxnkge6Q,28317
 pymodaq/extensions/pid/utils.py,sha256=OPMKbfnjj4fW0I5jPRBuMjn0thPNFrTQLwd3or6xUPw,8476
 pymodaq/post_treatment/__init__.py,sha256=xaaLFZJ7OLqI_7yPurFk89A7m2ywSbYDXAsdE-QQ8Zg,81
-pymodaq/post_treatment/load_and_plot.py,sha256=
+pymodaq/post_treatment/load_and_plot.py,sha256=CZP8MbJUzhKMmnpwb-Z1nw_HBR5sZ5rRsVmHSRVtsp4,10954
 pymodaq/post_treatment/process_to_scalar.py,sha256=NHntybqpDhDjQJ224Dhf9Ij_ql-fAEMRT6egA6UEGfA,11568
 pymodaq/post_treatment/daq_analysis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pymodaq/post_treatment/daq_measurement/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -53,7 +53,7 @@ pymodaq/post_treatment/daq_measurement/daq_measurement_GUI.py,sha256=1u7hWDaiwsZ
 pymodaq/post_treatment/daq_measurement/daq_measurement_GUI.ui,sha256=PyzbCWPMkh5oIYYteZczXyWMeHKW9EJmM1QlzXhnyTk,7037
 pymodaq/post_treatment/daq_measurement/daq_measurement_main.py,sha256=CAKwcWMOD86aXB8mbdxOK7e8nZRos5d59FzDtqK1QoY,17093
 pymodaq/post_treatment/daq_measurement/process_from_QtDesigner_DAQ_Measurement_GUI.bat,sha256=e1tu2A67MS9fk3jhriF6saQgRxWIucIvNW92iWXFP6E,164
-pymodaq/resources/VERSION,sha256=
+pymodaq/resources/VERSION,sha256=BRayochAiCccQkfIWF1yVCj5pLS-_7G_RzWifJ9lAHA,17
 pymodaq/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pymodaq/resources/config_scan_template.toml,sha256=zlbFO6pOdrLOFJTVMXfek8YpvOo29tqv0aTjGnB7_mA,570
 pymodaq/resources/config_template.toml,sha256=b91pWeFlKKN-HPpYkwzBLEsEQnoKEM6GmNKQq7aCfts,3352
@@ -302,7 +302,7 @@ pymodaq/utils/chrono_timer.py,sha256=rwX8apS8B-IKhA0Cp2H9tLz0BRN7G3Pg5ptozvd3MKM
 pymodaq/utils/config.py,sha256=iaZpoI-l5KkCKlE49k6IPcRXKB_Ai6bi72Vaf6a2Xbg,12480
 pymodaq/utils/conftests.py,sha256=3Ak8WEpa3EhAp73Yb1LLq8YFONhPqiL7gG9eSDIoTNc,58
 pymodaq/utils/daq_utils.py,sha256=LCz6wM1sAkKPf4CMnYB9HXGiizlVpN5Yj4KXo6D-oRg,30312
-pymodaq/utils/data.py,sha256=
+pymodaq/utils/data.py,sha256=0LTET3_W8MnU97P-n_a6rgn1SBmv5CF9UXEo19ieiz8,86167
 pymodaq/utils/enums.py,sha256=WNU6w3tsNpP7HQnNrQ6eKnDTXulOlZufQRr11__-l0M,1822
 pymodaq/utils/exceptions.py,sha256=wLO6VlofzfwWkOOWMN2B-3NEWMfpgygyeEdakIx_rAs,668
 pymodaq/utils/factory.py,sha256=OLPxzbgpvIuxMN9e59O768NN25FXagcLuE8juvVbhT0,2311
@@ -337,11 +337,11 @@ pymodaq/utils/gui_utils/widgets/tree_toml.py,sha256=Csq1v1_sqdCEvrb4VCewfsMVMpu-
 pymodaq/utils/h5modules/__init__.py,sha256=x3_4ELvG9onTKEFgIt9xEGg_mA1bB07dvVbU9q0xQKw,104
 pymodaq/utils/h5modules/backends.py,sha256=6n_6HoeQ5gLE_e9pme-ByzN8oidtvQn04TGfk0FPxSc,33040
 pymodaq/utils/h5modules/browsing.py,sha256=4MGKUDYbxC-C2bWcjBRuHB9pGWxqcAiqxtwH_3O_gKM,23299
-pymodaq/utils/h5modules/data_saving.py,sha256=
+pymodaq/utils/h5modules/data_saving.py,sha256=9KktJr7i4KB95lcUb5qKT4vxAiNm_mu2Q0JLVIuz4lY,34234
 pymodaq/utils/h5modules/exporter.py,sha256=iCfUjkuGjs3-ijcUkt38NMrjO8tI9wXShvwYHJIUU70,3670
-pymodaq/utils/h5modules/h5logging.py,sha256=
-pymodaq/utils/h5modules/module_saving.py,sha256=
-pymodaq/utils/h5modules/saving.py,sha256=
+pymodaq/utils/h5modules/h5logging.py,sha256=UhRo9YvjU5Ujw_i5aPHXOgOdw_IszxmekOa7gYUY5AQ,2492
+pymodaq/utils/h5modules/module_saving.py,sha256=r9rzHQhePS78yRZd4Sb-4vccLGNY8n_ulB0qJb6HogA,13761
+pymodaq/utils/h5modules/saving.py,sha256=0Cw3cgc_kfI6nWTTLjUkeUpYcA00VIgx5QkJypUwkP8,34013
 pymodaq/utils/h5modules/utils.py,sha256=0isF661xthXlT3hFJvXTcgGqkZcGQmSanTNAGSInez4,3368
 pymodaq/utils/h5modules/exporters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pymodaq/utils/h5modules/exporters/base.py,sha256=tUXUoTFPHJY9itHR2CajvwrJPVX5gmupMwO-GIQ43Cg,3961
@@ -350,7 +350,7 @@ pymodaq/utils/h5modules/exporters/hyperspy.py,sha256=rheeVJQO0BAF606D_0S_j8huzOL
 pymodaq/utils/managers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pymodaq/utils/managers/action_manager.py,sha256=l6p_JjEkFce1CskZzDO8be1BMslCYdS0d4U53yHBDHo,17227
 pymodaq/utils/managers/batchscan_manager.py,sha256=jcL08YvFafX5kiy03BV1_6obt2Xogiby5pvTKeN8_ho,13905
-pymodaq/utils/managers/modules_manager.py,sha256=
+pymodaq/utils/managers/modules_manager.py,sha256=RKpt1RU0VrELBPmTAlLB_B5k-5KX7deHnequgOFfYKk,20821
 pymodaq/utils/managers/overshoot_manager.py,sha256=fe_CR1Bkw85BER34MoVFlm-xtKl9Hr9bkf2nyaz9hXg,7158
 pymodaq/utils/managers/parameter_manager.py,sha256=BX_wPzx02zYgqZHl0pnGHOq3fwE__l38d6xtDQILmAQ,11426
 pymodaq/utils/managers/preset_manager.py,sha256=u_gssDcJ-z6oj9Wyrppt1H-FW9pbeihxA8jU00hEFpM,8205
@@ -364,7 +364,7 @@ pymodaq/utils/parameter/pymodaq_ptypes/__init__.py,sha256=NNPGOTfpft27W4mxHbqcgf
 pymodaq/utils/parameter/pymodaq_ptypes/bool.py,sha256=yz9-zn6T5NN7SDEWwf8tfw8Ezbllbt1rOd8vHGqw3mI,761
 pymodaq/utils/parameter/pymodaq_ptypes/date.py,sha256=ND9lnRxbEE_KNb1utypYM9rvC7d01GJYNa13XJpadqE,3829
 pymodaq/utils/parameter/pymodaq_ptypes/filedir.py,sha256=IVFbN08Dr3NdRNMufBef1vWr2igiULq3CBu10-yLqSQ,4571
-pymodaq/utils/parameter/pymodaq_ptypes/itemselect.py,sha256=
+pymodaq/utils/parameter/pymodaq_ptypes/itemselect.py,sha256=Zxg1HpRDz8R0aQ38ZGD5vfXwIyR6bPddDTHul_zxGes,11002
 pymodaq/utils/parameter/pymodaq_ptypes/led.py,sha256=oDKM3k4aPy_CArgQIEqjLx3KG5CR7NrywacJKalmKQ4,1129
 pymodaq/utils/parameter/pymodaq_ptypes/list.py,sha256=bXGlUxKVAA5_QY41BmzG2iWWoh1Qq-w-QHgMo-_Kc7Q,5716
 pymodaq/utils/parameter/pymodaq_ptypes/numeric.py,sha256=lzklQWAnzh9LDC20hdlGbAj2NdVzNDDeJke1KnPi7GU,560
@@ -386,7 +386,7 @@ pymodaq/utils/plotting/data_viewers/viewer1D.py,sha256=aR-Qbxd1Pr28DF4kLJDp4Ph65
 pymodaq/utils/plotting/data_viewers/viewer1Dbasic.py,sha256=sfqiuNPlsF2SbhFwVfLD7CzfKD7UUlHITb5LC1clXE0,7407
 pymodaq/utils/plotting/data_viewers/viewer2D.py,sha256=EnJ8NYay9iRm6dsyMtKjGX4Uqx8lSXO1Ci0j1_kpVH8,42182
 pymodaq/utils/plotting/data_viewers/viewer2D_basic.py,sha256=aRLu8JVZZI8PH6Lxl8oITpHwUXaUY3PyLW6eHzkf76o,5588
-pymodaq/utils/plotting/data_viewers/viewerND.py,sha256=
+pymodaq/utils/plotting/data_viewers/viewerND.py,sha256=BV2cs_HWH9vaZ0JwoDo-oFNCBcAHwLiJAH9fwZTWOM8,38315
 pymodaq/utils/plotting/items/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pymodaq/utils/plotting/items/axis_scaled.py,sha256=YVSqpSs2gfALdsH_wKheYf6fgEQ0-7tU2zWu709K3h4,3245
 pymodaq/utils/plotting/items/crosshair.py,sha256=hVoM3dDtRmYGfzZJsP-Yhyfnx2HV1g-nLSQMVl6ZZXY,3064
@@ -402,7 +402,7 @@ pymodaq/utils/scanner/scan_config.py,sha256=mRAhHrkulXcO05IjmalTxmUPTGkH_sHJQu6I
 pymodaq/utils/scanner/scan_factory.py,sha256=tKYY72xdVpc73fOhAUzkQ8NbKAtnNnW2C3C_qrCm_HE,9601
 pymodaq/utils/scanner/scanner.py,sha256=OrKQa0g_R5ZHW7xVpLAEw5v1Ju9RUayfvDK5xmRPOPU,10843
 pymodaq/utils/scanner/utils.py,sha256=xHQaNvWwVLnQvI8fVbLqMO1M6XOJjV5n39e3f7v8Zjc,3682
-pymodaq/utils/scanner/scanners/_1d_scanners.py,sha256=
+pymodaq/utils/scanner/scanners/_1d_scanners.py,sha256=rmqyHCSCD5HynuFELlzRd7URliz1rlwBcGA6TK1uceM,7694
 pymodaq/utils/scanner/scanners/_2d_scanners.py,sha256=-25DBTJrO7k7REREGH5i-6DbVRApc76B3OaJeeJTC1M,15560
 pymodaq/utils/scanner/scanners/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 pymodaq/utils/scanner/scanners/sequential.py,sha256=7hcjg0csoA-umV3hd51AdiD0THZjcI2JorMzhS81BO4,7842
@@ -415,8 +415,8 @@ pymodaq/utils/tcp_ip/__init__.py,sha256=1e_EK0AgvdoLAD_CSGGEaITZdy6OWCO7ih9IAIp7
 pymodaq/utils/tcp_ip/mysocket.py,sha256=StAWj8dzHeMnbLj68Sel81uWFy-YkKVNRnVf7gXrESI,3452
 pymodaq/utils/tcp_ip/serializer.py,sha256=zpgf1KTYvRnD7wY3YT1ym3vHw6T1Zd3Y3eBAr91B_SY,23205
 pymodaq/utils/tcp_ip/tcp_server_client.py,sha256=eZswNZWKyL6ZFHuSROf3nL53fVgDNXMV1PMK5lJNS1o,30490
-pymodaq-4.1.
-pymodaq-4.1.
-pymodaq-4.1.
-pymodaq-4.1.
-pymodaq-4.1.
+pymodaq-4.1.1.dist-info/METADATA,sha256=prS3VxXw5Dg2M1nbiLTUVRqr66qf5g_H695iBNbTBbg,7575
+pymodaq-4.1.1.dist-info/WHEEL,sha256=TJPnKdtrSue7xZ_AVGkp9YXcvDrobsjBds1du3Nx6dc,87
+pymodaq-4.1.1.dist-info/entry_points.txt,sha256=RAzdYNjvUT28I2eiCKki_g2NzXq0woWxhev6lwzwRv8,348
+pymodaq-4.1.1.dist-info/licenses/LICENSE,sha256=VKOejxexXAe3XwfhAhcFGqeXQ12irxVHdeAojZwFEI8,1108
+pymodaq-4.1.1.dist-info/RECORD,,
{pymodaq-4.1.0.dist-info → pymodaq-4.1.1.dist-info}/WHEEL: file without changes
{pymodaq-4.1.0.dist-info → pymodaq-4.1.1.dist-info}/entry_points.txt: file without changes
{pymodaq-4.1.0.dist-info → pymodaq-4.1.1.dist-info}/licenses/LICENSE: file without changes