pymodaq 3.6.12__py3-none-any.whl → 4.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pymodaq might be problematic. Click here for more details.
- pymodaq/__init__.py +13 -6
- pymodaq/control_modules/__init__.py +0 -7
- pymodaq/control_modules/daq_move.py +965 -2
- pymodaq/control_modules/daq_move_ui.py +319 -0
- pymodaq/control_modules/daq_viewer.py +1573 -3
- pymodaq/control_modules/daq_viewer_ui.py +393 -0
- pymodaq/control_modules/mocks.py +51 -0
- pymodaq/control_modules/move_utility_classes.py +709 -8
- pymodaq/control_modules/utils.py +256 -0
- pymodaq/control_modules/viewer_utility_classes.py +663 -6
- pymodaq/daq_utils.py +89 -0
- pymodaq/dashboard.py +91 -72
- pymodaq/examples/custom_app.py +12 -11
- pymodaq/examples/custom_viewer.py +10 -10
- pymodaq/examples/function_plotter.py +16 -13
- pymodaq/examples/nonlinearscanner.py +8 -6
- pymodaq/examples/parameter_ex.py +7 -7
- pymodaq/examples/preset_MockCamera.xml +1 -0
- pymodaq/extensions/__init__.py +16 -0
- pymodaq/extensions/console.py +76 -0
- pymodaq/{daq_logger.py → extensions/daq_logger.py} +115 -65
- pymodaq/extensions/daq_scan.py +1339 -0
- pymodaq/extensions/daq_scan_ui.py +240 -0
- pymodaq/extensions/h5browser.py +23 -0
- pymodaq/{pid → extensions/pid}/__init__.py +4 -2
- pymodaq/{pid → extensions/pid}/daq_move_PID.py +2 -2
- pymodaq/{pid → extensions/pid}/pid_controller.py +48 -36
- pymodaq/{pid → extensions/pid}/utils.py +52 -6
- pymodaq/extensions/utils.py +40 -0
- pymodaq/post_treatment/__init__.py +6 -0
- pymodaq/{daq_analysis → post_treatment/daq_analysis}/daq_analysis_main.py +17 -17
- pymodaq/{daq_measurement → post_treatment/daq_measurement}/daq_measurement_main.py +8 -14
- pymodaq/post_treatment/load_and_plot.py +219 -0
- pymodaq/post_treatment/process_to_scalar.py +263 -0
- pymodaq/resources/QtDesigner_Ressources/Icon_Library/run_all.png +0 -0
- pymodaq/resources/QtDesigner_Ressources/Icon_Library/stop_all.png +0 -0
- pymodaq/resources/QtDesigner_Ressources/QtDesigner_ressources.bat +1 -1
- pymodaq/resources/QtDesigner_Ressources/QtDesigner_ressources.qrc +1 -0
- pymodaq/resources/QtDesigner_Ressources/QtDesigner_ressources_rc.py +109784 -109173
- pymodaq/resources/QtDesigner_Ressources/icons.svg +142 -0
- pymodaq/resources/VERSION +1 -1
- pymodaq/resources/config_template.toml +32 -13
- pymodaq/resources/preset_default.xml +1 -1
- pymodaq/{daq_utils → utils}/Tuto innosetup/script_full_setup.iss +1 -1
- pymodaq/utils/__init__.py +0 -29
- pymodaq/utils/abstract/__init__.py +48 -0
- pymodaq/{daq_utils → utils}/abstract/logger.py +7 -3
- pymodaq/utils/array_manipulation.py +379 -8
- pymodaq/{daq_utils → utils}/calibration_camera.py +6 -6
- pymodaq/{daq_utils → utils}/chrono_timer.py +1 -1
- pymodaq/utils/config.py +448 -0
- pymodaq/utils/conftests.py +5 -0
- pymodaq/utils/daq_utils.py +828 -8
- pymodaq/utils/data.py +1873 -7
- pymodaq/{daq_utils → utils}/db/db_logger/db_logger.py +86 -47
- pymodaq/{daq_utils → utils}/db/db_logger/db_logger_models.py +31 -10
- pymodaq/{daq_utils → utils}/enums.py +12 -7
- pymodaq/utils/exceptions.py +37 -0
- pymodaq/utils/factory.py +82 -0
- pymodaq/{daq_utils → utils}/gui_utils/__init__.py +1 -1
- pymodaq/utils/gui_utils/custom_app.py +129 -0
- pymodaq/utils/gui_utils/file_io.py +66 -0
- pymodaq/{daq_utils → utils}/gui_utils/layout.py +2 -2
- pymodaq/{daq_utils → utils}/gui_utils/utils.py +13 -3
- pymodaq/{daq_utils → utils}/gui_utils/widgets/__init__.py +2 -2
- pymodaq/utils/gui_utils/widgets/label.py +24 -0
- pymodaq/{daq_utils → utils}/gui_utils/widgets/lcd.py +12 -7
- pymodaq/{daq_utils → utils}/gui_utils/widgets/push.py +66 -2
- pymodaq/{daq_utils → utils}/gui_utils/widgets/qled.py +6 -4
- pymodaq/utils/gui_utils/widgets/spinbox.py +24 -0
- pymodaq/{daq_utils → utils}/gui_utils/widgets/table.py +2 -2
- pymodaq/utils/h5modules/__init__.py +1 -0
- pymodaq/{daq_utils/h5backend.py → utils/h5modules/backends.py} +200 -112
- pymodaq/utils/h5modules/browsing.py +683 -0
- pymodaq/utils/h5modules/data_saving.py +839 -0
- pymodaq/utils/h5modules/h5logging.py +110 -0
- pymodaq/utils/h5modules/module_saving.py +350 -0
- pymodaq/utils/h5modules/saving.py +914 -0
- pymodaq/utils/h5modules/utils.py +85 -0
- pymodaq/utils/logger.py +64 -6
- pymodaq/utils/managers/action_manager.py +460 -0
- pymodaq/{daq_utils → utils}/managers/batchscan_manager.py +144 -112
- pymodaq/{daq_utils → utils}/managers/modules_manager.py +188 -114
- pymodaq/{daq_utils → utils}/managers/overshoot_manager.py +3 -3
- pymodaq/utils/managers/parameter_manager.py +110 -0
- pymodaq/{daq_utils → utils}/managers/preset_manager.py +17 -13
- pymodaq/{daq_utils → utils}/managers/preset_manager_utils.py +8 -7
- pymodaq/{daq_utils → utils}/managers/remote_manager.py +7 -6
- pymodaq/{daq_utils → utils}/managers/roi_manager.py +148 -57
- pymodaq/utils/math_utils.py +546 -10
- pymodaq/{daq_utils → utils}/messenger.py +5 -1
- pymodaq/utils/parameter/__init__.py +2 -15
- pymodaq/{daq_utils → utils}/parameter/ioxml.py +12 -6
- pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/__init__.py +1 -3
- pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/filedir.py +1 -1
- pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/itemselect.py +3 -0
- pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/led.py +1 -1
- pymodaq/utils/parameter/pymodaq_ptypes/pixmap.py +161 -0
- pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/slide.py +1 -1
- pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/table.py +1 -1
- pymodaq/utils/parameter/utils.py +206 -11
- pymodaq/utils/plotting/data_viewers/__init__.py +6 -0
- pymodaq/utils/plotting/data_viewers/viewer.py +393 -0
- pymodaq/utils/plotting/data_viewers/viewer0D.py +251 -0
- pymodaq/utils/plotting/data_viewers/viewer1D.py +574 -0
- pymodaq/{daq_utils → utils}/plotting/data_viewers/viewer1Dbasic.py +8 -3
- pymodaq/{daq_utils → utils}/plotting/data_viewers/viewer2D.py +292 -357
- pymodaq/{daq_utils → utils}/plotting/data_viewers/viewer2D_basic.py +58 -75
- pymodaq/utils/plotting/data_viewers/viewerND.py +738 -0
- pymodaq/{daq_utils → utils}/plotting/gant_chart.py +2 -2
- pymodaq/{daq_utils → utils}/plotting/items/axis_scaled.py +4 -2
- pymodaq/{daq_utils → utils}/plotting/items/image.py +8 -6
- pymodaq/utils/plotting/navigator.py +355 -0
- pymodaq/utils/plotting/scan_selector.py +480 -0
- pymodaq/utils/plotting/utils/axes_viewer.py +88 -0
- pymodaq/utils/plotting/utils/filter.py +538 -0
- pymodaq/utils/plotting/utils/lineout.py +224 -0
- pymodaq/{daq_utils → utils}/plotting/utils/plot_utils.py +196 -84
- pymodaq/{daq_utils → utils}/plotting/utils/signalND.py +21 -13
- pymodaq/utils/plotting/widgets.py +76 -0
- pymodaq/utils/scanner/__init__.py +10 -0
- pymodaq/utils/scanner/scan_factory.py +204 -0
- pymodaq/utils/scanner/scanner.py +271 -0
- pymodaq/utils/scanner/scanners/_1d_scanners.py +117 -0
- pymodaq/utils/scanner/scanners/_2d_scanners.py +293 -0
- pymodaq/utils/scanner/scanners/sequential.py +192 -0
- pymodaq/utils/scanner/scanners/tabular.py +294 -0
- pymodaq/utils/scanner/utils.py +83 -0
- pymodaq/utils/slicing.py +47 -0
- pymodaq/utils/svg/__init__.py +6 -0
- pymodaq/utils/svg/svg_renderer.py +20 -0
- pymodaq/utils/svg/svg_view.py +35 -0
- pymodaq/utils/svg/svg_viewer2D.py +51 -0
- pymodaq/{daq_utils → utils}/tcp_server_client.py +36 -37
- pymodaq/{daq_utils → utils}/tree_layout/tree_layout_main.py +50 -35
- pymodaq/utils/units.py +216 -0
- pymodaq-4.0.1.dist-info/METADATA +159 -0
- {pymodaq-3.6.12.dist-info → pymodaq-4.0.1.dist-info}/RECORD +167 -170
- {pymodaq-3.6.12.dist-info → pymodaq-4.0.1.dist-info}/WHEEL +1 -2
- pymodaq-4.0.1.dist-info/entry_points.txt +8 -0
- pymodaq/daq_move/daq_move_gui.py +0 -279
- pymodaq/daq_move/daq_move_gui.ui +0 -534
- pymodaq/daq_move/daq_move_main.py +0 -1042
- pymodaq/daq_move/process_from_QtDesigner_DAQ_Move_GUI.bat +0 -2
- pymodaq/daq_move/utility_classes.py +0 -671
- pymodaq/daq_scan.py +0 -2160
- pymodaq/daq_utils/array_manipulation.py +0 -386
- pymodaq/daq_utils/config.py +0 -273
- pymodaq/daq_utils/conftests.py +0 -7
- pymodaq/daq_utils/custom_parameter_tree.py +0 -9
- pymodaq/daq_utils/daq_enums.py +0 -133
- pymodaq/daq_utils/daq_utils.py +0 -1402
- pymodaq/daq_utils/exceptions.py +0 -71
- pymodaq/daq_utils/gui_utils/custom_app.py +0 -103
- pymodaq/daq_utils/gui_utils/file_io.py +0 -75
- pymodaq/daq_utils/gui_utils/widgets/spinbox.py +0 -9
- pymodaq/daq_utils/h5exporter_hyperspy.py +0 -115
- pymodaq/daq_utils/h5exporters.py +0 -242
- pymodaq/daq_utils/h5modules.py +0 -1559
- pymodaq/daq_utils/h5utils.py +0 -241
- pymodaq/daq_utils/managers/action_manager.py +0 -236
- pymodaq/daq_utils/managers/parameter_manager.py +0 -57
- pymodaq/daq_utils/math_utils.py +0 -705
- pymodaq/daq_utils/parameter/__init__.py +0 -1
- pymodaq/daq_utils/parameter/oldpymodaq_ptypes.py +0 -1626
- pymodaq/daq_utils/parameter/pymodaq_ptypes/pixmap.py +0 -85
- pymodaq/daq_utils/parameter/utils.py +0 -136
- pymodaq/daq_utils/plotting/data_viewers/__init__.py +0 -0
- pymodaq/daq_utils/plotting/data_viewers/process_from_QtDesigner_0DViewer_GUI.bat +0 -2
- pymodaq/daq_utils/plotting/data_viewers/viewer0D.py +0 -204
- pymodaq/daq_utils/plotting/data_viewers/viewer0D_GUI.py +0 -89
- pymodaq/daq_utils/plotting/data_viewers/viewer0D_GUI.ui +0 -131
- pymodaq/daq_utils/plotting/data_viewers/viewer1D.py +0 -781
- pymodaq/daq_utils/plotting/data_viewers/viewerND.py +0 -894
- pymodaq/daq_utils/plotting/data_viewers/viewerbase.py +0 -64
- pymodaq/daq_utils/plotting/items/__init__.py +0 -0
- pymodaq/daq_utils/plotting/navigator.py +0 -500
- pymodaq/daq_utils/plotting/scan_selector.py +0 -289
- pymodaq/daq_utils/plotting/utils/__init__.py +0 -0
- pymodaq/daq_utils/plotting/utils/filter.py +0 -236
- pymodaq/daq_utils/plotting/viewer0D/__init__.py +0 -0
- pymodaq/daq_utils/plotting/viewer0D/viewer0D_main.py +0 -4
- pymodaq/daq_utils/plotting/viewer1D/__init__.py +0 -0
- pymodaq/daq_utils/plotting/viewer1D/viewer1D_main.py +0 -4
- pymodaq/daq_utils/plotting/viewer1D/viewer1Dbasic.py +0 -4
- pymodaq/daq_utils/plotting/viewer2D/viewer_2D_basic.py +0 -4
- pymodaq/daq_utils/plotting/viewer2D/viewer_2D_main.py +0 -4
- pymodaq/daq_utils/plotting/viewerND/__init__.py +0 -0
- pymodaq/daq_utils/plotting/viewerND/viewerND_main.py +0 -4
- pymodaq/daq_utils/scanner.py +0 -1289
- pymodaq/daq_utils/tree_layout/__init__.py +0 -0
- pymodaq/daq_viewer/__init__.py +0 -0
- pymodaq/daq_viewer/daq_gui_settings.py +0 -237
- pymodaq/daq_viewer/daq_gui_settings.ui +0 -441
- pymodaq/daq_viewer/daq_viewer_main.py +0 -2225
- pymodaq/daq_viewer/process_from_QtDesigner_DAQ_GUI_settings.bat +0 -2
- pymodaq/daq_viewer/utility_classes.py +0 -673
- pymodaq/examples/logger_image/__init__.py +0 -0
- pymodaq/examples/logger_image/logger_displayer.py +0 -121
- pymodaq/examples/logger_image/setup.svg +0 -3119
- pymodaq/examples/logger_image/setup_svg.py +0 -114
- pymodaq/h5browser.py +0 -39
- pymodaq/utils/scanner.py +0 -15
- pymodaq-3.6.12.dist-info/METADATA +0 -39
- pymodaq-3.6.12.dist-info/entry_points.txt +0 -8
- pymodaq-3.6.12.dist-info/top_level.txt +0 -1
- /pymodaq/{daq_analysis → post_treatment/daq_analysis}/__init__.py +0 -0
- /pymodaq/{daq_measurement → post_treatment/daq_measurement}/__init__.py +0 -0
- /pymodaq/{daq_measurement → post_treatment/daq_measurement}/daq_measurement_GUI.py +0 -0
- /pymodaq/{daq_measurement → post_treatment/daq_measurement}/daq_measurement_GUI.ui +0 -0
- /pymodaq/{daq_measurement → post_treatment/daq_measurement}/process_from_QtDesigner_DAQ_Measurement_GUI.bat +0 -0
- /pymodaq/{daq_utils → utils}/Tuto innosetup/Tuto innosetup.odt +0 -0
- /pymodaq/{daq_utils → utils}/Tuto innosetup/Tuto innosetup.pdf +0 -0
- /pymodaq/{daq_move → utils/db}/__init__.py +0 -0
- /pymodaq/{daq_utils → utils/db/db_logger}/__init__.py +0 -0
- /pymodaq/{daq_utils → utils}/gui_utils/dock.py +0 -0
- /pymodaq/{daq_utils → utils}/gui_utils/list_picker.py +0 -0
- /pymodaq/{daq_utils/abstract → utils/managers}/__init__.py +0 -0
- /pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/bool.py +0 -0
- /pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/date.py +0 -0
- /pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/list.py +0 -0
- /pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/numeric.py +0 -0
- /pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/tableview.py +0 -0
- /pymodaq/{daq_utils → utils}/parameter/pymodaq_ptypes/text.py +0 -0
- /pymodaq/{daq_utils/db → utils/plotting}/__init__.py +0 -0
- /pymodaq/{daq_utils → utils}/plotting/image_viewer.py +0 -0
- /pymodaq/{daq_utils/db/db_logger → utils/plotting/items}/__init__.py +0 -0
- /pymodaq/{daq_utils → utils}/plotting/items/crosshair.py +0 -0
- /pymodaq/{daq_utils/managers → utils/plotting/utils}/__init__.py +0 -0
- /pymodaq/{daq_utils → utils}/qvariant.py +0 -0
- /pymodaq/{daq_utils/plotting/viewer2D → utils/scanner/scanners}/__init__.py +0 -0
- /pymodaq/{daq_utils/plotting → utils/tree_layout}/__init__.py +0 -0
- {pymodaq-3.6.12.dist-info → pymodaq-4.0.1.dist-info/licenses}/LICENSE +0 -0
pymodaq/utils/data.py
CHANGED
|
@@ -1,15 +1,1881 @@
|
|
|
1
1
|
# -*- coding: utf-8 -*-
|
|
2
2
|
"""
|
|
3
|
-
Created the
|
|
3
|
+
Created the 28/10/2022
|
|
4
4
|
|
|
5
5
|
@author: Sebastien Weber
|
|
6
6
|
"""
|
|
7
|
+
from __future__ import annotations
|
|
7
8
|
|
|
8
|
-
from
|
|
9
|
+
from abc import ABCMeta, abstractmethod, abstractproperty
|
|
10
|
+
import numbers
|
|
11
|
+
import numpy as np
|
|
12
|
+
from typing import List, Tuple, Union
|
|
13
|
+
from typing import Iterable as IterableType
|
|
14
|
+
from collections.abc import Iterable
|
|
15
|
+
import logging
|
|
9
16
|
|
|
17
|
+
import warnings
|
|
18
|
+
from time import time
|
|
19
|
+
import copy
|
|
20
|
+
|
|
21
|
+
from multipledispatch import dispatch
|
|
22
|
+
from pymodaq.utils.enums import BaseEnum, enum_checker
|
|
23
|
+
from pymodaq.utils.messenger import deprecation_msg
|
|
24
|
+
from pymodaq.utils.daq_utils import find_objects_in_list_from_attr_name_val
|
|
25
|
+
from pymodaq.utils.logger import set_logger, get_module_name
|
|
26
|
+
from pymodaq.utils.slicing import SpecialSlicersData
|
|
27
|
+
from pymodaq.utils import math_utils as mutils
|
|
28
|
+
|
|
29
|
+
logger = set_logger(get_module_name(__file__))
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class DataIndexWarning(Warning):
|
|
33
|
+
pass
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class DataTypeWarning(Warning):
|
|
37
|
+
pass
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class DataDimWarning(Warning):
|
|
41
|
+
pass
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class DataSizeWarning(Warning):
|
|
45
|
+
pass
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
WARNINGS = [DataIndexWarning, DataTypeWarning, DataDimWarning, DataSizeWarning]
|
|
49
|
+
|
|
50
|
+
if logging.getLevelName(logger.level) == 'DEBUG':
|
|
51
|
+
for warning in WARNINGS:
|
|
52
|
+
warnings.filterwarnings('default', category=warning)
|
|
53
|
+
else:
|
|
54
|
+
for warning in WARNINGS:
|
|
55
|
+
warnings.filterwarnings('ignore', category=warning)
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class DataShapeError(Exception):
|
|
59
|
+
pass
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
class DataLengthError(Exception):
|
|
63
|
+
pass
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class DataDim(BaseEnum):
|
|
67
|
+
"""Enum for dimensionality representation of data"""
|
|
68
|
+
Data0D = 0
|
|
69
|
+
Data1D = 1
|
|
70
|
+
Data2D = 2
|
|
71
|
+
DataND = 3
|
|
72
|
+
|
|
73
|
+
def __le__(self, other_dim: 'DataDim'):
|
|
74
|
+
return self.value.__le__(other_dim.value)
|
|
75
|
+
|
|
76
|
+
def __lt__(self, other_dim: 'DataDim'):
|
|
77
|
+
return self.value.__lt__(other_dim.value)
|
|
78
|
+
|
|
79
|
+
def __ge__(self, other_dim: 'DataDim'):
|
|
80
|
+
other_dim = enum_checker(DataDim, other_dim)
|
|
81
|
+
return self.value.__ge__(other_dim.value)
|
|
82
|
+
|
|
83
|
+
def __gt__(self, other_dim: 'DataDim'):
|
|
84
|
+
return self.value.__gt__(other_dim.value)
|
|
85
|
+
|
|
86
|
+
@property
|
|
87
|
+
def dim_index(self):
|
|
88
|
+
return self.value
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
class DataSource(BaseEnum):
|
|
92
|
+
"""Enum for source of data"""
|
|
93
|
+
raw = 0
|
|
94
|
+
calculated = 1
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
class DataDistribution(BaseEnum):
|
|
98
|
+
"""Enum for distribution of data"""
|
|
99
|
+
uniform = 0
|
|
100
|
+
spread = 1
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
class Axis:
|
|
104
|
+
"""Object holding info and data about physical axis of some data
|
|
105
|
+
|
|
106
|
+
In case the axis's data is linear, store the info as a scale and offset else store the data
|
|
107
|
+
|
|
108
|
+
Parameters
|
|
109
|
+
----------
|
|
110
|
+
label: str
|
|
111
|
+
The label of the axis, for instance 'time' for a temporal axis
|
|
112
|
+
units: str
|
|
113
|
+
The units of the data in the object, for instance 's' for seconds
|
|
114
|
+
data: ndarray
|
|
115
|
+
A 1D ndarray holding the data of the axis
|
|
116
|
+
index: int
|
|
117
|
+
an integer representing the index of the Data object this axis is related to
|
|
118
|
+
scaling: float
|
|
119
|
+
The scaling to apply to a linspace version in order to obtain the proper scaling
|
|
120
|
+
offset: float
|
|
121
|
+
The offset to apply to a linspace/scaled version in order to obtain the proper axis
|
|
122
|
+
spread_order: int
|
|
123
|
+
An integer needed in the case where data has a spread DataDistribution. It refers to the index along the data's
|
|
124
|
+
spread_index dimension
|
|
125
|
+
"""
|
|
126
|
+
|
|
127
|
+
def __init__(self, label: str = '', units: str = '', data: np.ndarray = None, index: int = 0, scaling=None,
|
|
128
|
+
offset=None, spread_order: int = None):
|
|
129
|
+
super().__init__()
|
|
130
|
+
|
|
131
|
+
self.iaxis: Axis = SpecialSlicersData(self, False)
|
|
132
|
+
|
|
133
|
+
self._size = None
|
|
134
|
+
self._data = None
|
|
135
|
+
self._index = None
|
|
136
|
+
self._label = None
|
|
137
|
+
self._units = None
|
|
138
|
+
self._scaling = scaling
|
|
139
|
+
self._offset = offset
|
|
140
|
+
|
|
141
|
+
self.units = units
|
|
142
|
+
self.label = label
|
|
143
|
+
self.data = data
|
|
144
|
+
self.index = index
|
|
145
|
+
self.spread_order = spread_order
|
|
146
|
+
|
|
147
|
+
self.get_scale_offset_from_data(data)
|
|
148
|
+
|
|
149
|
+
def copy(self):
|
|
150
|
+
return copy.copy(self)
|
|
151
|
+
|
|
152
|
+
@property
|
|
153
|
+
def label(self) -> str:
|
|
154
|
+
"""str: get/set the label of this axis"""
|
|
155
|
+
return self._label
|
|
156
|
+
|
|
157
|
+
@label.setter
|
|
158
|
+
def label(self, lab: str):
|
|
159
|
+
if not isinstance(lab, str):
|
|
160
|
+
raise TypeError('label for the Axis class should be a string')
|
|
161
|
+
self._label = lab
|
|
162
|
+
|
|
163
|
+
@property
|
|
164
|
+
def units(self) -> str:
|
|
165
|
+
"""str: get/set the units for this axis"""
|
|
166
|
+
return self._units
|
|
167
|
+
|
|
168
|
+
@units.setter
|
|
169
|
+
def units(self, units: str):
|
|
170
|
+
if not isinstance(units, str):
|
|
171
|
+
raise TypeError('units for the Axis class should be a string')
|
|
172
|
+
self._units = units
|
|
173
|
+
|
|
174
|
+
@property
|
|
175
|
+
def index(self) -> int:
|
|
176
|
+
"""int: get/set the index this axis corresponds to in a DataWithAxis object"""
|
|
177
|
+
return self._index
|
|
178
|
+
|
|
179
|
+
@index.setter
|
|
180
|
+
def index(self, ind: int):
|
|
181
|
+
self._check_index_valid(ind)
|
|
182
|
+
self._index = ind
|
|
183
|
+
|
|
184
|
+
@property
|
|
185
|
+
def data(self):
|
|
186
|
+
"""np.ndarray: get/set the data of Axis"""
|
|
187
|
+
return self._data
|
|
188
|
+
|
|
189
|
+
@data.setter
|
|
190
|
+
def data(self, data: np.ndarray):
|
|
191
|
+
if data is not None:
|
|
192
|
+
self._check_data_valid(data)
|
|
193
|
+
self.get_scale_offset_from_data(data)
|
|
194
|
+
self._size = data.size
|
|
195
|
+
else:
|
|
196
|
+
self._size = 0
|
|
197
|
+
self._data = data
|
|
198
|
+
|
|
199
|
+
def get_data(self) -> np.ndarray:
|
|
200
|
+
"""Convenience method to obtain the axis data (usually None because scaling and offset are used)"""
|
|
201
|
+
return self._data if self._data is not None else self._linear_data(self.size)
|
|
202
|
+
|
|
203
|
+
def get_scale_offset_from_data(self, data: np.ndarray = None):
|
|
204
|
+
"""Get the scaling and offset from the axis's data
|
|
205
|
+
|
|
206
|
+
If data is not None, extract the scaling and offset
|
|
207
|
+
|
|
208
|
+
Parameters
|
|
209
|
+
----------
|
|
210
|
+
data: ndarray
|
|
211
|
+
"""
|
|
212
|
+
if data is None and self._data is not None:
|
|
213
|
+
data = self._data
|
|
214
|
+
|
|
215
|
+
if self.is_axis_linear(data):
|
|
216
|
+
self._scaling = np.mean(np.diff(data))
|
|
217
|
+
self._offset = data[0]
|
|
218
|
+
self._data = None
|
|
219
|
+
|
|
220
|
+
def is_axis_linear(self, data=None):
|
|
221
|
+
if data is None:
|
|
222
|
+
data = self._data
|
|
223
|
+
if data is not None:
|
|
224
|
+
return np.allclose(np.diff(data), np.mean(np.diff(data)))
|
|
225
|
+
else:
|
|
226
|
+
return False
|
|
227
|
+
|
|
228
|
+
@property
|
|
229
|
+
def scaling(self):
|
|
230
|
+
return self._scaling
|
|
231
|
+
|
|
232
|
+
@scaling.setter
|
|
233
|
+
def scaling(self, _scaling: float):
|
|
234
|
+
self._scaling = _scaling
|
|
235
|
+
|
|
236
|
+
@property
|
|
237
|
+
def offset(self):
|
|
238
|
+
return self._offset
|
|
239
|
+
|
|
240
|
+
@offset.setter
|
|
241
|
+
def offset(self, _offset: float):
|
|
242
|
+
self._offset = _offset
|
|
243
|
+
|
|
244
|
+
@property
|
|
245
|
+
def size(self) -> int:
|
|
246
|
+
"""int: get/set the size/length of the 1D ndarray"""
|
|
247
|
+
return self._size
|
|
248
|
+
|
|
249
|
+
@size.setter
|
|
250
|
+
def size(self, _size: int):
|
|
251
|
+
if self._data is None:
|
|
252
|
+
self._size = _size
|
|
253
|
+
|
|
254
|
+
@staticmethod
|
|
255
|
+
def _check_index_valid(index: int):
|
|
256
|
+
if not isinstance(index, int):
|
|
257
|
+
raise TypeError('index for the Axis class should be a positive integer')
|
|
258
|
+
elif index < 0:
|
|
259
|
+
raise ValueError('index for the Axis class should be a positive integer')
|
|
260
|
+
|
|
261
|
+
@staticmethod
|
|
262
|
+
def _check_data_valid(data):
|
|
263
|
+
if not isinstance(data, np.ndarray):
|
|
264
|
+
raise TypeError(f'data for the Axis class should be a 1D numpy array')
|
|
265
|
+
elif len(data.shape) != 1:
|
|
266
|
+
raise ValueError(f'data for the Axis class should be a 1D numpy array')
|
|
267
|
+
|
|
268
|
+
def _linear_data(self, nsteps: int):
|
|
269
|
+
"""create axis data with a linear version using scaling and offset"""
|
|
270
|
+
return self._offset + self._scaling * np.linspace(0, nsteps-1, nsteps)
|
|
271
|
+
|
|
272
|
+
def create_linear_data(self, nsteps:int):
|
|
273
|
+
"""replace the axis data with a linear version using scaling and offset"""
|
|
274
|
+
self.data = self._linear_data(nsteps)
|
|
275
|
+
|
|
276
|
+
@staticmethod
|
|
277
|
+
def create_simple_linear_data(nsteps: int):
|
|
278
|
+
return np.linspace(0, nsteps-1, nsteps)
|
|
279
|
+
|
|
280
|
+
def __len__(self):
|
|
281
|
+
return self.size
|
|
282
|
+
|
|
283
|
+
def _slicer(self, _slice, *ignored, **ignored_also):
|
|
284
|
+
ax = copy.deepcopy(self)
|
|
285
|
+
if isinstance(_slice, int):
|
|
286
|
+
return None
|
|
287
|
+
elif _slice is Ellipsis:
|
|
288
|
+
return ax
|
|
289
|
+
elif isinstance(_slice, slice):
|
|
290
|
+
if ax._data is not None:
|
|
291
|
+
ax.data = ax._data.__getitem__(_slice)
|
|
292
|
+
return ax
|
|
293
|
+
else:
|
|
294
|
+
start = _slice.start if _slice.start is not None else 0
|
|
295
|
+
stop = _slice.stop if _slice.stop is not None else self.size
|
|
296
|
+
|
|
297
|
+
ax._offset = ax.offset + start * ax.scaling
|
|
298
|
+
ax._size = stop - start
|
|
299
|
+
return ax
|
|
300
|
+
|
|
301
|
+
def __getitem__(self, item):
|
|
302
|
+
if hasattr(self, item):
|
|
303
|
+
# for when axis was a dict
|
|
304
|
+
deprecation_msg('attributes from an Axis object should not be fetched using __getitem__')
|
|
305
|
+
return getattr(self, item)
|
|
306
|
+
|
|
307
|
+
def __repr__(self):
|
|
308
|
+
return f'{self.__class__.__name__}: <label: {self.label}> - <units: {self.units}> - <index: {self.index}>'
|
|
309
|
+
|
|
310
|
+
def __mul__(self, scale: numbers.Real):
|
|
311
|
+
if isinstance(scale, numbers.Real):
|
|
312
|
+
ax = copy.deepcopy(self)
|
|
313
|
+
if self.data is not None:
|
|
314
|
+
ax.data *= scale
|
|
315
|
+
else:
|
|
316
|
+
ax._offset *= scale
|
|
317
|
+
ax._scaling *= scale
|
|
318
|
+
return ax
|
|
319
|
+
|
|
320
|
+
def __add__(self, offset: numbers.Real):
|
|
321
|
+
if isinstance(offset, numbers.Real):
|
|
322
|
+
ax = copy.deepcopy(self)
|
|
323
|
+
if self.data is not None:
|
|
324
|
+
ax.data += offset
|
|
325
|
+
else:
|
|
326
|
+
ax._offset += offset
|
|
327
|
+
return ax
|
|
328
|
+
|
|
329
|
+
def __eq__(self, other):
|
|
330
|
+
eq = self.label == other.label
|
|
331
|
+
eq = eq and (self.units == other.units)
|
|
332
|
+
eq = eq and (self.index == other.index)
|
|
333
|
+
if self.data is not None and other.data is not None:
|
|
334
|
+
eq = eq and (np.allclose(self.data, other.data))
|
|
335
|
+
else:
|
|
336
|
+
eq = eq and self.offset == other.offset
|
|
337
|
+
eq = eq and self.scaling == other.scaling
|
|
338
|
+
|
|
339
|
+
return eq
|
|
340
|
+
|
|
341
|
+
def mean(self):
|
|
342
|
+
if self._data is not None:
|
|
343
|
+
return np.mean(self._data)
|
|
344
|
+
else:
|
|
345
|
+
return self.offset + self.size / 2 * self.scaling
|
|
346
|
+
|
|
347
|
+
def min(self):
|
|
348
|
+
if self._data is not None:
|
|
349
|
+
return np.min(self._data)
|
|
350
|
+
else:
|
|
351
|
+
return self.offset + (self.size * self.scaling if self.scaling < 0 else 0)
|
|
352
|
+
|
|
353
|
+
def max(self):
|
|
354
|
+
if self._data is not None:
|
|
355
|
+
return np.max(self._data)
|
|
356
|
+
else:
|
|
357
|
+
return self.offset + (self.size * self.scaling if self.scaling > 0 else 0)
|
|
358
|
+
|
|
359
|
+
def find_index(self, threshold: float) -> int:
|
|
360
|
+
"""find the index of the threshold value within the axis"""
|
|
361
|
+
if self._data is not None:
|
|
362
|
+
return mutils.find_index(self._data, threshold)[0][0]
|
|
363
|
+
else:
|
|
364
|
+
return int((threshold - self.offset) / self.scaling)
|
|
365
|
+
|
|
366
|
+
def find_indexes(self, thresholds: IterableType[float]) -> IterableType[int]:
|
|
367
|
+
return [self.find_index(threshold) for threshold in thresholds]
|
|
368
|
+
|
|
369
|
+
|
|
370
|
+
class NavAxis(Axis):
|
|
371
|
+
def __init__(self, *args, **kwargs):
|
|
372
|
+
super().__init__(*args, **kwargs)
|
|
373
|
+
deprecation_msg('NavAxis should not be used anymore, please use Axis object with correct index.'
|
|
374
|
+
'The navigation index should be specified in the Data object')
|
|
375
|
+
|
|
376
|
+
|
|
377
|
+
class DataLowLevel:
|
|
378
|
+
"""Abstract object for all Data Object
|
|
379
|
+
|
|
380
|
+
Parameters
|
|
381
|
+
----------
|
|
382
|
+
name: str
|
|
383
|
+
the identifier of the data
|
|
384
|
+
|
|
385
|
+
Attributes
|
|
386
|
+
----------
|
|
387
|
+
name: str
|
|
388
|
+
timestamp: float
|
|
389
|
+
Time in seconds since epoch. See method time.time()
|
|
390
|
+
"""
|
|
391
|
+
|
|
392
|
+
def __init__(self, name: str):
|
|
393
|
+
self._timestamp = time()
|
|
394
|
+
self._name = name
|
|
395
|
+
|
|
396
|
+
@property
|
|
397
|
+
def name(self):
|
|
398
|
+
"""str: the identifier of the data"""
|
|
399
|
+
return self._name
|
|
400
|
+
|
|
401
|
+
@property
|
|
402
|
+
def timestamp(self):
|
|
403
|
+
"""Get/Set the timestamp of when the object has been created"""
|
|
404
|
+
return self._timestamp
|
|
405
|
+
|
|
406
|
+
@timestamp.setter
|
|
407
|
+
def timestamp(self, timestamp: float):
|
|
408
|
+
"""The timestamp of when the object has been created"""
|
|
409
|
+
self._timestamp = timestamp
|
|
410
|
+
|
|
411
|
+
|
|
412
|
+
class DataBase(DataLowLevel):
|
|
413
|
+
"""Base object to store homogeneous data and metadata generated by pymodaq's objects. To be inherited for real data
|
|
414
|
+
|
|
415
|
+
Parameters
|
|
416
|
+
----------
|
|
417
|
+
name: str
|
|
418
|
+
the identifier of these data
|
|
419
|
+
source: DataSource or str
|
|
420
|
+
Enum specifying if data are raw or processed (for instance from roi)
|
|
421
|
+
dim: DataDim or str
|
|
422
|
+
The identifier of the data type
|
|
423
|
+
distribution: DataDistribution or str
|
|
424
|
+
The distribution type of the data: uniform if distributed on a regular grid or spread if on specific
|
|
425
|
+
unordered points
|
|
426
|
+
data: list of ndarray
|
|
427
|
+
The data the object is storing
|
|
428
|
+
labels: list of str
|
|
429
|
+
The labels of the data nd-arrays
|
|
430
|
+
origin: str
|
|
431
|
+
An identifier of the element where the data originated, for instance the DAQ_Viewer's name. Used when appending
|
|
432
|
+
DataToExport in DAQ_Scan to disintricate from which origin data comes from when scanning multiple detectors.
|
|
433
|
+
kwargs: named parameters
|
|
434
|
+
All other parameters are stored dynamically using the name/value pair. The name of these extra parameters are
|
|
435
|
+
added into the extra_attributes attribute
|
|
436
|
+
|
|
437
|
+
Attributes
|
|
438
|
+
----------
|
|
439
|
+
name: str
|
|
440
|
+
the identifier of these data
|
|
441
|
+
source: DataSource or str
|
|
442
|
+
Enum specifying if data are raw or processed (for instance from roi)
|
|
443
|
+
dim: DataDim or str
|
|
444
|
+
The identifier of the data type
|
|
445
|
+
distribution: DataDistribution or str
|
|
446
|
+
The distribution type of the data: uniform if distributed on a regular grid or spread if on specific
|
|
447
|
+
unordered points
|
|
448
|
+
data: list of ndarray
|
|
449
|
+
The data the object is storing
|
|
450
|
+
labels: list of str
|
|
451
|
+
The labels of the data nd-arrays
|
|
452
|
+
origin: str
|
|
453
|
+
An identifier of the element where the data originated, for instance the DAQ_Viewer's name. Used when appending
|
|
454
|
+
DataToExport in DAQ_Scan to disintricate from which origin data comes from when scanning multiple detectors.
|
|
455
|
+
shape: Tuple[int]
|
|
456
|
+
The shape of the underlying data
|
|
457
|
+
size: int
|
|
458
|
+
The size of the ndarrays stored in the object
|
|
459
|
+
length: int
|
|
460
|
+
The number of ndarrays stored in the object
|
|
461
|
+
extra_attributes: List[str]
|
|
462
|
+
list of string giving identifiers of the attributes added dynamically at the initialization (for instance
|
|
463
|
+
to save extra metadata using the DataSaverLoader
|
|
464
|
+
|
|
465
|
+
See Also
|
|
466
|
+
--------
|
|
467
|
+
DataWithAxes, DataFromPlugins, DataRaw, DataSaverLoader
|
|
468
|
+
"""
|
|
469
|
+
|
|
470
|
+
def __init__(self, name: str, source: DataSource = None, dim: DataDim = None,
|
|
471
|
+
distribution: DataDistribution = DataDistribution['uniform'], data: List[np.ndarray] = None,
|
|
472
|
+
labels: List[str] = [], origin: str = None, **kwargs):
|
|
473
|
+
|
|
474
|
+
super().__init__(name=name)
|
|
475
|
+
self._iter_index = 0
|
|
476
|
+
self._shape = None
|
|
477
|
+
self._size = None
|
|
478
|
+
self._data = None
|
|
479
|
+
self._length = None
|
|
480
|
+
self._labels = None
|
|
481
|
+
self._dim = dim
|
|
482
|
+
self.origin = origin
|
|
483
|
+
|
|
484
|
+
source = enum_checker(DataSource, source)
|
|
485
|
+
self._source = source
|
|
486
|
+
|
|
487
|
+
distribution = enum_checker(DataDistribution, distribution)
|
|
488
|
+
self._distribution = distribution
|
|
489
|
+
|
|
490
|
+
self.data = data # dim consistency is actually checked within the setter method
|
|
491
|
+
|
|
492
|
+
self._check_labels(labels)
|
|
493
|
+
self.extra_attributes = []
|
|
494
|
+
for key in kwargs:
|
|
495
|
+
self.extra_attributes.append(key)
|
|
496
|
+
setattr(self, key, kwargs[key])
|
|
497
|
+
|
|
498
|
+
def get_full_name(self) -> str:
|
|
499
|
+
"""Get the data ful name including the origin attribute into the returned value
|
|
500
|
+
|
|
501
|
+
Returns
|
|
502
|
+
-------
|
|
503
|
+
str: the name of the ataWithAxes data constructed as : origin/name
|
|
504
|
+
|
|
505
|
+
Examples
|
|
506
|
+
--------
|
|
507
|
+
d0 = DataBase(name='datafromdet0', origin='det0')
|
|
508
|
+
"""
|
|
509
|
+
return f'{self.origin}/{self.name}'
|
|
510
|
+
|
|
    def __repr__(self):
        """Compact representation showing class, name, dim, source and shape."""
        return f'{self.__class__.__name__} <{self.name}> <{self.dim}> <{self.source}> <{self.shape}>'
513
|
+
|
|
    def __len__(self):
        """Return the number of ndarrays stored in the object (the data length)."""
        return self.length
516
|
+
|
|
    def __iter__(self):
        """Reset the internal iteration index and return self as the iterator."""
        self._iter_index = 0
        return self
520
|
+
|
|
521
|
+
def __next__(self):
|
|
522
|
+
if self._iter_index < len(self):
|
|
523
|
+
self._iter_index += 1
|
|
524
|
+
return self.data[self._iter_index-1]
|
|
525
|
+
else:
|
|
526
|
+
raise StopIteration
|
|
527
|
+
|
|
528
|
+
def __getitem__(self, item) -> np.ndarray:
|
|
529
|
+
if isinstance(item, int) and item < len(self):
|
|
530
|
+
return self.data[item]
|
|
531
|
+
else:
|
|
532
|
+
raise IndexError(f'The index should be an integer lower than the data length')
|
|
533
|
+
|
|
534
|
+
def __setitem__(self, key, value):
|
|
535
|
+
if isinstance(key, int) and key < len(self) and isinstance(value, np.ndarray) and value.shape == self.shape:
|
|
536
|
+
self.data[key] = value
|
|
537
|
+
else:
|
|
538
|
+
raise IndexError(f'The index should be an positive integer lower than the data length')
|
|
539
|
+
|
|
540
|
+
def __add__(self, other: object):
|
|
541
|
+
if isinstance(other, DataBase) and len(other) == len(self):
|
|
542
|
+
new_data = copy.deepcopy(self)
|
|
543
|
+
for ind_array in range(len(new_data)):
|
|
544
|
+
if self[ind_array].shape != other[ind_array].shape:
|
|
545
|
+
raise ValueError('The shapes of arrays stored into the data are not consistent')
|
|
546
|
+
new_data[ind_array] = self[ind_array] + other[ind_array]
|
|
547
|
+
return new_data
|
|
548
|
+
else:
|
|
549
|
+
raise TypeError(f'Could not add a {other.__class__.__name__} or a {self.__class__.__name__} '
|
|
550
|
+
f'of a different length')
|
|
551
|
+
|
|
552
|
+
def __sub__(self, other: object):
|
|
553
|
+
if isinstance(other, DataBase) and len(other) == len(self):
|
|
554
|
+
new_data = copy.deepcopy(self)
|
|
555
|
+
for ind_array in range(len(new_data)):
|
|
556
|
+
new_data[ind_array] = self[ind_array] - other[ind_array]
|
|
557
|
+
return new_data
|
|
558
|
+
else:
|
|
559
|
+
raise TypeError(f'Could not substract a {other.__class__.__name__} or a {self.__class__.__name__} '
|
|
560
|
+
f'of a different length')
|
|
561
|
+
|
|
562
|
+
def __mul__(self, other):
|
|
563
|
+
if isinstance(other, numbers.Number):
|
|
564
|
+
new_data = copy.deepcopy(self)
|
|
565
|
+
for ind_array in range(len(new_data)):
|
|
566
|
+
new_data[ind_array] = self[ind_array] * other
|
|
567
|
+
return new_data
|
|
568
|
+
else:
|
|
569
|
+
raise TypeError(f'Could not multiply a {other.__class__.__name__} and a {self.__class__.__name__} '
|
|
570
|
+
f'of a different length')
|
|
571
|
+
|
|
572
|
+
def __truediv__(self, other):
|
|
573
|
+
if isinstance(other, numbers.Number):
|
|
574
|
+
return self * (1 / other)
|
|
575
|
+
else:
|
|
576
|
+
raise TypeError(f'Could not divide a {other.__class__.__name__} and a {self.__class__.__name__} '
|
|
577
|
+
f'of a different length')
|
|
578
|
+
|
|
579
|
+
def __eq__(self, other):
|
|
580
|
+
if isinstance(other, DataBase):
|
|
581
|
+
if not(self.name == other.name and len(self) == len(other)):
|
|
582
|
+
return False
|
|
583
|
+
eq = True
|
|
584
|
+
for ind in range(len(self)):
|
|
585
|
+
if self[ind].shape != other[ind].shape:
|
|
586
|
+
eq = False
|
|
587
|
+
break
|
|
588
|
+
eq = eq and np.allclose(self[ind], other[ind])
|
|
589
|
+
# labels_not = (self.labels == other.labels)
|
|
590
|
+
# if labels_not:
|
|
591
|
+
# logger.debug(f'labels from self:{self.labels}, other: {other.labels}')
|
|
592
|
+
# eq = (eq and labels_not)
|
|
593
|
+
return eq
|
|
594
|
+
else:
|
|
595
|
+
raise TypeError()
|
|
596
|
+
|
|
597
|
+
def average(self, other: 'DataBase', weight: int) -> 'DataBase':
|
|
598
|
+
""" Compute the weighted average between self and other DataBase and attributes it to self
|
|
599
|
+
|
|
600
|
+
Parameters
|
|
601
|
+
----------
|
|
602
|
+
other_data: DataBase
|
|
603
|
+
weight: int
|
|
604
|
+
The weight the 'other' holds with respect to self
|
|
605
|
+
|
|
606
|
+
"""
|
|
607
|
+
if isinstance(other, DataBase) and len(other) == len(self) and isinstance(weight, numbers.Number):
|
|
608
|
+
new_data = copy.copy(self)
|
|
609
|
+
return (other * (weight - 1) + new_data) / weight
|
|
610
|
+
else:
|
|
611
|
+
raise TypeError(f'Could not average a {other.__class__.__name__} or a {self.__class__.__name__} '
|
|
612
|
+
f'of a different length')
|
|
613
|
+
|
|
614
|
+
def append(self, data: DataWithAxes):
|
|
615
|
+
for dat in data:
|
|
616
|
+
if dat.shape != self.shape:
|
|
617
|
+
raise DataShapeError('Cannot append those ndarrays, they don\'t have the same shape as self')
|
|
618
|
+
self.data = self.data + data.data
|
|
619
|
+
self.labels.extend(data.labels)
|
|
620
|
+
|
|
    @property
    def shape(self):
        """The shape of the nd-arrays"""
        # set by check_shape_from_data from the first stored ndarray
        return self._shape
625
|
+
|
|
    @property
    def size(self):
        """The size of the nd-arrays"""
        # set by get_dim_from_data from the first stored ndarray
        return self._size
630
|
+
|
|
    @property
    def dim(self):
        """DataDim: the enum representing the dimensionality of the stored data"""
        return self._dim
635
|
+
|
|
    def set_dim(self, dim: Union[DataDim, str]):
        """Ad hoc modification of dim independently of the real data shape, should be used with extra care"""
        self._dim = enum_checker(DataDim, dim)
639
|
+
|
|
    @property
    def source(self):
        """DataSource: the enum representing the source of the data"""
        return self._source
644
|
+
|
|
    @property
    def distribution(self):
        """DataDistribution: the enum representing the distribution of the stored data"""
        return self._distribution
649
|
+
|
|
    @property
    def length(self):
        """The length of data. This is the length of the list containing the nd-arrays"""
        return self._length
654
|
+
|
|
    @property
    def labels(self):
        """List[str]: one label per stored ndarray (padded with CHxx defaults by _check_labels)."""
        return self._labels

    @labels.setter
    def labels(self, labels: List['str']):
        # delegates normalization/padding to _check_labels
        self._check_labels(labels)
662
|
+
|
|
663
|
+
def _check_labels(self, labels: List['str']):
|
|
664
|
+
if labels is None:
|
|
665
|
+
labels = []
|
|
666
|
+
else:
|
|
667
|
+
labels = labels[:]
|
|
668
|
+
while len(labels) < self.length:
|
|
669
|
+
labels.append(f'CH{len(labels):02d}')
|
|
670
|
+
self._labels = labels
|
|
671
|
+
|
|
    def get_data_index(self, index: int = 0):
        """Get the data by its index in the list"""
        return self.data[index]
675
|
+
|
|
676
|
+
@staticmethod
|
|
677
|
+
def _check_data_type(data: List[np.ndarray]) -> List[np.ndarray]:
|
|
678
|
+
"""make sure data is a list of nd-arrays"""
|
|
679
|
+
is_valid = True
|
|
680
|
+
if data is None:
|
|
681
|
+
is_valid = False
|
|
682
|
+
if not isinstance(data, list):
|
|
683
|
+
# try to transform the data to regular type
|
|
684
|
+
if isinstance(data, np.ndarray):
|
|
685
|
+
warnings.warn(DataTypeWarning(f'Your data should be a list of numpy arrays not just a single numpy'
|
|
686
|
+
f' array, wrapping them with a list'))
|
|
687
|
+
data = [data]
|
|
688
|
+
elif isinstance(data, numbers.Number):
|
|
689
|
+
warnings.warn(DataTypeWarning(f'Your data should be a list of numpy arrays not just a single numpy'
|
|
690
|
+
f' array, wrapping them with a list'))
|
|
691
|
+
data = [np.array([data])]
|
|
692
|
+
else:
|
|
693
|
+
is_valid = False
|
|
694
|
+
if isinstance(data, list):
|
|
695
|
+
if len(data) == 0:
|
|
696
|
+
is_valid = False
|
|
697
|
+
if not isinstance(data[0], np.ndarray):
|
|
698
|
+
is_valid = False
|
|
699
|
+
elif len(data[0].shape) == 0:
|
|
700
|
+
is_valid = False
|
|
701
|
+
if not is_valid:
|
|
702
|
+
raise TypeError(f'Data should be an non-empty list of non-empty numpy arrays')
|
|
703
|
+
return data
|
|
704
|
+
|
|
    def check_shape_from_data(self, data: List[np.ndarray]):
        """Record the shape of the first ndarray as the data shape."""
        self._shape = data[0].shape
707
|
+
|
|
708
|
+
def get_dim_from_data(self, data: List[np.ndarray]):
|
|
709
|
+
"""Get the dimensionality DataDim from data"""
|
|
710
|
+
self.check_shape_from_data(data)
|
|
711
|
+
self._size = data[0].size
|
|
712
|
+
self._length = len(data)
|
|
713
|
+
if len(self._shape) == 1 and self._size == 1:
|
|
714
|
+
dim = DataDim['Data0D']
|
|
715
|
+
elif len(self._shape) == 1 and self._size > 1:
|
|
716
|
+
dim = DataDim['Data1D']
|
|
717
|
+
elif len(self._shape) == 2:
|
|
718
|
+
dim = DataDim['Data2D']
|
|
719
|
+
else:
|
|
720
|
+
dim = DataDim['DataND']
|
|
721
|
+
return dim
|
|
722
|
+
|
|
723
|
+
def _check_shape_dim_consistency(self, data: List[np.ndarray]):
|
|
724
|
+
"""Process the dim from data or make sure data and DataDim are coherent"""
|
|
725
|
+
dim = self.get_dim_from_data(data)
|
|
726
|
+
if self._dim is None:
|
|
727
|
+
self._dim = dim
|
|
728
|
+
else:
|
|
729
|
+
self._dim = enum_checker(DataDim, self._dim)
|
|
730
|
+
if self._dim != dim:
|
|
731
|
+
warnings.warn(DataDimWarning('The specified dimensionality is not coherent with the data shape, '
|
|
732
|
+
'replacing it'))
|
|
733
|
+
self._dim = dim
|
|
734
|
+
|
|
735
|
+
def _check_same_shape(self, data: List[np.ndarray]):
|
|
736
|
+
"""Check that all nd-arrays have the same shape"""
|
|
737
|
+
for dat in data:
|
|
738
|
+
if dat.shape != self.shape:
|
|
739
|
+
raise DataShapeError('The shape of the ndarrays in data is not the same')
|
|
740
|
+
|
|
    @property
    def data(self) -> List[np.ndarray]:
        """List[np.ndarray]: get/set (and check) the data the object is storing"""
        return self._data

    @data.setter
    def data(self, data: List[np.ndarray]):
        # validate the container type, infer/verify dim, then enforce one common shape
        data = self._check_data_type(data)
        self._check_shape_dim_consistency(data)
        self._check_same_shape(data)
        self._data = data
752
|
+
|
|
753
|
+
|
|
class AxesManagerBase:
    """Abstract manager for the Axis objects attached to a data shape.

    Keeps track of which data dimensions are navigation axes and which are
    signal axes, and exposes helpers to fetch/append axes by index.
    Distribution-specific behavior (uniform grid vs spread points) is
    implemented by subclasses.
    """

    def __init__(self, data_shape: Tuple[int], axes: List[Axis], nav_indexes=None, sig_indexes=None, **kwargs):
        self._data_shape = data_shape[:]  # initial shape needed for self._check_axis
        self._axes = axes[:]
        self._nav_indexes = nav_indexes
        self._sig_indexes = sig_indexes if sig_indexes is not None else self.compute_sig_indexes()

        self._check_axis(self._axes)
        self._manage_named_axes(self._axes, **kwargs)

    @property
    def axes(self):
        """List[Axis]: the declared axes; setting them re-runs the consistency check"""
        return self._axes

    @axes.setter
    def axes(self, axes: List[Axis]):
        self._axes = axes[:]
        self._check_axis(self._axes)

    @abstractmethod
    def _check_axis(self, axes):
        ...

    def compute_sig_indexes(self):
        """Signal indexes are every data dimension not declared as navigation."""
        indexes = list(np.arange(len(self._data_shape)))
        for index in self.nav_indexes:
            if index in indexes:
                indexes.pop(indexes.index(index))
        return tuple(indexes)

    def _has_get_axis_from_index(self, index: int):
        """Check if the axis referred by a given data dimensionality index is present

        Returns
        -------
        bool: True if the axis has been found else False
        Axis or None: return the axis instance if has the axis else None
        """
        # NOTE(review): `index > len(self._data_shape)` lets index == ndim through
        # (returning (False, None)); looks like an off-by-one — confirm before
        # tightening to >= as that would change behavior
        if index > len(self._data_shape) or index < 0:
            raise IndexError('The specified index does not correspond to any data dimension')
        for axis in self.axes:
            if axis.index == index:
                return True, axis
        return False, None

    def _manage_named_axes(self, axes, x_axis=None, y_axis=None, nav_x_axis=None, nav_y_axis=None):
        """This method make sur old style Data is still compatible, especially when using x_axis or y_axis parameters"""
        modified = False
        if x_axis is not None:
            modified = True
            index = 0
            if len(self._data_shape) == 1 and not self._has_get_axis_from_index(0)[0]:
                # in case of Data1D the x_axis corresponds to the first data dim
                index = 0
            elif len(self._data_shape) == 2 and not self._has_get_axis_from_index(1)[0]:
                # in case of Data2D the x_axis corresponds to the second data dim (columns)
                index = 1
            axes.append(Axis(x_axis.label, x_axis.units, x_axis.data, index=index))

        if y_axis is not None:
            if len(self._data_shape) == 2 and not self._has_get_axis_from_index(0)[0]:
                modified = True
                # in case of Data2D the y_axis corresponds to the first data dim (lines)
                axes.append(Axis(y_axis.label, y_axis.units, y_axis.data, index=0))

        if nav_x_axis is not None:
            if len(self.nav_indexes) > 0:
                modified = True
                axes.append(Axis(nav_x_axis.label, nav_x_axis.units, nav_x_axis.data, index=self._nav_indexes[0]))

        if nav_y_axis is not None:
            if len(self.nav_indexes) > 1:
                modified = True
                axes.append(Axis(nav_y_axis.label, nav_y_axis.units, nav_y_axis.data, index=self._nav_indexes[1]))

        if modified:
            self._check_axis(axes)

    @property
    def shape(self) -> Tuple[int]:
        """Tuple[int]: the underlying data shape"""
        return self._data_shape

    @abstractmethod
    def compute_shape_from_axes(self):
        ...

    @property
    def sig_shape(self) -> tuple:
        return tuple([self.shape[ind] for ind in self.sig_indexes])

    @property
    def nav_shape(self) -> tuple:
        return tuple([self.shape[ind] for ind in self.nav_indexes])

    def append_axis(self, axis: Axis):
        self._axes.append(axis)
        self._check_axis([axis])

    @property
    def nav_indexes(self) -> IterableType[int]:
        return self._nav_indexes

    @nav_indexes.setter
    def nav_indexes(self, nav_indexes: IterableType[int]):
        if isinstance(nav_indexes, Iterable):
            nav_indexes = tuple(nav_indexes)
            valid = True
            for index in nav_indexes:
                if index not in self.get_axes_index():
                    logger.warning('Could not set the corresponding nav_index into the data object, not enough'
                                   ' Axis declared')
                    valid = False
                    break
            if valid:
                self._nav_indexes = nav_indexes
        else:
            logger.warning('Could not set the corresponding sig_indexes into the data object, should be an iterable')
        # sig indexes always mirror the (possibly unchanged) nav indexes
        self.sig_indexes = self.compute_sig_indexes()
        # removed: a bare `self.shape` statement (no-op property access)

    @property
    def sig_indexes(self) -> IterableType[int]:
        return self._sig_indexes

    @sig_indexes.setter
    def sig_indexes(self, sig_indexes: IterableType[int]):
        if isinstance(sig_indexes, Iterable):
            sig_indexes = tuple(sig_indexes)
            valid = True
            for index in sig_indexes:
                if index in self._nav_indexes:
                    logger.warning('Could not set the corresponding sig_index into the axis manager object, '
                                   'the axis is already affected to the navigation axis')
                    valid = False
                    break
                if index not in self.get_axes_index():
                    logger.warning('Could not set the corresponding nav_index into the data object, not enough'
                                   ' Axis declared')
                    valid = False
                    break
            if valid:
                self._sig_indexes = sig_indexes
        else:
            logger.warning('Could not set the corresponding sig_indexes into the data object, should be an iterable')

    @property
    def nav_axes(self) -> List[int]:
        deprecation_msg('nav_axes parameter should not be used anymore, use nav_indexes')
        return self._nav_indexes

    @nav_axes.setter
    def nav_axes(self, nav_indexes: List[int]):
        deprecation_msg('nav_axes parameter should not be used anymore, use nav_indexes')
        self.nav_indexes = nav_indexes

    def is_axis_signal(self, axis: Axis) -> bool:
        """Check if an axis is considered signal or navigation"""
        # NOTE(review): this pair used to be defined twice with identical bodies
        # (dead duplicate removed). The membership tests also look swapped relative
        # to the names (signal tests `in nav_indexes`); behavior kept as-is —
        # confirm against callers before inverting.
        return axis.index in self._nav_indexes

    def is_axis_navigation(self, axis: Axis) -> bool:
        """Check if an axis is considered signal or navigation"""
        return axis.index not in self._nav_indexes

    @abstractmethod
    def get_shape_from_index(self, index: int) -> int:
        """Get the data shape at the given index"""
        ...

    def get_axes_index(self) -> List[int]:
        """Get the index list from the axis objects"""
        return [axis.index for axis in self._axes]

    @abstractmethod
    def get_axis_from_index(self, index: int, create: bool = False) -> List[Axis]:
        ...

    def get_nav_axes(self) -> List[Axis]:
        """Get the navigation axes corresponding to the data

        Use get_axis_from_index for all index in self.nav_indexes, but in spread distribution, one index may
        correspond to multiple nav axes, see Spread data distribution
        """
        return list(mutils.flatten([copy.copy(self.get_axis_from_index(index, create=True))
                                    for index in self.nav_indexes]))

    def get_signal_axes(self):
        if self.sig_indexes is None:
            self._sig_indexes = tuple([axis.index for axis in self.axes if axis.index not in self.nav_indexes])
        return list(mutils.flatten([copy.copy(self.get_axis_from_index(index, create=True))
                                    for index in self.sig_indexes]))

    def __repr__(self):
        return self._get_dimension_str()

    @abstractmethod
    def _get_dimension_str(self):
        ...
966
|
+
|
|
967
|
+
|
|
class AxesManagerUniform(AxesManagerBase):
    """Axes manager for uniformly sampled data: each data dimension maps to at most one Axis."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def compute_shape_from_axes(self):
        """Rebuild the data shape from the axes, falling back to the stored shape when no axes exist."""
        if not self.axes:
            return tuple(self._data_shape)
        return tuple(len(self.get_axis_from_index(ind, create=True)[0])
                     for ind in range(len(self.axes)))

    def get_shape_from_index(self, index: int) -> int:
        """Get the data shape at the given index"""
        if index < 0 or index > len(self._data_shape):
            raise IndexError('The specified index does not correspond to any data dimension')
        return self._data_shape[index]

    def _check_axis(self, axes: List[Axis]):
        """Check all axis to make sure of their type and make sure their data are properly referring to the data index

        See Also
        --------
        :py:meth:`Axis.create_linear_data`
        """
        for ind, axis in enumerate(axes):
            if not isinstance(axis, Axis):
                raise TypeError(f'An axis of {self.__class__.__name__} should be an Axis object')
            if self.get_shape_from_index(axis.index) != axis.size:
                # incoherent axis: replace with a default linear one matching the data
                warnings.warn(DataSizeWarning('The size of the axis is not coherent with the shape of the data. '
                                              'Replacing it with a linspaced version: np.array([0, 1, 2, ...])'))
                axis.size = self.get_shape_from_index(axis.index)
                axis.scaling = 1
                axis.offset = 0
                axes[ind] = axis
        self._axes = axes

    def get_axis_from_index(self, index: int, create: bool = False) -> List[Axis]:
        """Get the axis referred by a given data dimensionality index

        If the axis is absent, create a linear one to fit the data shape if parameter create is True

        Parameters
        ----------
        index: int
            The index referring to the data ndarray shape
        create: bool
            If True and the axis referred by index has not been found in axes, create one

        Returns
        -------
        Axis or None: return the list of axis instance if Data has the axis (or it has been created) else None

        See Also
        --------
        :py:meth:`Axis.create_linear_data`
        """
        has_axis, axis = self._has_get_axis_from_index(index)
        if not has_axis:
            if create:
                warnings.warn(DataIndexWarning(f'The axis requested with index {index} is not present, '
                                               f'creating a linear one...'))
                axis = Axis(index=index, offset=0, scaling=1)
                axis.size = self.get_shape_from_index(index)
            else:
                warnings.warn(DataIndexWarning(f'The axis requested with index {index} is not present, returning None'))
        return [axis]

    def _get_dimension_str(self):
        # "(nav_sizes|sig_sizes)" e.g. "(10, 5|256, 256)"
        nav_str = ", ".join(str(self._data_shape[ind]) for ind in self.nav_indexes)
        sig_str = ", ".join(str(self._data_shape[ind]) for ind in self.sig_indexes)
        return f'({nav_str}|{sig_str})'
1048
|
+
|
|
1049
|
+
|
|
class AxesManagerSpread(AxesManagerBase):
    """Axes manager for 'spread' (scattered) data.

    All navigation axes share the single navigation dimension: several Axis
    objects carry the same index and are distinguished by their spread_order
    attribute.

    Examples
    --------
    Images (20x30) acquired at 150 scattered (x, y) positions on a grid give
    data.shape == (150, 20, 30) with nav_indexes == (0,):

    * the first dimension (150) is the navigation dimension
    * the signal dimensions are (1, 2), the image size (20x30)

    Both navigation axes have length 150 and index 0, ordered via spread_order:
    xaxis = Axis(name=xaxis, index=0, spread_order=0, data...)
    yaxis = Axis(name=yaxis, index=0, spread_order=1, data...)
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def _check_axis(self, axes: List[Axis]):
        """Check all axis to make sure of their type and make sure their data are properly referring to the data index"""
        for axis in axes:
            if not isinstance(axis, Axis):
                raise TypeError(f'An axis of {self.__class__.__name__} should be an Axis object')
            elif len(self.nav_indexes) != 1:
                raise ValueError('Spread data should have only one specified index in self.nav_indexes')
            elif axis.index in self.nav_indexes:
                if axis.size != self._data_shape[self.nav_indexes[0]]:
                    raise DataLengthError('all navigation axes should have the same size')

    def compute_shape_from_axes(self):
        """Get data shape from axes

        First get the nav length from one of the navigation axes
        Then check for signal axes
        """
        if not self.axes:
            return tuple(self._data_shape)
        ordered = sorted(self.axes, key=lambda ax: ax.index)
        shape = []
        for ax in ordered:
            if ax.index in self.nav_indexes:
                # a single nav length: all nav axes share it
                shape.append(ax.size)
                break
        shape.extend(ax.size for ax in ordered if ax.index not in self.nav_indexes)
        return tuple(shape)

    def get_shape_from_index(self, index: int) -> int:
        """Get the data shape at the given index"""
        if index < 0 or index > len(self._data_shape):
            raise IndexError('The specified index does not correspond to any data dimension')
        return self._data_shape[index]

    def get_axis_from_index(self, index: int, create: bool = False) -> List[Axis]:
        """in spread mode, different nav axes have the same index (but not
        the same spread_order integer value) so may return multiple axis

        No possible "linear" creation in this mode
        """
        return [axis for axis in self.axes if axis.index == index]

    def _get_dimension_str(self):
        try:
            # only the first (single) nav index contributes to the nav part
            nav_str = str(self._data_shape[self.nav_indexes[0]]) if len(self.nav_indexes) else ''
            sig_str = ", ".join(str(self._data_shape[ind]) for ind in self.sig_indexes)
            string = f'({nav_str}|{sig_str})'
        except Exception:
            string = f'({self._data_shape})'
        return string
1149
|
+
|
|
1150
|
+
|
|
1151
|
+
class DataWithAxes(DataBase):
|
|
1152
|
+
"""Data object with Axis objects corresponding to underlying data nd-arrays
|
|
1153
|
+
|
|
1154
|
+
Parameters
|
|
1155
|
+
----------
|
|
1156
|
+
axes: list of Axis
|
|
1157
|
+
the list of Axis object for proper plotting, calibration ...
|
|
1158
|
+
nav_indexes: tuple of int
|
|
1159
|
+
highlight which Axis in axes is Signal or Navigation axis depending on the content:
|
|
1160
|
+
For instance, nav_indexes = (2,), means that the axis with index 2 in a at least 3D ndarray data is the first
|
|
1161
|
+
navigation axis
|
|
1162
|
+
For instance, nav_indexes = (3,2), means that the axis with index 3 in a at least 4D ndarray data is the first
|
|
1163
|
+
navigation axis while the axis with index 2 is the second navigation Axis. Axes with index 0 and 1 are signal
|
|
1164
|
+
axes of 2D ndarray data
|
|
1165
|
+
"""
|
|
1166
|
+
|
|
1167
|
+
def __init__(self, *args, axes: List[Axis] = [], nav_indexes: Tuple[int] = (), **kwargs):
|
|
1168
|
+
|
|
1169
|
+
if 'nav_axes' in kwargs:
|
|
1170
|
+
deprecation_msg('nav_axes parameter should not be used anymore, use nav_indexes')
|
|
1171
|
+
nav_indexes = kwargs.pop('nav_axes')
|
|
1172
|
+
|
|
1173
|
+
x_axis = kwargs.pop('x_axis') if 'x_axis' in kwargs else None
|
|
1174
|
+
y_axis = kwargs.pop('y_axis') if 'y_axis' in kwargs else None
|
|
1175
|
+
|
|
1176
|
+
nav_x_axis = kwargs.pop('nav_x_axis') if 'nav_x_axis' in kwargs else None
|
|
1177
|
+
nav_y_axis = kwargs.pop('nav_y_axis') if 'nav_y_axis' in kwargs else None
|
|
1178
|
+
|
|
1179
|
+
super().__init__(*args, **kwargs)
|
|
1180
|
+
|
|
1181
|
+
self._axes = axes
|
|
1182
|
+
|
|
1183
|
+
other_kwargs = dict(x_axis=x_axis, y_axis=y_axis, nav_x_axis=nav_x_axis, nav_y_axis=nav_y_axis)
|
|
1184
|
+
|
|
1185
|
+
self.set_axes_manager(self.shape, axes=axes, nav_indexes=nav_indexes, **other_kwargs)
|
|
1186
|
+
|
|
1187
|
+
self.inav: Iterable[DataWithAxes] = SpecialSlicersData(self, True)
|
|
1188
|
+
self.isig: Iterable[DataWithAxes] = SpecialSlicersData(self, False)
|
|
1189
|
+
|
|
1190
|
+
self.get_dim_from_data_axes() # in DataBase, dim is processed from the shape of data, but if axes are provided
|
|
1191
|
+
#then use get_dim_from axes
|
|
1192
|
+
|
|
1193
|
+
def set_axes_manager(self, data_shape, axes, nav_indexes, **kwargs):
|
|
1194
|
+
if self.distribution.name == 'uniform' or len(nav_indexes) == 0:
|
|
1195
|
+
self._distribution = DataDistribution['uniform']
|
|
1196
|
+
self.axes_manager = AxesManagerUniform(data_shape=data_shape, axes=axes, nav_indexes=nav_indexes,
|
|
1197
|
+
**kwargs)
|
|
1198
|
+
elif self.distribution.name == 'spread':
|
|
1199
|
+
self.axes_manager = AxesManagerSpread(data_shape=data_shape, axes=axes, nav_indexes=nav_indexes,
|
|
1200
|
+
**kwargs)
|
|
1201
|
+
else:
|
|
1202
|
+
raise ValueError(f'Such a data distribution ({data.distribution}) has no AxesManager')
|
|
1203
|
+
|
|
    def __repr__(self):
        """Compact representation: class, name and the axes-manager dimension string."""
        return f'<{self.__class__.__name__}, {self.name}, {self._am}>'
1206
|
+
|
|
1207
|
+
def sort_data(self, nav_axis: int = 0):
|
|
1208
|
+
"""Sort spread data along a given navigation axis, default is 0"""
|
|
1209
|
+
if self.distribution == 'spread':
|
|
1210
|
+
axis = self.get_nav_axes()[nav_axis]
|
|
1211
|
+
sorted_index = np.argsort(axis.get_data())
|
|
1212
|
+
data = self.deepcopy()
|
|
1213
|
+
for ind in range(len(data)):
|
|
1214
|
+
data.data[ind] = data.data[ind][sorted_index]
|
|
1215
|
+
for ind in range(len(data.axes)):
|
|
1216
|
+
data.axes[ind].data = data.axes[ind].data[sorted_index]
|
|
1217
|
+
return data
|
|
1218
|
+
else:
|
|
1219
|
+
return self
|
|
1220
|
+
|
|
1221
|
+
def transpose(self):
|
|
1222
|
+
"""replace the data by their transposed version
|
|
1223
|
+
|
|
1224
|
+
Valid only for 2D data
|
|
1225
|
+
"""
|
|
1226
|
+
if self.dim == 'Data2D':
|
|
1227
|
+
self.data[:] = [data.T for data in self.data]
|
|
1228
|
+
for axis in self.axes:
|
|
1229
|
+
axis.index = 0 if axis.index == 1 else 1
|
|
1230
|
+
|
|
1231
|
+
def mean(self, axis: int = 0) -> DataWithAxes:
    """Average the stored arrays over the specified axis.

    Parameters
    ----------
    axis: int
        index of the array axis to average over

    Returns
    -------
    DataWithAxes
        a copy of self holding the averaged arrays, with the corresponding
        axis stripped
    """
    averaged = [np.mean(array, axis=axis) for array in self.data]
    return self.deepcopy_with_new_data(averaged, remove_axes_index=axis)
|
|
1246
|
+
|
|
1247
|
+
def get_dim_from_data_axes(self) -> DataDim:
    """Compute the DataDim of this object, taking navigation indexes into account."""
    n_axes = len(self.axes)
    if n_axes != len(self.shape):
        # axes do not fully describe the data: fall back to the shape-based estimate
        self._dim = self.get_dim_from_data(self.data)
    elif len(self.nav_indexes) > 0:
        self._dim = DataDim['DataND']
    elif n_axes == 0:
        self._dim = DataDim['Data0D']
    elif n_axes == 1:
        self._dim = DataDim['Data1D']
    elif n_axes == 2:
        self._dim = DataDim['Data2D']
    # NOTE(review): with more than 2 signal axes and no nav indexes, the
    # previous value of self._dim is returned unchanged — confirm intended
    return self._dim
|
|
1263
|
+
|
|
1264
|
+
@property
def axes(self):
    """list of Axis: the axes held by the underlying axes manager."""
    return self.axes_manager.axes
|
|
1268
|
+
|
|
1269
|
+
@axes.setter
def axes(self, axes: List[Axis]):
    """convenience property to set attribute from axis_manager"""
    # rebuilding the axes manager keeps shape/nav_indexes consistent with the new axes
    self.set_axes_manager(self.shape, axes=axes, nav_indexes=self.nav_indexes)
|
|
1273
|
+
|
|
1274
|
+
@property
def sig_indexes(self):
    """convenience property to fetch the signal-axis indexes from axis_manager"""
    return self._am.sig_indexes
|
|
1278
|
+
|
|
1279
|
+
@property
def nav_indexes(self):
    """convenience property to fetch the navigation-axis indexes from axis_manager"""
    return self._am.nav_indexes
|
|
1283
|
+
|
|
1284
|
+
@nav_indexes.setter
def nav_indexes(self, indexes: List[int]):
    """create new axis manager with new navigation indexes"""
    self.set_axes_manager(self.shape, axes=self.axes, nav_indexes=indexes)
    # dim may change (e.g. to/from DataND) when navigation indexes change
    self.get_dim_from_data_axes()
|
|
1289
|
+
|
|
1290
|
+
def get_nav_axes(self) -> List[Axis]:
    """Return the navigation axes, as held by the axes manager."""
    return self._am.get_nav_axes()
|
|
1292
|
+
|
|
1293
|
+
def get_nav_axes_with_data(self) -> List[Axis]:
    """Get the data's navigation axes making sure there is data in the data field"""
    nav_axes = self.get_nav_axes()
    for nav_axis in nav_axes:
        if nav_axis.data is None:
            # fill an empty axis with a default linear ramp of the right length
            nav_axis.create_linear_data(self.shape[nav_axis.index])
    return nav_axes
|
|
1300
|
+
|
|
1301
|
+
def get_axis_indexes(self) -> List[int]:
    """Get all present different axis indexes"""
    unique_indexes = {axis.index for axis in self.axes}
    return sorted(unique_indexes)
|
|
1304
|
+
|
|
1305
|
+
def get_axis_from_index(self, index, create=False):
    """Delegate to the axes manager: fetch (or optionally create) the axes at a given index."""
    return self._am.get_axis_from_index(index, create)
|
|
1307
|
+
|
|
1308
|
+
def create_missing_axes(self):
    """Check if given the data shape, some axes are missing to properly define the data (especially for plotting)"""
    axes = self.axes[:]
    for index in range(len(self.shape)):
        # a non-empty result whose first element is None signals an axis slot
        # with no Axis defined yet; create=True then builds a default one
        if len(self.get_axis_from_index(index)) != 0 and self.get_axis_from_index(index)[0] is None:
            axes.extend(self.get_axis_from_index(index, create=True))
    self.axes = axes
|
|
1315
|
+
|
|
1316
|
+
def _compute_slices(self, slices, is_navigation=True):
    """Build the full slicing tuple to apply to the data arrays.

    Dimensions that should not be sliced are covered by an Ellipsis; a run of
    consecutive untouched dimensions shares a single Ellipsis.
    """
    indexes = self._am.nav_indexes if is_navigation else self._am.sig_indexes
    pending = list(slices)
    total_slices = []
    for dim_index in range(len(self.shape)):
        if dim_index in indexes:
            total_slices.append(pending.pop(0))
        elif not total_slices or total_slices[-1] != Ellipsis:
            total_slices.append(Ellipsis)
    return tuple(total_slices)
|
|
1334
|
+
|
|
1335
|
+
def _slicer(self, slices, is_navigation=True):
    """Apply a given slice to the data either navigation or signal dimension

    Parameters
    ----------
    slices: tuple of slice or int
        the slices to apply to the data
    is_navigation: bool
        if True apply the slices to the navigation dimension else to the signal ones

    Returns
    -------
    DataWithAxes
        Object of the same type as the initial data, derived from DataWithAxes. But with lower data size due to the
        slicing and with eventually less axes.
    """

    if isinstance(slices, numbers.Number) or isinstance(slices, slice):
        slices = [slices]  # normalize a scalar slice/index into a sequence
    total_slices = self._compute_slices(slices, is_navigation)
    new_arrays_data = [np.atleast_1d(np.squeeze(dat[total_slices])) for dat in self.data]
    tmp_axes = self._am.get_signal_axes() if is_navigation else self._am.get_nav_axes()
    axes_to_append = [copy.deepcopy(axis) for axis in tmp_axes]

    # axes_to_append are the axes to append to the new produced data (basically the ones to keep)

    indexes_to_get = self.nav_indexes if is_navigation else self.sig_indexes
    # indexes_to_get are the indexes of the axes where the slice should be applied

    _indexes = list(self.nav_indexes)
    _indexes.extend(self.sig_indexes)
    lower_indexes = dict(zip(_indexes, [0 for _ in range(len(_indexes))]))
    # lower_indexes will store for each *axis index* how much the index should be reduced because one axis has
    # been removed

    axes = []
    nav_indexes = [] if is_navigation else list(self._am.nav_indexes)
    for ind_slice, _slice in enumerate(slices):
        ax = self._am.get_axis_from_index(indexes_to_get[ind_slice])
        if len(ax) != 0:
            # slice each Axis object the same way the data was sliced
            for ind in range(len(ax)):
                ax[ind] = ax[ind].iaxis[_slice]

            if not(ax[0] is None or ax[0].size <= 1):  # means the slice kept part of the axis
                if is_navigation:
                    nav_indexes.append(self._am.nav_indexes[ind_slice])
                axes.extend(ax)
            else:
                for axis in axes_to_append:  # means we removed one of the axes (and data dim),
                    # hence axis index above current index should be lowered by 1
                    if axis.index > indexes_to_get[ind_slice]:
                        lower_indexes[axis.index] += 1
                for index in indexes_to_get[ind_slice+1:]:
                    lower_indexes[index] += 1

    axes.extend(axes_to_append)
    # re-number the surviving axes/indexes to account for the removed dimensions
    for axis in axes:
        axis.index -= lower_indexes[axis.index]
    for ind in range(len(nav_indexes)):
        nav_indexes[ind] -= lower_indexes[nav_indexes[ind]]
    data = DataWithAxes(self.name, data=new_arrays_data, nav_indexes=tuple(nav_indexes), axes=axes,
                        source='calculated', origin=self.origin,
                        distribution=self.distribution if len(nav_indexes) != 0 else DataDistribution['uniform'])
    return data
|
|
1399
|
+
|
|
1400
|
+
def deepcopy_with_new_data(self, data: List[np.ndarray] = None,
                           remove_axes_index: List[int] = None,
                           source: DataSource = 'calculated') -> DataWithAxes:
    """deepcopy without copying the initial data (saving memory)

    The new data, may have some axes stripped as specified in remove_axes_index

    Parameters
    ----------
    data: list of np.ndarray
        the arrays the copy will hold instead of the original ones
    remove_axes_index: int or list of int, optional
        axis index(es) whose Axis objects should be stripped from the copy
    source: DataSource or str
        source flag to set on the copy (default 'calculated')

    Returns
    -------
    DataWithAxes
    """
    old_data = self.data
    try:
        # temporarily drop the arrays so the deepcopy below does not duplicate them
        self._data = None
        new_data = self.deepcopy()
        new_data._data = data
        new_data.get_dim_from_data(data)

        if source is not None:
            source = enum_checker(DataSource, source)
            new_data._source = source

        if remove_axes_index is not None:
            # bugfix: the None check must come first; previously None was wrapped
            # into [None] and processed as a real index, which failed and was
            # silently swallowed by a bare `except: pass` (returning None)
            if not isinstance(remove_axes_index, Iterable):
                remove_axes_index = [remove_axes_index]

            # lower_indexes stores, per axis index, by how much that index must be
            # reduced because lower-indexed axes have been removed
            lower_indexes = {index: 0 for index in new_data.get_axis_indexes()}

            nav_indexes = list(new_data.nav_indexes)
            sig_indexes = list(new_data.sig_indexes)
            for index in remove_axes_index:
                for axis in new_data.get_axis_from_index(index):
                    new_data.axes.remove(axis)

                if index in new_data.nav_indexes:
                    nav_indexes.pop(nav_indexes.index(index))
                if index in new_data.sig_indexes:
                    sig_indexes.pop(sig_indexes.index(index))

                for axis in new_data.axes:
                    if axis.index > index and axis.index not in remove_axes_index:
                        lower_indexes[axis.index] += 1

            # re-number the surviving axes and navigation indexes
            for axis in new_data.axes:
                axis.index -= lower_indexes[axis.index]
            for ind in range(len(nav_indexes)):
                nav_indexes[ind] -= lower_indexes[nav_indexes[ind]]

            new_data.nav_indexes = tuple(nav_indexes)

        new_data._shape = data[0].shape
        new_data._dim = self.get_dim_from_data(data)
        return new_data
    finally:
        # always restore the arrays on the original object
        self._data = old_data
|
|
1465
|
+
|
|
1466
|
+
def deepcopy(self):
    """Return a full deep copy of this object (data, axes and metadata)."""
    return copy.deepcopy(self)
|
|
1468
|
+
|
|
1469
|
+
@property
def _am(self) -> AxesManagerBase:
    """Shorthand accessor for the axes manager."""
    return self.axes_manager
|
|
1472
|
+
|
|
1473
|
+
def get_data_dimension(self) -> str:
    """Return the axes manager's string representation describing the data dimensionality."""
    return str(self._am)
|
|
1475
|
+
|
|
1476
|
+
|
|
1477
|
+
|
|
1478
|
+
|
|
1479
|
+
class DataRaw(DataWithAxes):
    """Specialized DataWithAxes set with source as 'raw'. To be used for raw data"""

    def __init__(self, *args, **kwargs):
        # the source is forced to 'raw': discard any caller-supplied value
        kwargs.pop('source', None)
        super().__init__(*args, source=DataSource['raw'], **kwargs)
|
|
1485
|
+
|
|
1486
|
+
|
|
1487
|
+
class DataFromPlugins(DataRaw):
    """Specialized DataWithAxes set with source as 'raw'. To be used for raw data generated by plugins"""
    def __init__(self, *args, **kwargs):
        # no extra behavior: the subclass only tags the data's provenance
        super().__init__(*args, **kwargs)
|
|
1491
|
+
|
|
1492
|
+
|
|
1493
|
+
class DataCalculated(DataWithAxes):
    """Specialized DataWithAxes set with source as 'calculated'. To be used for processed/calculated data"""

    def __init__(self, *args, axes=None, **kwargs):
        # bugfix: `axes=[]` was a shared mutable default; use None sentinel so
        # every instance gets its own fresh list
        if axes is None:
            axes = []
        # the source is forced to 'calculated': discard any caller-supplied value
        kwargs.pop('source', None)
        super().__init__(*args, source=DataSource['calculated'], axes=axes, **kwargs)
|
|
1499
|
+
|
|
1500
|
+
|
|
1501
|
+
class DataFromRoi(DataCalculated):
    """Specialized DataWithAxes set with source as 'calculated'.To be used for processed data from region of interest"""

    def __init__(self, *args, axes=None, **kwargs):
        # bugfix: `axes=[]` was a shared mutable default; use None sentinel so
        # every instance gets its own fresh list
        if axes is None:
            axes = []
        super().__init__(*args, axes=axes, **kwargs)
|
|
1505
|
+
|
|
1506
|
+
|
|
1507
|
+
class DataToExport(DataLowLevel):
    """Object to store all raw and calculated DataWithAxes data for later exporting, saving, sending signal...

    Includes methods to retrieve data from dim, source...
    Stored data have a unique identifier their name. If some data is appended with an existing name, it will replace
    the existing data. So if you want to append data that has the same name

    Parameters
    ----------
    name: str
        The identifier of the exporting object
    data: list of DataWithAxes
        All the raw and calculated data to be exported

    Attributes
    ----------
    name
    timestamp
    data
    """

    def __init__(self, name: str, data: List[DataWithAxes] = [], **kwargs):
        """Build the container and store the given DataWithAxes list.

        Parameters
        ----------
        name: str
            identifier of the exporting object
        data: list of DataWithAxes
            initial data to store
        """
        # NOTE(review): `data=[]` is a mutable default; harmless here because the
        # `data` setter copies into self._data, but worth confirming
        super().__init__(name)
        if not isinstance(data, list):
            raise TypeError('Data stored in a DataToExport object should be as a list of objects'
                            ' inherited from DataWithAxis')
        self._data = []

        self.data = data
        for key in kwargs:
            setattr(self, key, kwargs[key])

    def affect_name_to_origin_if_none(self):
        """Affect self.name to all DataWithAxes children's attribute origin if this origin is not defined"""
        for dat in self.data:
            if dat.origin is None:
                dat.origin = self.name

    def __sub__(self, other: object):
        # element-wise subtraction over the stored DataWithAxes, same length required
        if isinstance(other, DataToExport) and len(other) == len(self):
            new_data = copy.deepcopy(self)
            for ind_dfp in range(len(self)):
                new_data[ind_dfp] = self[ind_dfp] - other[ind_dfp]
            return new_data
        else:
            raise TypeError(f'Could not substract a {other.__class__.__name__} or a {self.__class__.__name__} '
                            f'of a different length')

    def __add__(self, other: object):
        # element-wise addition over the stored DataWithAxes, same length required
        if isinstance(other, DataToExport) and len(other) == len(self):
            new_data = copy.deepcopy(self)
            for ind_dfp in range(len(self)):
                new_data[ind_dfp] = self[ind_dfp] + other[ind_dfp]
            return new_data
        else:
            raise TypeError(f'Could not add a {other.__class__.__name__} or a {self.__class__.__name__} '
                            f'of a different length')

    def __mul__(self, other: object):
        # scalar multiplication applied to every stored DataWithAxes
        if isinstance(other, numbers.Number):
            new_data = copy.deepcopy(self)
            for ind_dfp in range(len(self)):
                new_data[ind_dfp] = self[ind_dfp] * other
            return new_data
        else:
            raise TypeError(f'Could not multiply a {other.__class__.__name__} with a {self.__class__.__name__} '
                            f'of a different length')

    def __truediv__(self, other: object):
        # scalar division implemented via scalar multiplication
        if isinstance(other, numbers.Number):
            return self * (1 / other)
        else:
            raise TypeError(f'Could not divide a {other.__class__.__name__} with a {self.__class__.__name__} '
                            f'of a different length')

    def average(self, other: DataToExport, weight: int) -> DataToExport:
        """ Compute the weighted average between self and other DataToExport and attributes it to self

        Parameters
        ----------
        other: DataToExport
        weight: int
            The weight the 'other_data' holds with respect to self

        """
        if isinstance(other, DataToExport) and len(other) == len(self):
            # NOTE(review): copy.copy (shallow) here, unlike deepcopy in the
            # arithmetic operators above — confirm this asymmetry is intended
            new_data = copy.copy(self)
            for ind_dfp in range(len(self)):
                new_data[ind_dfp] = self[ind_dfp].average(other[ind_dfp], weight)
            return new_data
        else:
            raise TypeError(f'Could not average a {other.__class__.__name__} with a {self.__class__.__name__} '
                            f'of a different length')

    def __repr__(self):
        return f'{self.__class__.__name__}: {self.name} <len:{len(self)}>'

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        self._iter_index = 0
        return self

    def __next__(self) -> DataWithAxes:
        if self._iter_index < len(self):
            self._iter_index += 1
            return self.data[self._iter_index-1]
        else:
            raise StopIteration

    def __getitem__(self, item) -> DataWithAxes:
        # only plain positive integer indexing is supported (no slices)
        if isinstance(item, int) and 0 <= item < len(self):
            return self.data[item]
        else:
            raise IndexError(f'The index should be a positive integer lower than the data length')

    def __setitem__(self, key, value: DataWithAxes):
        if isinstance(key, int) and 0 <= key < len(self) and isinstance(value, DataWithAxes):
            self.data[key] = value
        else:
            raise IndexError(f'The index should be a positive integer lower than the data length')

    def get_names(self, dim: DataDim = None):
        """Get the names of the stored DataWithAxes, eventually filtered by dim

        Parameters
        ----------
        dim: DataDim or str

        Returns
        -------
        list of str: the names of the (filtered) DataWithAxes data
        """
        if dim is None:
            return [data.name for data in self.data]
        else:
            return [data.name for data in self.get_data_from_dim(dim).data]

    def get_full_names(self, dim: DataDim = None):
        """Get the full names including the origin attribute into the returned value, eventually filtered by dim

        Parameters
        ----------
        dim: DataDim or str

        Returns
        -------
        list of str: the names of the (filtered) DataWithAxes data constructed as : origin/name

        Examples
        --------
        d0 = DataWithAxes(name='datafromdet0', origin='det0')
        """
        if dim is None:
            return [data.get_full_name() for data in self.data]
        else:
            return [data.get_full_name() for data in self.get_data_from_dim(dim).data]

    def get_data_from_full_name(self, full_name: str, deepcopy=False) -> DataWithAxes:
        """Get the DataWithAxes with matching full name"""
        # full_name is 'origin/name'
        if deepcopy:
            data = self.get_data_from_name_origin(full_name.split('/')[1], full_name.split('/')[0]).deepcopy()
        else:
            data = self.get_data_from_name_origin(full_name.split('/')[1], full_name.split('/')[0])
        return data

    def get_data_from_full_names(self, full_names: List[str], deepcopy=False) -> DataToExport:
        """Get a new DataToExport holding the DataWithAxes matching the given full names."""
        data = [self.get_data_from_full_name(full_name, deepcopy) for full_name in full_names]
        return DataToExport(name=self.name, data=data)

    def get_dim_presents(self) -> List[str]:
        """Return the DataDim names for which at least one stored data matches."""
        dims = []
        for dim in DataDim.names():
            if len(self.get_data_from_dim(dim)) != 0:
                dims.append(dim)

        return dims

    def get_data_from_source(self, source: DataSource, deepcopy=False) -> DataToExport:
        """Get the data matching the given DataSource

        Returns
        -------
        DataToExport: filtered with data matching the dimensionality
        """
        source = enum_checker(DataSource, source)
        selection = find_objects_in_list_from_attr_name_val(self.data, 'source', source, return_first=False)

        # selection is a list of (object, index) tuples; keep a deterministic order
        selection.sort(key=lambda elt: elt[0].name)
        if deepcopy:
            data = [sel[0].deepcopy() for sel in selection]
        else:
            data = [sel[0] for sel in selection]
        return DataToExport(name=self.name, data=data)

    def get_data_from_dim(self, dim: DataDim, deepcopy=False) -> DataToExport:
        """Get the data matching the given DataDim

        Returns
        -------
        DataToExport: filtered with data matching the dimensionality
        """
        dim = enum_checker(DataDim, dim)
        selection = find_objects_in_list_from_attr_name_val(self.data, 'dim', dim, return_first=False)
        selection.sort(key=lambda elt: elt[0].name)
        if deepcopy:
            data = [sel[0].deepcopy() for sel in selection]
        else:
            data = [sel[0] for sel in selection]
        return DataToExport(name=self.name, data=data)

    def get_data_from_dims(self, dims: List[DataDim], deepcopy=False) -> DataToExport:
        """Get the data matching the given DataDim

        Returns
        -------
        DataToExport: filtered with data matching the dimensionality
        """
        data = DataToExport(name=self.name)
        for dim in dims:
            data.append(self.get_data_from_dim(dim, deepcopy=deepcopy))
        return data

    def get_data_from_Naxes(self, Naxes: int, deepcopy: bool = False) -> DataToExport:
        """Get the data matching the given number of axes

        Returns
        -------
        DataToExport: filtered with data matching the number of axes
        """
        data = DataToExport(name=self.name)
        for _data in self:
            if len(_data.shape) == Naxes:
                if deepcopy:
                    data.append(_data.deepcopy())
                else:
                    data.append(_data)
        return data

    def get_data_from_name(self, name: str) -> List[DataWithAxes]:
        """Get the data matching the given name"""
        data, _ = find_objects_in_list_from_attr_name_val(self.data, 'name', name, return_first=True)
        return data

    def get_data_from_name_origin(self, name: str, origin: str = None) -> DataWithAxes:
        """Get the data matching the given name and the given origin"""
        if origin is None:
            data, _ = find_objects_in_list_from_attr_name_val(self.data, 'name', name, return_first=True)
        else:
            selection = find_objects_in_list_from_attr_name_val(self.data, 'name', name, return_first=False)
            selection = [sel[0] for sel in selection]
            data, _ = find_objects_in_list_from_attr_name_val(selection, 'origin', origin)
        return data

    def index(self, data: DataWithAxes):
        """Return the position of the given DataWithAxes in the stored list."""
        return self.data.index(data)

    def index_from_name_origin(self, name: str, origin: str = None) -> List[DataWithAxes]:
        """Get the index of the DataWithAxes matching the given name (and origin) within the list of data"""
        if origin is None:
            _, index = find_objects_in_list_from_attr_name_val(self.data, 'name', name, return_first=True)
        else:
            selection = find_objects_in_list_from_attr_name_val(self.data, 'name', name, return_first=False)
            data_selection = [sel[0] for sel in selection]
            index_selection = [sel[1] for sel in selection]
            _, index = find_objects_in_list_from_attr_name_val(data_selection, 'origin', origin)
            # map the index within the filtered selection back to the full list
            index = index_selection[index]
        return index

    def pop(self, index: int) -> DataWithAxes:
        """return and remove the DataWithAxes referred by its index

        Parameters
        ----------
        index: int
            index as returned by self.index_from_name_origin

        See Also
        --------
        index_from_name_origin
        """
        return self.data.pop(index)

    def remove(self, dwa: DataWithAxes):
        """Remove the given DataWithAxes from the stored list."""
        self.pop(self.data.index(dwa))

    @property
    def data(self) -> List[DataWithAxes]:
        """List[DataWithAxes]: get the data contained in the object"""
        return self._data

    @data.setter
    def data(self, new_data: List[DataWithAxes]):
        for dat in new_data:
            self._check_data_type(dat)
        self._data[:] = [dat for dat in new_data]  # shallow copy to make sure that if the original list
        # is changed, the change will not be applied in here

        self.affect_name_to_origin_if_none()

    @staticmethod
    def _check_data_type(data: DataWithAxes):
        """Make sure data is a DataWithAxes object or inherited"""
        if not isinstance(data, DataWithAxes):
            raise TypeError('Data stored in a DataToExport object should be objects inherited from DataWithAxis')

    @dispatch(list)
    def append(self, data: List[DataWithAxes]):
        # list overload: append each element in turn
        for dat in data:
            self.append(dat)

    @dispatch(DataWithAxes)
    def append(self, data: DataWithAxes):
        """Append/replace DataWithAxes object to the data attribute

        Make sure only one DataWithAxes object with a given name is in the list except if they don't have the same
        origin identifier
        """
        data = copy.deepcopy(data)
        self._check_data_type(data)
        # replace any existing entry with the same (name, origin) pair
        obj = self.get_data_from_name_origin(data.name, data.origin)
        if obj is not None:
            self._data.pop(self.data.index(obj))
        self._data.append(data)

    @dispatch(object)
    def append(self, data: DataToExport):
        # fallback overload: merge another DataToExport element by element
        if isinstance(data, DataToExport):
            for dat in data:
                self.append(dat)
|
|
1846
|
+
|
|
1847
|
+
|
|
1848
|
+
class DataScan(DataToExport):
    """Specialized DataToExport.To be used for data to be saved """
    def __init__(self, name: str, data: List[DataWithAxes] = [], **kwargs):
        # no extra behavior yet: the subclass only tags the container's purpose
        super().__init__(name, data, **kwargs)
|
|
1852
|
+
|
|
1853
|
+
|
|
1854
|
+
if __name__ == '__main__':
    # quick manual smoke test of the data objects

    d1 = DataFromRoi(name=f'Hlineout_', data=[np.zeros((24,))],
                     x_axis=Axis(data=np.zeros((24,)), units='myunits', label='mylabel1'))
    d2 = DataFromRoi(name=f'Hlineout_', data=[np.zeros((12,))],
                     x_axis=Axis(data=np.zeros((12,)),
                                 units='myunits2',
                                 label='mylabel2'))

    Nsig = 200
    Nnav = 10
    x = np.linspace(-Nsig/2, Nsig/2-1, Nsig)

    # one gaussian per navigation step, drifting across the signal axis
    dat = np.zeros((Nnav, Nsig))
    for ind in range(Nnav):
        dat[ind] = mutils.gauss1D(x, 50 * (ind - Nnav / 2), 25 / np.sqrt(2))

    data = DataRaw('mydata', data=[dat], nav_indexes=(0,),
                   axes=[Axis('nav', data=np.linspace(0, Nnav-1, Nnav), index=0),
                         Axis('sig', data=x, index=1)])

    data2 = copy.copy(data)

    # bugfix: the public method is `deepcopy_with_new_data` (no leading underscore);
    # the previous `_deepcopy_with_new_data` raised AttributeError
    data3 = data.deepcopy_with_new_data([np.sum(dat, 1)], remove_axes_index=(1,))

    print('done')
|
|
10
1881
|
|
|
11
|
-
def __getattr__(name):
|
|
12
|
-
if hasattr(daq_utils, name):
|
|
13
|
-
return getattr(daq_utils, name)
|
|
14
|
-
else:
|
|
15
|
-
raise AttributeError
|