pymodaq 4.4.7__py3-none-any.whl → 5.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pymodaq might be problematic; review the release details below before upgrading.

Files changed (415)
  1. pymodaq/__init__.py +57 -91
  2. pymodaq/control_modules/daq_move.py +64 -46
  3. pymodaq/control_modules/daq_move_ui.py +34 -12
  4. pymodaq/control_modules/daq_viewer.py +55 -30
  5. pymodaq/control_modules/daq_viewer_ui.py +6 -6
  6. pymodaq/control_modules/mocks.py +1 -1
  7. pymodaq/control_modules/move_utility_classes.py +51 -43
  8. pymodaq/control_modules/utils.py +43 -20
  9. pymodaq/control_modules/viewer_utility_classes.py +54 -18
  10. pymodaq/daq_utils/daq_utils.py +6 -0
  11. pymodaq/dashboard.py +639 -323
  12. pymodaq/examples/function_plotter.py +13 -12
  13. pymodaq/examples/tcp_client.py +1 -1
  14. pymodaq/extensions/__init__.py +1 -1
  15. pymodaq/extensions/bayesian/bayesian_optimisation.py +44 -32
  16. pymodaq/extensions/bayesian/utils.py +10 -10
  17. pymodaq/extensions/console.py +7 -6
  18. pymodaq/extensions/daq_logger/__init__.py +1 -0
  19. pymodaq/extensions/{daq_logger.py → daq_logger/daq_logger.py} +30 -30
  20. pymodaq/{utils/db/db_logger → extensions/daq_logger/db}/db_logger.py +16 -15
  21. pymodaq/{utils/db/db_logger → extensions/daq_logger/db}/db_logger_models.py +2 -0
  22. pymodaq/{utils/h5modules → extensions/daq_logger}/h5logging.py +7 -8
  23. pymodaq/extensions/daq_scan.py +42 -34
  24. pymodaq/extensions/daq_scan_ui.py +18 -18
  25. pymodaq/extensions/h5browser.py +2 -3
  26. pymodaq/extensions/pid/__init__.py +4 -2
  27. pymodaq/extensions/pid/daq_move_PID.py +3 -3
  28. pymodaq/extensions/pid/pid_controller.py +59 -50
  29. pymodaq/extensions/pid/utils.py +10 -5
  30. pymodaq/extensions/utils.py +33 -3
  31. pymodaq/post_treatment/load_and_plot.py +10 -7
  32. pymodaq/resources/preset_default.xml +1 -1
  33. pymodaq/updater.py +107 -0
  34. pymodaq/utils/array_manipulation.py +4 -384
  35. pymodaq/utils/calibration_camera.py +12 -9
  36. pymodaq/utils/chrono_timer.py +11 -10
  37. pymodaq/utils/config.py +3 -458
  38. pymodaq/utils/daq_utils.py +9 -715
  39. pymodaq/utils/data.py +17 -2959
  40. pymodaq/utils/enums.py +4 -74
  41. pymodaq/utils/exceptions.py +0 -4
  42. pymodaq/utils/gui_utils/__init__.py +8 -8
  43. pymodaq/utils/gui_utils/loader_utils.py +26 -1
  44. pymodaq/utils/gui_utils/utils.py +8 -162
  45. pymodaq/utils/gui_utils/widgets/lcd.py +6 -109
  46. pymodaq/utils/h5modules/__init__.py +0 -4
  47. pymodaq/utils/h5modules/module_saving.py +9 -8
  48. pymodaq/utils/leco/__init__.py +2 -2
  49. pymodaq/utils/leco/daq_move_LECODirector.py +3 -6
  50. pymodaq/utils/leco/daq_xDviewer_LECODirector.py +5 -5
  51. pymodaq/utils/leco/director_utils.py +2 -2
  52. pymodaq/utils/leco/leco_director.py +3 -3
  53. pymodaq/utils/leco/pymodaq_listener.py +4 -3
  54. pymodaq/utils/leco/utils.py +11 -9
  55. pymodaq/utils/logger.py +4 -76
  56. pymodaq/utils/managers/batchscan_manager.py +16 -19
  57. pymodaq/utils/managers/modules_manager.py +30 -17
  58. pymodaq/utils/managers/overshoot_manager.py +48 -6
  59. pymodaq/utils/managers/preset_manager.py +39 -59
  60. pymodaq/utils/managers/preset_manager_utils.py +28 -22
  61. pymodaq/utils/managers/remote_manager.py +12 -10
  62. pymodaq/utils/math_utils.py +4 -582
  63. pymodaq/utils/messenger.py +4 -64
  64. pymodaq/utils/parameter/__init__.py +6 -9
  65. pymodaq/utils/parameter/utils.py +4 -328
  66. pymodaq/utils/scanner/scan_config.py +1 -1
  67. pymodaq/utils/scanner/scan_factory.py +16 -12
  68. pymodaq/utils/{plotting → scanner}/scan_selector.py +19 -20
  69. pymodaq/utils/scanner/scanner.py +10 -8
  70. pymodaq/utils/scanner/scanners/_1d_scanners.py +8 -5
  71. pymodaq/utils/scanner/scanners/_2d_scanners.py +5 -5
  72. pymodaq/utils/scanner/scanners/sequential.py +8 -8
  73. pymodaq/utils/scanner/scanners/tabular.py +9 -9
  74. pymodaq/utils/scanner/utils.py +6 -4
  75. pymodaq/utils/svg/svg_viewer2D.py +3 -4
  76. pymodaq/utils/tcp_ip/mysocket.py +4 -110
  77. pymodaq/utils/tcp_ip/serializer.py +4 -801
  78. pymodaq/utils/tcp_ip/tcp_server_client.py +15 -13
  79. pymodaq-5.0.1.dist-info/METADATA +242 -0
  80. pymodaq-5.0.1.dist-info/RECORD +122 -0
  81. {pymodaq-4.4.7.dist-info → pymodaq-5.0.1.dist-info}/WHEEL +1 -1
  82. {pymodaq-4.4.7.dist-info → pymodaq-5.0.1.dist-info}/entry_points.txt +1 -0
  83. pymodaq/examples/custom_app.py +0 -258
  84. pymodaq/examples/custom_viewer.py +0 -112
  85. pymodaq/examples/parameter_ex.py +0 -138
  86. pymodaq/examples/preset_MockCamera.xml +0 -1
  87. pymodaq/post_treatment/daq_measurement/daq_measurement_GUI.py +0 -142
  88. pymodaq/post_treatment/daq_measurement/daq_measurement_GUI.ui +0 -232
  89. pymodaq/post_treatment/daq_measurement/daq_measurement_main.py +0 -391
  90. pymodaq/post_treatment/daq_measurement/process_from_QtDesigner_DAQ_Measurement_GUI.bat +0 -2
  91. pymodaq/post_treatment/process_to_scalar.py +0 -263
  92. pymodaq/resources/QtDesigner_Ressources/Icon_Library/1d.png +0 -0
  93. pymodaq/resources/QtDesigner_Ressources/Icon_Library/2d.png +0 -0
  94. pymodaq/resources/QtDesigner_Ressources/Icon_Library/3d.png +0 -0
  95. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Add2.png +0 -0
  96. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Add_Step.png +0 -0
  97. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Approve.png +0 -0
  98. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Approve_All.png +0 -0
  99. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Browse_Dir_Path.png +0 -0
  100. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Calculator.png +0 -0
  101. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ChnGroup.png +0 -0
  102. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ChnNum.png +0 -0
  103. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ChnText.png +0 -0
  104. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ChnTime.png +0 -0
  105. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ChnWave.png +0 -0
  106. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Close3.png +0 -0
  107. pymodaq/resources/QtDesigner_Ressources/Icon_Library/CollapseAll.png +0 -0
  108. pymodaq/resources/QtDesigner_Ressources/Icon_Library/CollapseAll_32.png +0 -0
  109. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ColorPicker.png +0 -0
  110. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Contract.png +0 -0
  111. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Create.png +0 -0
  112. pymodaq/resources/QtDesigner_Ressources/Icon_Library/DeleteLayer.png +0 -0
  113. pymodaq/resources/QtDesigner_Ressources/Icon_Library/EditOpen.png +0 -0
  114. pymodaq/resources/QtDesigner_Ressources/Icon_Library/EditRedo.png +0 -0
  115. pymodaq/resources/QtDesigner_Ressources/Icon_Library/EditUndo.png +0 -0
  116. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Ellipse.png +0 -0
  117. pymodaq/resources/QtDesigner_Ressources/Icon_Library/EllipseFilled.png +0 -0
  118. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Error.png +0 -0
  119. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ErrorMessage.png +0 -0
  120. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Error_16.png +0 -0
  121. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Exit.png +0 -0
  122. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Expand.png +0 -0
  123. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ExpandAll.png +0 -0
  124. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ExpandAll_32.png +0 -0
  125. pymodaq/resources/QtDesigner_Ressources/Icon_Library/FFT.png +0 -0
  126. pymodaq/resources/QtDesigner_Ressources/Icon_Library/HLM.ico +0 -0
  127. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Help.png +0 -0
  128. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Help_32.png +0 -0
  129. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Histogram.png +0 -0
  130. pymodaq/resources/QtDesigner_Ressources/Icon_Library/LUT_LookUpTable.png +0 -0
  131. pymodaq/resources/QtDesigner_Ressources/Icon_Library/MagnifyingGlass.png +0 -0
  132. pymodaq/resources/QtDesigner_Ressources/Icon_Library/MagnifyingGlass_24.png +0 -0
  133. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Marker.png +0 -0
  134. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Math.png +0 -0
  135. pymodaq/resources/QtDesigner_Ressources/Icon_Library/MeasurementStudio_32.png +0 -0
  136. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Move.png +0 -0
  137. pymodaq/resources/QtDesigner_Ressources/Icon_Library/MoveDown.png +0 -0
  138. pymodaq/resources/QtDesigner_Ressources/Icon_Library/MoveUp.png +0 -0
  139. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Multiply.png +0 -0
  140. pymodaq/resources/QtDesigner_Ressources/Icon_Library/NewFile.png +0 -0
  141. pymodaq/resources/QtDesigner_Ressources/Icon_Library/NewLayer.png +0 -0
  142. pymodaq/resources/QtDesigner_Ressources/Icon_Library/New_File.png +0 -0
  143. pymodaq/resources/QtDesigner_Ressources/Icon_Library/New_Folder.png +0 -0
  144. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Open.png +0 -0
  145. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Open_1D.png +0 -0
  146. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Open_2D.png +0 -0
  147. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Open_File.png +0 -0
  148. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Open_File_32.png +0 -0
  149. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Open_sim.png +0 -0
  150. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Options.png +0 -0
  151. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Oscilloscope.png +0 -0
  152. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Oscilloscope_16.png +0 -0
  153. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Pass.png +0 -0
  154. pymodaq/resources/QtDesigner_Ressources/Icon_Library/RGB.png +0 -0
  155. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Rectangle.png +0 -0
  156. pymodaq/resources/QtDesigner_Ressources/Icon_Library/RectangleFilled.png +0 -0
  157. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Redo.png +0 -0
  158. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Refresh.png +0 -0
  159. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Refresh2.png +0 -0
  160. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Refresh_32.png +0 -0
  161. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Region.png +0 -0
  162. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Rendezvous.png +0 -0
  163. pymodaq/resources/QtDesigner_Ressources/Icon_Library/SELECT.png +0 -0
  164. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Save.png +0 -0
  165. pymodaq/resources/QtDesigner_Ressources/Icon_Library/SaveAll.png +0 -0
  166. pymodaq/resources/QtDesigner_Ressources/Icon_Library/SaveAll_32.png +0 -0
  167. pymodaq/resources/QtDesigner_Ressources/Icon_Library/SaveAs.png +0 -0
  168. pymodaq/resources/QtDesigner_Ressources/Icon_Library/SaveAs_32.png +0 -0
  169. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Save_24.png +0 -0
  170. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Save_32.png +0 -0
  171. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Search.png +0 -0
  172. pymodaq/resources/QtDesigner_Ressources/Icon_Library/SelectPolygon.png +0 -0
  173. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Select_24.png +0 -0
  174. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Settings.png +0 -0
  175. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Snap&Save.png +0 -0
  176. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Snapshot.png +0 -0
  177. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Snapshot2.png +0 -0
  178. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Snapshot2_16.png +0 -0
  179. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Snapshot2_32.png +0 -0
  180. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Spreadsheet.png +0 -0
  181. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Statistics.png +0 -0
  182. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Statistics2.png +0 -0
  183. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Status.png +0 -0
  184. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Subtract.png +0 -0
  185. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Vision.png +0 -0
  186. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Volts.png +0 -0
  187. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Wait2.png +0 -0
  188. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Zoom_1_1.png +0 -0
  189. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Zoom_in.png +0 -0
  190. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Zoom_out.png +0 -0
  191. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Zoom_to_Selection.png +0 -0
  192. pymodaq/resources/QtDesigner_Ressources/Icon_Library/abort.png +0 -0
  193. pymodaq/resources/QtDesigner_Ressources/Icon_Library/advanced2.png +0 -0
  194. pymodaq/resources/QtDesigner_Ressources/Icon_Library/algo.png +0 -0
  195. pymodaq/resources/QtDesigner_Ressources/Icon_Library/autoscale.png +0 -0
  196. pymodaq/resources/QtDesigner_Ressources/Icon_Library/b_icon.png +0 -0
  197. pymodaq/resources/QtDesigner_Ressources/Icon_Library/back.png +0 -0
  198. pymodaq/resources/QtDesigner_Ressources/Icon_Library/bg_icon.png +0 -0
  199. pymodaq/resources/QtDesigner_Ressources/Icon_Library/camera.png +0 -0
  200. pymodaq/resources/QtDesigner_Ressources/Icon_Library/camera_snap.png +0 -0
  201. pymodaq/resources/QtDesigner_Ressources/Icon_Library/cartesian.png +0 -0
  202. pymodaq/resources/QtDesigner_Ressources/Icon_Library/clear2.png +0 -0
  203. pymodaq/resources/QtDesigner_Ressources/Icon_Library/clear_ROI.png +0 -0
  204. pymodaq/resources/QtDesigner_Ressources/Icon_Library/close2.png +0 -0
  205. pymodaq/resources/QtDesigner_Ressources/Icon_Library/cluster2.png +0 -0
  206. pymodaq/resources/QtDesigner_Ressources/Icon_Library/color.png +0 -0
  207. pymodaq/resources/QtDesigner_Ressources/Icon_Library/color2.png +0 -0
  208. pymodaq/resources/QtDesigner_Ressources/Icon_Library/continuous.png +0 -0
  209. pymodaq/resources/QtDesigner_Ressources/Icon_Library/data.png +0 -0
  210. pymodaq/resources/QtDesigner_Ressources/Icon_Library/delay.png +0 -0
  211. pymodaq/resources/QtDesigner_Ressources/Icon_Library/download.png +0 -0
  212. pymodaq/resources/QtDesigner_Ressources/Icon_Library/download2.png +0 -0
  213. pymodaq/resources/QtDesigner_Ressources/Icon_Library/error2.png +0 -0
  214. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ethernet.png +0 -0
  215. pymodaq/resources/QtDesigner_Ressources/Icon_Library/exit2.png +0 -0
  216. pymodaq/resources/QtDesigner_Ressources/Icon_Library/fan.png +0 -0
  217. pymodaq/resources/QtDesigner_Ressources/Icon_Library/filter2.png +0 -0
  218. pymodaq/resources/QtDesigner_Ressources/Icon_Library/g_icon.png +0 -0
  219. pymodaq/resources/QtDesigner_Ressources/Icon_Library/gear2.png +0 -0
  220. pymodaq/resources/QtDesigner_Ressources/Icon_Library/go_to.png +0 -0
  221. pymodaq/resources/QtDesigner_Ressources/Icon_Library/go_to_1.png +0 -0
  222. pymodaq/resources/QtDesigner_Ressources/Icon_Library/go_to_2.png +0 -0
  223. pymodaq/resources/QtDesigner_Ressources/Icon_Library/grab.png +0 -0
  224. pymodaq/resources/QtDesigner_Ressources/Icon_Library/graph.png +0 -0
  225. pymodaq/resources/QtDesigner_Ressources/Icon_Library/greenLight2.png +0 -0
  226. pymodaq/resources/QtDesigner_Ressources/Icon_Library/greenLight2_32.png +0 -0
  227. pymodaq/resources/QtDesigner_Ressources/Icon_Library/green_light.png +0 -0
  228. pymodaq/resources/QtDesigner_Ressources/Icon_Library/grey_icon.png +0 -0
  229. pymodaq/resources/QtDesigner_Ressources/Icon_Library/greyscale.png +0 -0
  230. pymodaq/resources/QtDesigner_Ressources/Icon_Library/help1.png +0 -0
  231. pymodaq/resources/QtDesigner_Ressources/Icon_Library/help1_32.png +0 -0
  232. pymodaq/resources/QtDesigner_Ressources/Icon_Library/home2.png +0 -0
  233. pymodaq/resources/QtDesigner_Ressources/Icon_Library/information2.png +0 -0
  234. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ini.png +0 -0
  235. pymodaq/resources/QtDesigner_Ressources/Icon_Library/input.png +0 -0
  236. pymodaq/resources/QtDesigner_Ressources/Icon_Library/integrator.png +0 -0
  237. pymodaq/resources/QtDesigner_Ressources/Icon_Library/joystick.png +0 -0
  238. pymodaq/resources/QtDesigner_Ressources/Icon_Library/light_green.png +0 -0
  239. pymodaq/resources/QtDesigner_Ressources/Icon_Library/light_green_16.png +0 -0
  240. pymodaq/resources/QtDesigner_Ressources/Icon_Library/light_red.png +0 -0
  241. pymodaq/resources/QtDesigner_Ressources/Icon_Library/light_red_16.png +0 -0
  242. pymodaq/resources/QtDesigner_Ressources/Icon_Library/light_yellow.png +0 -0
  243. pymodaq/resources/QtDesigner_Ressources/Icon_Library/light_yellow_16.png +0 -0
  244. pymodaq/resources/QtDesigner_Ressources/Icon_Library/limiter.png +0 -0
  245. pymodaq/resources/QtDesigner_Ressources/Icon_Library/load_ROI.png +0 -0
  246. pymodaq/resources/QtDesigner_Ressources/Icon_Library/meshPlot.png +0 -0
  247. pymodaq/resources/QtDesigner_Ressources/Icon_Library/meter.png +0 -0
  248. pymodaq/resources/QtDesigner_Ressources/Icon_Library/meter2.png +0 -0
  249. pymodaq/resources/QtDesigner_Ressources/Icon_Library/meter_32.png +0 -0
  250. pymodaq/resources/QtDesigner_Ressources/Icon_Library/move_contour.png +0 -0
  251. pymodaq/resources/QtDesigner_Ressources/Icon_Library/move_straight_line.png +0 -0
  252. pymodaq/resources/QtDesigner_Ressources/Icon_Library/movie.png +0 -0
  253. pymodaq/resources/QtDesigner_Ressources/Icon_Library/multi_point.png +0 -0
  254. pymodaq/resources/QtDesigner_Ressources/Icon_Library/multiplexer.png +0 -0
  255. pymodaq/resources/QtDesigner_Ressources/Icon_Library/new.png +0 -0
  256. pymodaq/resources/QtDesigner_Ressources/Icon_Library/openArrow.png +0 -0
  257. pymodaq/resources/QtDesigner_Ressources/Icon_Library/openTree.png +0 -0
  258. pymodaq/resources/QtDesigner_Ressources/Icon_Library/oscilloscope2.png +0 -0
  259. pymodaq/resources/QtDesigner_Ressources/Icon_Library/oscilloscope3.png +0 -0
  260. pymodaq/resources/QtDesigner_Ressources/Icon_Library/overlay.png +0 -0
  261. pymodaq/resources/QtDesigner_Ressources/Icon_Library/pass2.png +0 -0
  262. pymodaq/resources/QtDesigner_Ressources/Icon_Library/pass2_16.png +0 -0
  263. pymodaq/resources/QtDesigner_Ressources/Icon_Library/pass_32.png +0 -0
  264. pymodaq/resources/QtDesigner_Ressources/Icon_Library/pause.png +0 -0
  265. pymodaq/resources/QtDesigner_Ressources/Icon_Library/permute.png +0 -0
  266. pymodaq/resources/QtDesigner_Ressources/Icon_Library/phase.png +0 -0
  267. pymodaq/resources/QtDesigner_Ressources/Icon_Library/play.png +0 -0
  268. pymodaq/resources/QtDesigner_Ressources/Icon_Library/polar.png +0 -0
  269. pymodaq/resources/QtDesigner_Ressources/Icon_Library/pole_zero.png +0 -0
  270. pymodaq/resources/QtDesigner_Ressources/Icon_Library/powerMeter.png +0 -0
  271. pymodaq/resources/QtDesigner_Ressources/Icon_Library/powerSwitch.png +0 -0
  272. pymodaq/resources/QtDesigner_Ressources/Icon_Library/powerSwitch_16.png +0 -0
  273. pymodaq/resources/QtDesigner_Ressources/Icon_Library/print2.png +0 -0
  274. pymodaq/resources/QtDesigner_Ressources/Icon_Library/print2_32.png +0 -0
  275. pymodaq/resources/QtDesigner_Ressources/Icon_Library/properties.png +0 -0
  276. pymodaq/resources/QtDesigner_Ressources/Icon_Library/r_icon.png +0 -0
  277. pymodaq/resources/QtDesigner_Ressources/Icon_Library/radiocontrolbutton.png +0 -0
  278. pymodaq/resources/QtDesigner_Ressources/Icon_Library/random.png +0 -0
  279. pymodaq/resources/QtDesigner_Ressources/Icon_Library/read2.png +0 -0
  280. pymodaq/resources/QtDesigner_Ressources/Icon_Library/red_light.png +0 -0
  281. pymodaq/resources/QtDesigner_Ressources/Icon_Library/remove.png +0 -0
  282. pymodaq/resources/QtDesigner_Ressources/Icon_Library/reset.png +0 -0
  283. pymodaq/resources/QtDesigner_Ressources/Icon_Library/rgb_icon.png +0 -0
  284. pymodaq/resources/QtDesigner_Ressources/Icon_Library/robot.png +0 -0
  285. pymodaq/resources/QtDesigner_Ressources/Icon_Library/rotation2.png +0 -0
  286. pymodaq/resources/QtDesigner_Ressources/Icon_Library/run2.png +0 -0
  287. pymodaq/resources/QtDesigner_Ressources/Icon_Library/run_all.png +0 -0
  288. pymodaq/resources/QtDesigner_Ressources/Icon_Library/saturation.png +0 -0
  289. pymodaq/resources/QtDesigner_Ressources/Icon_Library/saveTree.png +0 -0
  290. pymodaq/resources/QtDesigner_Ressources/Icon_Library/save_ROI.png +0 -0
  291. pymodaq/resources/QtDesigner_Ressources/Icon_Library/scale_horizontally.png +0 -0
  292. pymodaq/resources/QtDesigner_Ressources/Icon_Library/scale_vertically.png +0 -0
  293. pymodaq/resources/QtDesigner_Ressources/Icon_Library/search2.png +0 -0
  294. pymodaq/resources/QtDesigner_Ressources/Icon_Library/select2.png +0 -0
  295. pymodaq/resources/QtDesigner_Ressources/Icon_Library/select_all.png +0 -0
  296. pymodaq/resources/QtDesigner_Ressources/Icon_Library/select_all2.png +0 -0
  297. pymodaq/resources/QtDesigner_Ressources/Icon_Library/select_none.png +0 -0
  298. pymodaq/resources/QtDesigner_Ressources/Icon_Library/sequence.png +0 -0
  299. pymodaq/resources/QtDesigner_Ressources/Icon_Library/sequence2.png +0 -0
  300. pymodaq/resources/QtDesigner_Ressources/Icon_Library/snap.png +0 -0
  301. pymodaq/resources/QtDesigner_Ressources/Icon_Library/sort_ascend.png +0 -0
  302. pymodaq/resources/QtDesigner_Ressources/Icon_Library/spectrumAnalyzer.png +0 -0
  303. pymodaq/resources/QtDesigner_Ressources/Icon_Library/start.png +0 -0
  304. pymodaq/resources/QtDesigner_Ressources/Icon_Library/status_cancelled.png +0 -0
  305. pymodaq/resources/QtDesigner_Ressources/Icon_Library/stop.png +0 -0
  306. pymodaq/resources/QtDesigner_Ressources/Icon_Library/stop2.png +0 -0
  307. pymodaq/resources/QtDesigner_Ressources/Icon_Library/stop3.png +0 -0
  308. pymodaq/resources/QtDesigner_Ressources/Icon_Library/stop_all.png +0 -0
  309. pymodaq/resources/QtDesigner_Ressources/Icon_Library/sum.png +0 -0
  310. pymodaq/resources/QtDesigner_Ressources/Icon_Library/surfacePlot.png +0 -0
  311. pymodaq/resources/QtDesigner_Ressources/Icon_Library/target.png +0 -0
  312. pymodaq/resources/QtDesigner_Ressources/Icon_Library/tree.png +0 -0
  313. pymodaq/resources/QtDesigner_Ressources/Icon_Library/updateTree.png +0 -0
  314. pymodaq/resources/QtDesigner_Ressources/Icon_Library/utility2.png +0 -0
  315. pymodaq/resources/QtDesigner_Ressources/Icon_Library/utility_small.png +0 -0
  316. pymodaq/resources/QtDesigner_Ressources/Icon_Library/vector.png +0 -0
  317. pymodaq/resources/QtDesigner_Ressources/Icon_Library/verify.png +0 -0
  318. pymodaq/resources/QtDesigner_Ressources/Icon_Library/video.png +0 -0
  319. pymodaq/resources/QtDesigner_Ressources/Icon_Library/wait.png +0 -0
  320. pymodaq/resources/QtDesigner_Ressources/Icon_Library/waterfallPlot.png +0 -0
  321. pymodaq/resources/QtDesigner_Ressources/Icon_Library/watershed.png +0 -0
  322. pymodaq/resources/QtDesigner_Ressources/Icon_Library/yellow_light.png +0 -0
  323. pymodaq/resources/QtDesigner_Ressources/Icon_Library/zip_file.png +0 -0
  324. pymodaq/resources/QtDesigner_Ressources/Icon_Library/zoomAuto.png +0 -0
  325. pymodaq/resources/QtDesigner_Ressources/Icon_Library/zoomReset.png +0 -0
  326. pymodaq/resources/QtDesigner_Ressources/QtDesigner_ressources.bat +0 -2
  327. pymodaq/resources/QtDesigner_Ressources/QtDesigner_ressources.qrc +0 -238
  328. pymodaq/resources/QtDesigner_Ressources/QtDesigner_ressources_rc.py +0 -127453
  329. pymodaq/resources/QtDesigner_Ressources/__init__.py +0 -0
  330. pymodaq/resources/QtDesigner_Ressources/credit.rst +0 -7
  331. pymodaq/resources/QtDesigner_Ressources/icons.svg +0 -142
  332. pymodaq/resources/VERSION +0 -2
  333. pymodaq/resources/config_template.toml +0 -96
  334. pymodaq/resources/triangulation_data.npy +0 -0
  335. pymodaq/utils/abstract/__init__.py +0 -48
  336. pymodaq/utils/db/__init__.py +0 -0
  337. pymodaq/utils/db/db_logger/__init__.py +0 -0
  338. pymodaq/utils/factory.py +0 -82
  339. pymodaq/utils/gui_utils/custom_app.py +0 -133
  340. pymodaq/utils/gui_utils/dock.py +0 -107
  341. pymodaq/utils/gui_utils/file_io.py +0 -94
  342. pymodaq/utils/gui_utils/layout.py +0 -34
  343. pymodaq/utils/gui_utils/list_picker.py +0 -38
  344. pymodaq/utils/gui_utils/widgets/__init__.py +0 -5
  345. pymodaq/utils/gui_utils/widgets/label.py +0 -24
  346. pymodaq/utils/gui_utils/widgets/push.py +0 -149
  347. pymodaq/utils/gui_utils/widgets/qled.py +0 -62
  348. pymodaq/utils/gui_utils/widgets/spinbox.py +0 -24
  349. pymodaq/utils/gui_utils/widgets/table.py +0 -263
  350. pymodaq/utils/gui_utils/widgets/tree_layout.py +0 -188
  351. pymodaq/utils/gui_utils/widgets/tree_toml.py +0 -110
  352. pymodaq/utils/h5modules/backends.py +0 -1022
  353. pymodaq/utils/h5modules/browsing.py +0 -627
  354. pymodaq/utils/h5modules/data_saving.py +0 -1107
  355. pymodaq/utils/h5modules/exporter.py +0 -119
  356. pymodaq/utils/h5modules/exporters/__init__.py +0 -0
  357. pymodaq/utils/h5modules/exporters/base.py +0 -111
  358. pymodaq/utils/h5modules/exporters/flimj.py +0 -63
  359. pymodaq/utils/h5modules/exporters/hyperspy.py +0 -143
  360. pymodaq/utils/h5modules/saving.py +0 -866
  361. pymodaq/utils/h5modules/utils.py +0 -115
  362. pymodaq/utils/managers/action_manager.py +0 -489
  363. pymodaq/utils/managers/parameter_manager.py +0 -279
  364. pymodaq/utils/managers/roi_manager.py +0 -740
  365. pymodaq/utils/parameter/ioxml.py +0 -545
  366. pymodaq/utils/parameter/pymodaq_ptypes/__init__.py +0 -38
  367. pymodaq/utils/parameter/pymodaq_ptypes/bool.py +0 -31
  368. pymodaq/utils/parameter/pymodaq_ptypes/date.py +0 -126
  369. pymodaq/utils/parameter/pymodaq_ptypes/filedir.py +0 -143
  370. pymodaq/utils/parameter/pymodaq_ptypes/itemselect.py +0 -265
  371. pymodaq/utils/parameter/pymodaq_ptypes/led.py +0 -44
  372. pymodaq/utils/parameter/pymodaq_ptypes/list.py +0 -150
  373. pymodaq/utils/parameter/pymodaq_ptypes/numeric.py +0 -18
  374. pymodaq/utils/parameter/pymodaq_ptypes/pixmap.py +0 -175
  375. pymodaq/utils/parameter/pymodaq_ptypes/slide.py +0 -166
  376. pymodaq/utils/parameter/pymodaq_ptypes/table.py +0 -135
  377. pymodaq/utils/parameter/pymodaq_ptypes/tableview.py +0 -149
  378. pymodaq/utils/parameter/pymodaq_ptypes/text.py +0 -142
  379. pymodaq/utils/plotting/__init__.py +0 -0
  380. pymodaq/utils/plotting/data_viewers/__init__.py +0 -10
  381. pymodaq/utils/plotting/data_viewers/base.py +0 -286
  382. pymodaq/utils/plotting/data_viewers/viewer.py +0 -275
  383. pymodaq/utils/plotting/data_viewers/viewer0D.py +0 -298
  384. pymodaq/utils/plotting/data_viewers/viewer1D.py +0 -826
  385. pymodaq/utils/plotting/data_viewers/viewer1Dbasic.py +0 -231
  386. pymodaq/utils/plotting/data_viewers/viewer2D.py +0 -1118
  387. pymodaq/utils/plotting/data_viewers/viewer2D_basic.py +0 -146
  388. pymodaq/utils/plotting/data_viewers/viewerND.py +0 -800
  389. pymodaq/utils/plotting/gant_chart.py +0 -123
  390. pymodaq/utils/plotting/image_viewer.py +0 -97
  391. pymodaq/utils/plotting/items/__init__.py +0 -0
  392. pymodaq/utils/plotting/items/axis_scaled.py +0 -93
  393. pymodaq/utils/plotting/items/crosshair.py +0 -94
  394. pymodaq/utils/plotting/items/image.py +0 -388
  395. pymodaq/utils/plotting/navigator.py +0 -353
  396. pymodaq/utils/plotting/plotter/plotter.py +0 -94
  397. pymodaq/utils/plotting/plotter/plotters/__init__.py +0 -0
  398. pymodaq/utils/plotting/plotter/plotters/matplotlib_plotters.py +0 -134
  399. pymodaq/utils/plotting/plotter/plotters/qt_plotters.py +0 -78
  400. pymodaq/utils/plotting/utils/__init__.py +0 -0
  401. pymodaq/utils/plotting/utils/axes_viewer.py +0 -88
  402. pymodaq/utils/plotting/utils/filter.py +0 -585
  403. pymodaq/utils/plotting/utils/lineout.py +0 -226
  404. pymodaq/utils/plotting/utils/plot_utils.py +0 -579
  405. pymodaq/utils/plotting/utils/signalND.py +0 -1347
  406. pymodaq/utils/plotting/widgets.py +0 -76
  407. pymodaq/utils/qvariant.py +0 -12
  408. pymodaq/utils/slicing.py +0 -63
  409. pymodaq/utils/units.py +0 -216
  410. pymodaq-4.4.7.dist-info/METADATA +0 -163
  411. pymodaq-4.4.7.dist-info/RECORD +0 -446
  412. /pymodaq/{post_treatment/daq_analysis → daq_utils}/__init__.py +0 -0
  413. /pymodaq/{utils/abstract/logger.py → extensions/daq_logger/abstract.py} +0 -0
  414. /pymodaq/{post_treatment/daq_measurement → extensions/daq_logger/db}/__init__.py +0 -0
  415. {pymodaq-4.4.7.dist-info → pymodaq-5.0.1.dist-info}/licenses/LICENSE +0 -0
pymodaq/utils/data.py CHANGED
@@ -1,2406 +1,15 @@
1
- # -*- coding: utf-8 -*-
2
- """
3
- Created the 28/10/2022
4
-
5
- @author: Sebastien Weber
6
- """
7
- from __future__ import annotations
8
-
9
- from abc import ABCMeta, abstractmethod, abstractproperty
10
- import numbers
11
- import numpy as np
12
- from typing import List, Tuple, Union, Any, Callable
13
- from typing import Iterable as IterableType
14
- from collections.abc import Iterable
15
- from collections import OrderedDict
16
- import logging
17
-
18
- import warnings
19
- from time import time
20
- import copy
21
- import pint
22
- from multipledispatch import dispatch
23
- import pymodaq
24
- from pymodaq.utils.enums import BaseEnum, enum_checker
25
- from pymodaq.utils.messenger import deprecation_msg
26
- from pymodaq.utils.daq_utils import find_objects_in_list_from_attr_name_val
27
- from pymodaq.utils.logger import set_logger, get_module_name
28
- from pymodaq.utils.slicing import SpecialSlicersData
29
- from pymodaq.utils import math_utils as mutils
30
- from pymodaq.utils.config import Config
31
- from pymodaq.utils.plotting.plotter.plotter import PlotterFactory
32
-
33
- from pymodaq import Q_, ureg, Unit
34
-
35
- config = Config()
36
- plotter_factory = PlotterFactory()
37
- logger = set_logger(get_module_name(__file__))
38
-
39
-
40
- def check_units(units: str):
41
- try:
42
- Unit(units)
43
- return units
44
- except pint.errors.UndefinedUnitError:
45
- logger.warning(f'The unit "{units}" is not defined in the pint registry, switching to'
46
- f'dimensionless')
47
- return ''
48
-
49
-
50
- def squeeze(data_array: np.ndarray, do_squeeze=True, squeeze_indexes: Tuple[int]=None) -> np.ndarray:
51
- """ Squeeze numpy arrays return at least 1D arrays except if do_squeeze is False"""
52
- if do_squeeze:
53
- return np.atleast_1d(np.squeeze(data_array, axis=squeeze_indexes))
54
- else:
55
- return np.atleast_1d(data_array)
56
-
57
-
58
- class DataIndexWarning(Warning):
59
- pass
60
-
61
-
62
- class DataTypeWarning(Warning):
63
- pass
64
-
65
-
66
- class DataDimWarning(Warning):
67
- pass
68
-
69
-
70
- class DataSizeWarning(Warning):
71
- pass
72
-
73
-
74
- WARNINGS = [DataIndexWarning, DataTypeWarning, DataDimWarning, DataSizeWarning]
75
-
76
- if logging.getLevelName(logger.level) == 'DEBUG':
77
- for warning in WARNINGS:
78
- warnings.filterwarnings('default', category=warning)
79
- else:
80
- for warning in WARNINGS:
81
- warnings.filterwarnings('ignore', category=warning)
82
-
83
-
84
- class DataShapeError(Exception):
85
- pass
86
-
87
-
88
- class DataLengthError(Exception):
89
- pass
90
-
91
-
92
- class DataDimError(Exception):
93
- pass
94
-
95
-
96
- class DataUnitError(Exception):
97
- pass
98
-
99
-
100
- class DwaType(BaseEnum):
101
- """Different types of `DataWithAxes`."""
102
- DataWithAxes = 0
103
- DataRaw = 1
104
- DataActuator = 2
105
- DataFromPlugins = 3
106
- DataCalculated = 4
107
-
108
-
109
- class DataDim(BaseEnum):
110
- """Enum for dimensionality representation of data"""
111
- Data0D = 0
112
- Data1D = 1
113
- Data2D = 2
114
- DataND = 3
115
-
116
- def __le__(self, other_dim: 'DataDim'):
117
- other_dim = enum_checker(DataDim, other_dim)
118
- return self.value.__le__(other_dim.value)
119
-
120
- def __lt__(self, other_dim: 'DataDim'):
121
- other_dim = enum_checker(DataDim, other_dim)
122
- return self.value.__lt__(other_dim.value)
123
-
124
- def __ge__(self, other_dim: 'DataDim'):
125
- other_dim = enum_checker(DataDim, other_dim)
126
- return self.value.__ge__(other_dim.value)
127
-
128
- def __gt__(self, other_dim: 'DataDim'):
129
- other_dim = enum_checker(DataDim, other_dim)
130
- return self.value.__gt__(other_dim.value)
131
-
132
- @property
133
- def dim_index(self):
134
- return self.value
135
-
136
- @staticmethod
137
- def from_data_array(data_array: np.ndarray):
138
- if len(data_array.shape) == 1 and data_array.size == 1:
139
- return DataDim['Data0D']
140
- elif len(data_array.shape) == 1 and data_array.size > 1:
141
- return DataDim['Data1D']
142
- elif len(data_array.shape) == 2:
143
- return DataDim['Data2D']
144
- else:
145
- return DataDim['DataND']
146
-
147
-
148
- class DataSource(BaseEnum):
149
- """Enum for source of data"""
150
- raw = 0
151
- calculated = 1
152
-
153
-
154
- class DataDistribution(BaseEnum):
155
- """Enum for distribution of data"""
156
- uniform = 0
157
- spread = 1
158
-
159
-
160
- class Axis:
161
- """Object holding info and data about physical axis of some data
162
-
163
- In case the axis's data is linear, store the info as a scale and offset else store the data
164
-
165
- Parameters
166
- ----------
167
- label: str
168
- The label of the axis, for instance 'time' for a temporal axis
169
- units: str
170
- The units of the data in the object, for instance 's' for seconds
171
- data: ndarray
172
- A 1D ndarray holding the data of the axis
173
- index: int
174
- an integer representing the index of the Data object this axis is related to
175
- scaling: float
176
- The scaling to apply to a linspace version in order to obtain the proper scaling
177
- offset: float
178
- The offset to apply to a linspace/scaled version in order to obtain the proper axis
179
- size: int
180
- The size of the axis array (to be specified if data is None)
181
- spread_order: int
182
- An integer needed in the case where data has a spread DataDistribution. It refers to the index along the data's
183
- spread_index dimension
184
-
185
- Examples
186
- --------
187
- >>> axis = Axis('myaxis', units='seconds', data=np.array([1,2,3,4,5]), index=0)
188
- """
189
-
190
- base_type = 'Axis'
191
-
192
- def __init__(self, label: str = '', units: str = '', data: np.ndarray = None, index: int = 0,
193
- scaling=None, offset=None, size=None, spread_order: int = 0):
194
- super().__init__()
195
-
196
- self.iaxis: Axis = SpecialSlicersData(self, False)
197
-
198
- self._size = size
199
- self._data = None
200
- self._index = None
201
- self._label = None
202
- self._units = None
203
- self._scaling = scaling
204
- self._offset = offset
205
-
206
- self.units = units
207
- self.label = label
208
- self.data = data
209
- self.index = index
210
- self.spread_order = spread_order
211
- if (scaling is None or offset is None or size is None) and data is not None:
212
- self.get_scale_offset_from_data(data)
213
-
214
- def copy(self):
215
- return copy.copy(self)
216
-
217
- def as_dwa(self) -> DataWithAxes:
218
- dwa = DataRaw(self.label, data=[self.get_data()],
219
- labels=[f'{self.label}_{self.units}'])
220
- dwa.create_missing_axes()
221
- return dwa
222
-
223
- @property
224
- def label(self) -> str:
225
- """str: get/set the label of this axis"""
226
- return self._label
227
-
228
- @label.setter
229
- def label(self, lab: str):
230
- if not isinstance(lab, str):
231
- raise TypeError('label for the Axis class should be a string')
232
- self._label = lab
233
-
234
- @property
235
- def units(self) -> str:
236
- """str: get/set the units for this axis"""
237
- return self._units
238
-
239
- @units.setter
240
- def units(self, units: str):
241
- if not isinstance(units, str):
242
- raise TypeError('units for the Axis class should be a string')
243
- units = check_units(units)
244
- self._units = units
245
-
246
- @property
247
- def index(self) -> int:
248
- """int: get/set the index this axis corresponds to in a DataWithAxis object"""
249
- return self._index
250
-
251
- @index.setter
252
- def index(self, ind: int):
253
- self._check_index_valid(ind)
254
- self._index = ind
255
-
256
- @property
257
- def data(self):
258
- """np.ndarray: get/set the data of Axis"""
259
- return self._data
260
-
261
- @data.setter
262
- def data(self, data: np.ndarray):
263
- if data is not None:
264
- self._check_data_valid(data)
265
- self.get_scale_offset_from_data(data)
266
- self._size = data.size
267
- elif self.size is None:
268
- self._size = 0
269
- self._data = data
270
-
271
- def get_data(self) -> np.ndarray:
272
- """Convenience method to obtain the axis data (usually None because scaling and offset are used)"""
273
- return self._data if self._data is not None else self._linear_data(self.size)
274
-
275
- def get_data_at(self, indexes: Union[int, IterableType, slice]) -> np.ndarray:
276
- """ Get data at specified indexes
277
-
278
- Parameters
279
- ----------
280
- indexes:
281
- """
282
- if not (isinstance(indexes, np.ndarray) or isinstance(indexes, slice) or
283
- isinstance(indexes, int)):
284
- indexes = np.array(indexes)
285
- return self.get_data()[indexes]
286
-
287
- def get_scale_offset_from_data(self, data: np.ndarray = None):
288
- """Get the scaling and offset from the axis's data
289
-
290
- If data is not None, extract the scaling and offset
291
-
292
- Parameters
293
- ----------
294
- data: ndarray
295
- """
296
- if data is None and self._data is not None:
297
- data = self._data
298
-
299
- if self.is_axis_linear(data):
300
- if len(data) == 1:
301
- self._scaling = 1
302
- else:
303
- self._scaling = np.mean(np.diff(data))
304
- self._offset = data[0]
305
- self._data = None
306
-
307
- def is_axis_linear(self, data=None):
308
- if data is None:
309
- data = self.get_data()
310
- if data is not None:
311
- return np.allclose(np.diff(data), np.mean(np.diff(data)))
312
- else:
313
- return False
314
-
315
- @property
316
- def scaling(self):
317
- return self._scaling
318
-
319
- @scaling.setter
320
- def scaling(self, _scaling: float):
321
- self._scaling = _scaling
322
-
323
- @property
324
- def offset(self):
325
- return self._offset
326
-
327
- @offset.setter
328
- def offset(self, _offset: float):
329
- self._offset = _offset
330
-
331
- @property
332
- def size(self) -> int:
333
- """int: get/set the size/length of the 1D ndarray"""
334
- return self._size
335
-
336
- @size.setter
337
- def size(self, _size: int):
338
- if self._data is None:
339
- self._size = _size
340
-
341
- @staticmethod
342
- def _check_index_valid(index: int):
343
- if not isinstance(index, int):
344
- raise TypeError('index for the Axis class should be a positive integer')
345
- elif index < 0:
346
- raise ValueError('index for the Axis class should be a positive integer')
347
-
348
- @staticmethod
349
- def _check_data_valid(data):
350
- if not isinstance(data, np.ndarray):
351
- raise TypeError(f'data for the Axis class should be a 1D numpy array')
352
- elif len(data.shape) != 1:
353
- raise ValueError(f'data for the Axis class should be a 1D numpy array')
354
-
355
- def _linear_data(self, nsteps: int):
356
- """create axis data with a linear version using scaling and offset"""
357
- return self._offset + self._scaling * np.linspace(0, nsteps-1, nsteps)
358
-
359
- def create_linear_data(self, nsteps:int):
360
- """replace the axis data with a linear version using scaling and offset"""
361
- self.data = self._linear_data(nsteps)
362
-
363
- @staticmethod
364
- def create_simple_linear_data(nsteps: int):
365
- return np.linspace(0, nsteps-1, nsteps)
366
-
367
- def __len__(self):
368
- return self.size
369
-
370
- def _compute_slices(self, slices, *ignored, **ignored_also):
371
- return slices
372
-
373
- def _slicer(self, _slice, *ignored, **ignored_also):
374
- ax: Axis = copy.deepcopy(self)
375
- if isinstance(_slice, int):
376
- ax.data = np.array([ax.get_data()[_slice]])
377
- return ax
378
- elif _slice is Ellipsis:
379
- return ax
380
- elif isinstance(_slice, slice):
381
- if ax._data is not None:
382
- ax.data = ax._data.__getitem__(_slice)
383
- return ax
384
- else:
385
- start = _slice.start if _slice.start is not None else 0
386
- stop = _slice.stop if _slice.stop is not None else self.size
387
-
388
- ax._offset = ax.offset + start * ax.scaling
389
- ax._size = stop - start
390
- return ax
391
-
392
- def __getitem__(self, item):
393
- if hasattr(self, item):
394
- # for when axis was a dict
395
- deprecation_msg('attributes from an Axis object should not be fetched using __getitem__')
396
- return getattr(self, item)
397
-
398
- def __repr__(self):
399
- return f'{self.__class__.__name__}: <label: {self.label}> - <units: {self.units}> - <index: {self.index}>'
400
-
401
- def __mul__(self, scale: numbers.Real):
402
- if isinstance(scale, numbers.Real):
403
- ax = copy.deepcopy(self)
404
- if self.data is not None:
405
- ax.data *= scale
406
- else:
407
- ax._offset *= scale
408
- ax._scaling *= scale
409
- return ax
410
-
411
- def __add__(self, offset: numbers.Real):
412
- if isinstance(offset, numbers.Real):
413
- ax = copy.deepcopy(self)
414
- if self.data is not None:
415
- ax.data += offset
416
- else:
417
- ax._offset += offset
418
- return ax
419
-
420
- def __eq__(self, other: Axis):
421
- if isinstance(other, Axis):
422
- eq = self.label == other.label
423
- eq = eq and (Unit(self.units).is_compatible_with(other.units))
424
- eq = eq and (self.index == other.index)
425
- if self.data is not None and other.data is not None:
426
- eq = eq and (np.allclose(Q_(self.data, self.units),
427
- Q_(other.data, other.units)))
428
- else:
429
- eq = eq and (np.allclose(Q_(self.offset, self.units),
430
- Q_(other.offset, other.units)))
431
- eq = eq and (np.allclose(Q_(self.scaling, self.units),
432
- Q_(other.scaling, other.units)))
433
-
434
- return eq
435
- else:
436
- return False
437
-
438
- def mean(self):
439
- if self._data is not None:
440
- return np.mean(self._data)
441
- else:
442
- return self.offset + self.size / 2 * self.scaling
443
-
444
- def min(self):
445
- if self._data is not None:
446
- return np.min(self._data)
447
- else:
448
- return self.offset + (self.size * self.scaling if self.scaling < 0 else 0)
449
-
450
- def max(self):
451
- if self._data is not None:
452
- return np.max(self._data)
453
- else:
454
- return self.offset + (self.size * self.scaling if self.scaling > 0 else 0)
455
-
456
- def find_index(self, threshold: float) -> int:
457
- """find the index of the threshold value within the axis"""
458
- if threshold < self.min():
459
- return 0
460
- elif threshold > self.max():
461
- return len(self) - 1
462
- elif self._data is not None:
463
- return mutils.find_index(self._data, threshold)[0][0]
464
- else:
465
- return int((threshold - self.offset) / self.scaling)
466
-
467
- def find_indexes(self, thresholds: IterableType[float]) -> IterableType[int]:
468
- if isinstance(thresholds, numbers.Number):
469
- thresholds = [thresholds]
470
- return [self.find_index(threshold) for threshold in thresholds]
471
-
472
-
473
- class NavAxis(Axis):
474
- def __init__(self, *args, **kwargs):
475
- super().__init__(*args, **kwargs)
476
- deprecation_msg('NavAxis should not be used anymore, please use Axis object with correct index.'
477
- 'The navigation index should be specified in the Data object')
478
-
479
-
480
- class DataLowLevel:
481
- """Abstract object for all Data Object
482
-
483
- Parameters
484
- ----------
485
- name: str
486
- the identifier of the data
487
-
488
- Attributes
489
- ----------
490
- name: str
491
- timestamp: float
492
- Time in seconds since epoch. See method time.time()
493
- """
494
-
495
- def __init__(self, name: str):
496
- self._timestamp = time()
497
- self._name = name
498
-
499
- @property
500
- def name(self):
501
- """Get/Set the identifier of the data"""
502
- return self._name
503
-
504
- @name.setter
505
- def name(self, other_name: str):
506
- self._name = other_name
507
-
508
- @property
509
- def timestamp(self):
510
- """Get/Set the timestamp of when the object has been created"""
511
- return self._timestamp
512
-
513
- @timestamp.setter
514
- def timestamp(self, timestamp: float):
515
- """The timestamp of when the object has been created"""
516
- self._timestamp = timestamp
517
-
518
-
519
- class DataBase(DataLowLevel):
520
- """Base object to store homogeneous data and metadata generated by pymodaq's objects.
521
-
522
- To be inherited for real data
523
-
524
- Parameters
525
- ----------
526
- name: str
527
- the identifier of these data
528
- source: DataSource or str
529
- Enum specifying if data are raw or processed (for instance from roi)
530
- dim: DataDim or str
531
- The identifier of the data type
532
- distribution: DataDistribution or str
533
- The distribution type of the data: uniform if distributed on a regular grid or spread if on
534
- specific unordered points
535
- data: list of ndarray
536
- The data the object is storing
537
- labels: list of str
538
- The labels of the data nd-arrays
539
- origin: str
540
- An identifier of the element where the data originated, for instance the DAQ_Viewer's name.
541
- Used when appending DataToExport in DAQ_Scan to disintricate from which origin data comes
542
- from when scanning multiple detectors.
543
- units: str
544
- A unit string identifier as specified in the UnitRegistry of the pint module
545
-
546
- kwargs: named parameters
547
- All other parameters are stored dynamically using the name/value pair. The name of these
548
- extra parameters are added into the extra_attributes attribute
549
-
550
- Attributes
551
- ----------
552
- name: str
553
- the identifier of these data
554
- source: DataSource or str
555
- Enum specifying if data are raw or processed (for instance from roi)
556
- dim: DataDim or str
557
- The identifier of the data type
558
- distribution: DataDistribution or str
559
- The distribution type of the data: uniform if distributed on a regular grid or spread if on specific
560
- unordered points
561
- data: list of ndarray
562
- The data the object is storing
563
- labels: list of str
564
- The labels of the data nd-arrays
565
- origin: str
566
- An identifier of the element where the data originated, for instance the DAQ_Viewer's name. Used when appending
567
- DataToExport in DAQ_Scan to disintricate from which origin data comes from when scanning multiple detectors.
568
- shape: Tuple[int]
569
- The shape of the underlying data
570
- size: int
571
- The size of the ndarrays stored in the object
572
- length: int
573
- The number of ndarrays stored in the object
574
- extra_attributes: List[str]
575
- list of string giving identifiers of the attributes added dynamically at the initialization (for instance
576
- to save extra metadata using the DataSaverLoader
577
-
578
- See Also
579
- --------
580
- DataWithAxes, DataFromPlugins, DataRaw, DataSaverLoader
581
-
582
- Examples
583
- --------
584
- >>> import numpy as np
585
- >>> from pymodaq.utils.data import DataBase, DataSource, DataDim, DataDistribution
586
- >>> data = DataBase('mydata', source=DataSource['raw'], dim=DataDim['Data1D'], \
587
- distribution=DataDistribution['uniform'], data=[np.array([1.,2.,3.]), np.array([4.,5.,6.])],\
588
- labels=['channel1', 'channel2'], origin='docutils code')
589
- >>> data.dim
590
- <DataDim.Data1D: 1>
591
- >>> data.source
592
- <DataSource.raw: 0>
593
- >>> data.shape
594
- (3,)
595
- >>> data.length
596
- 2
597
- >>> data.size
598
- 3
599
- """
600
-
601
- base_type = 'Data'
602
-
603
- def __init__(self, name: str,
604
- source: DataSource = None, dim: DataDim = None,
605
- distribution: DataDistribution = DataDistribution['uniform'],
606
- data: List[np.ndarray] = None,
607
- labels: List[str] = None, origin: str = '',
608
- units: str = '',
609
- **kwargs):
610
-
611
- super().__init__(name=name)
612
- self._iter_index = 0
613
- self._shape = None
614
- self._size = None
615
- self._data = None
616
- self._length = None
617
- self._labels = None
618
- self._dim = dim
619
- self._units = check_units(units)
620
- self._errors = None
621
- self.origin = origin
622
-
623
- source = enum_checker(DataSource, source)
624
- self._source = source
625
-
626
- distribution = enum_checker(DataDistribution, distribution)
627
- self._distribution = distribution
628
-
629
- self.data = data # dim consistency is actually checked within the setter method
630
-
631
- self._check_labels(labels)
632
- self.extra_attributes = []
633
- self.add_extra_attribute(**kwargs)
634
-
635
- @property
636
- def units(self):
637
- """ Get/Set the object units
638
-
639
- Setting to other units should retain the unit compatibility
640
- """
641
- return self._units
642
-
643
- @units.setter
644
- def units(self, units: str):
645
- units = check_units(units)
646
- self.units_as(units, inplace=True)
647
-
648
- def force_units(self, units: str):
649
- """ Change immediately the units to whatever else. Use this with care!"""
650
- self._units = units
651
-
652
- def to_base_units(self):
653
- dwa = self.deepcopy()
654
- data_quantities = [quantity.to_base_units() for quantity in self.quantities]
655
- dwa.data = [quantity.magnitude for quantity in data_quantities]
656
- dwa.force_units(str(data_quantities[0].units))
657
- return dwa
658
-
659
- def units_as(self, units: str, inplace=True) -> 'DataBase':
660
- """ Set the object units to the new one (if possible)
661
-
662
- Parameters
663
- ----------
664
- units: str
665
- The new unit to convert the data to
666
- inplace: bool
667
- default True.
668
- If True replace the data's arrays by array in the new units
669
- If False, return a new data object
670
- """
671
- arrays = []
672
- try:
673
- for ind_array in range(len(self)):
674
- arrays.append(self.quantities[ind_array].m_as(units))
675
-
676
- except pint.errors.DimensionalityError as e:
677
- raise DataUnitError(
678
- f'Cannot change the Data units to {units} \n'
679
- f'{e}')
680
-
681
- if inplace:
682
- self.data = arrays
683
- self._units = units
684
- return self
685
- else:
686
- new_data = copy.deepcopy(self)
687
- new_data.data = arrays
688
- new_data._units = units
689
- return new_data
690
-
691
- def as_dte(self, name: str = 'mydte') -> DataToExport:
692
- """Convenience method to wrap the DataWithAxes object into a DataToExport"""
693
- return DataToExport(name, data=[self])
694
-
695
- def add_extra_attribute(self, **kwargs):
696
- for key in kwargs:
697
- if key not in self.extra_attributes:
698
- self.extra_attributes.append(key)
699
- setattr(self, key, kwargs[key])
700
-
701
- def get_full_name(self) -> str:
702
- """Get the data ful name including the origin attribute into the returned value
703
-
704
- Returns
705
- -------
706
- str: the name of the ataWithAxes data constructed as : origin/name
707
-
708
- Examples
709
- --------
710
- d0 = DataBase(name='datafromdet0', origin='det0')
711
- """
712
- return f'{self.origin}/{self.name}'
713
-
714
- def __repr__(self):
715
- return (f'{self.__class__.__name__} <{self.name}> '
716
- f'<u: {self.units}> '
717
- f'<{self.dim}> <{self.source}> <{self.shape}>')
718
-
719
- def __len__(self):
720
- return self.length
721
-
722
- def __iter__(self):
723
- self._iter_index = 0
724
- return self
725
-
726
- def __next__(self):
727
- if self._iter_index < len(self):
728
- self._iter_index += 1
729
- return self.data[self._iter_index-1]
730
- else:
731
- raise StopIteration
732
-
733
- def __getitem__(self, item) -> np.ndarray:
734
- if (isinstance(item, int) and item < len(self)) or isinstance(item, slice):
735
- return self.data[item]
736
- else:
737
- raise IndexError(f'The index should be an integer lower than the data length')
738
-
739
- def __setitem__(self, key, value):
740
- if isinstance(key, int) and key < len(self) and isinstance(value, np.ndarray) and value.shape == self.shape:
741
- self.data[key] = value
742
- else:
743
- raise IndexError(f'The index should be an positive integer lower than the data length')
744
-
745
- def __add__(self, other: object):
746
- if isinstance(other, DataBase) and len(other) == len(self):
747
- new_data = copy.deepcopy(self)
748
- for ind_array in range(len(new_data)):
749
- if self[ind_array].shape != other[ind_array].shape:
750
- raise ValueError('The shapes of arrays stored into the data are not consistent')
751
- try:
752
- new_data[ind_array] = (Q_(self[ind_array], self.units) +
753
- Q_(other[ind_array], other.units)).m_as(self.units)
754
- except pint.errors.DimensionalityError as e:
755
- raise DataUnitError(
756
- f'Cannot sum Data objects not having the same dimension: {e}')
757
- return new_data
758
- elif isinstance(other, numbers.Number) and self.length == 1 and self.size == 1:
759
- new_data = copy.deepcopy(self)
760
- new_data = new_data + DataActuator(data=other)
761
- return new_data
762
- else:
763
- raise TypeError(f'Could not add a {other.__class__.__name__} or a {self.__class__.__name__} '
764
- f'of a different length')
765
-
766
- def __sub__(self, other: object):
767
- return self.__add__(other * -1)
768
- #
769
- # if isinstance(other, DataBase) and len(other) == len(self):
770
- # new_data = copy.deepcopy(self)
771
- # for ind_array in range(len(new_data)):
772
- # new_data[ind_array] = self[ind_array] - other[ind_array]
773
- # return new_data
774
- # elif isinstance(other, numbers.Number) and self.length == 1 and self.size == 1:
775
- # new_data = copy.deepcopy(self)
776
- # new_data = new_data - DataActuator(data=other)
777
- # return new_data
778
- # else:
779
- # raise TypeError(f'Could not substract a {other.__class__.__name__} or a {self.__class__.__name__} '
780
- # f'of a different length')
781
-
782
- def __mul__(self, other):
783
- if (isinstance(other, numbers.Number) or
784
- (isinstance(other, np.ndarray) and other.shape == self._shape)):
785
- new_data = copy.deepcopy(self)
786
- for ind_array in range(len(new_data)):
787
- new_data[ind_array] = self[ind_array] * other
788
- return new_data
789
- elif isinstance(other, DataBase) and other.shape == self._shape:
790
- new_data = copy.deepcopy(self)
791
- new_unit = str((Q_(self[0], self.units) *
792
- Q_(other[0], other.units)).to_base_units().units)
793
- for ind_array in range(len(new_data)):
794
- new_data[ind_array] = \
795
- ((Q_(self[ind_array], self.units) * Q_(other[ind_array], other.units))
796
- .to_base_units()).magnitude
797
- new_data._units = new_unit
798
- return new_data
799
- else:
800
- raise TypeError(f'Could not multiply a {other.__class__.__name__} and a {self.__class__.__name__} '
801
- f'of a different length')
802
-
803
- def __truediv__(self, other):
804
- if isinstance(other, numbers.Number):
805
- return self * (1 / other)
806
- else:
807
- raise TypeError(f'Could not divide a {other.__class__.__name__} and a {self.__class__.__name__} '
808
- f'of a different length')
809
-
810
- def _comparison_common(self, other, operator='__eq__'):
811
- if isinstance(other, DataBase):
812
- if not (self.name == other.name and
813
- len(self) == len(other) and
814
- Unit(self.units).is_compatible_with(other.units)):
815
- return False
816
- if self.dim != other.dim:
817
- return False
818
- eq = True
819
- for ind in range(len(self)):
820
- if self[ind].shape != other[ind].shape:
821
- eq = False
822
- break
823
- eq = eq and np.all(getattr(self.quantities[ind], operator)(other.quantities[ind]))
824
- # extra attributes are not relevant as they may contain module specific data...
825
- # eq = eq and (self.extra_attributes == other.extra_attributes)
826
- # for attribute in self.extra_attributes:
827
- # eq = eq and (getattr(self, attribute) == getattr(other, attribute))
828
- return eq
829
- elif isinstance(other, numbers.Number):
830
- return np.all(getattr(self[0], operator)(other))
831
- else:
832
- raise TypeError()
833
-
834
- def __eq__(self, other):
835
- return self._comparison_common(other, '__eq__')
836
-
837
- def __le__(self, other):
838
- return self._comparison_common(other, '__le__')
839
-
840
- def __lt__(self, other):
841
- return self._comparison_common(other, '__lt__')
842
-
843
- def __ge__(self, other):
844
- return self._comparison_common(other, '__ge__')
845
-
846
- def __gt__(self, other):
847
- return self._comparison_common(other, '__gt__')
848
-
849
- def deepcopy(self):
850
- return copy.deepcopy(self)
851
-
852
- def average(self, other: 'DataBase', weight: int) -> 'DataBase':
853
- """ Compute the weighted average between self and other DataBase
854
-
855
- Parameters
856
- ----------
857
- other_data: DataBase
858
- weight: int
859
- The weight the 'other' holds with respect to self
860
- Returns
861
- -------
862
- DataBase: the averaged DataBase object
863
- """
864
- if isinstance(other, DataBase) and len(other) == len(self) and isinstance(weight, numbers.Number):
865
- return (other * weight + self) / (weight + 1)
866
- else:
867
- raise TypeError(f'Could not average a {other.__class__.__name__} or a {self.__class__.__name__} '
868
- f'of a different length')
869
-
870
- def abs(self):
871
- """ Take the absolute value of itself"""
872
- new_data = copy.copy(self)
873
- new_data.data = [np.abs(dat) for dat in new_data]
874
- return new_data
875
-
876
- def angle(self):
877
- """ Take the phase value of itself"""
878
- new_data = copy.copy(self)
879
- new_data.data = [np.angle(dat) for dat in new_data]
880
- return new_data
881
-
882
- def real(self):
883
- """ Take the real part of itself"""
884
- new_data = copy.copy(self)
885
- new_data.data = [np.real(dat) for dat in new_data]
886
- return new_data
887
-
888
- def imag(self):
889
- """ Take the imaginary part of itself"""
890
- new_data = copy.copy(self)
891
- new_data.data = [np.imag(dat) for dat in new_data]
892
- return new_data
893
-
894
- def flipud(self):
895
- """Reverse the order of elements along axis 0 (up/down)"""
896
- new_data = copy.copy(self)
897
- new_data.data = [np.flipud(dat) for dat in new_data]
898
- return new_data
899
-
900
- def fliplr(self):
901
- """Reverse the order of elements along axis 1 (left/right)"""
902
- new_data = copy.copy(self)
903
- new_data.data = [np.fliplr(dat) for dat in new_data]
904
- return new_data
905
-
906
- def append(self, data: DataWithAxes):
907
- """Append data content if the underlying arrays have the same shape and compatible units"""
908
- for dat in data:
909
- if dat.shape != self.shape:
910
- raise DataShapeError('Cannot append those ndarrays, they don\'t have the same shape'
911
- ' as self')
912
- self.data += [Q_(data_array, data.units).m_as(self.units) for data_array in data.data]
913
- self.labels.extend(data.labels)
914
-
915
- def pop(self, index: int) -> DataBase:
916
- """ Returns a copy of self but with data taken at the specified index"""
917
- dwa = self.deepcopy()
918
- dwa.data = [dwa.data[index]]
919
- dwa.labels = [dwa.labels[index]]
920
- return dwa
921
-
922
- @property
923
- def shape(self):
924
- """The shape of the nd-arrays"""
925
- return self._shape
926
-
927
- def stack_as_array(self, axis=0, dtype=None) -> np.ndarray:
928
- """ Stack all data arrays in a single numpy array
929
-
930
- Parameters
931
- ----------
932
- axis: int
933
- The new stack axis index, default 0
934
- dtype: str or np.dtype
935
- the dtype of the stacked array
936
-
937
- Returns
938
- -------
939
- np.ndarray
940
-
941
- See Also
942
- --------
943
- :meth:`np.stack`
944
- """
945
-
946
- return np.stack(self.data, axis=axis, dtype=dtype)
947
-
948
- @property
949
- def size(self):
950
- """The size of the nd-arrays"""
951
- return self._size
952
-
953
- @property
954
- def dim(self):
955
- """DataDim: the enum representing the dimensionality of the stored data"""
956
- return self._dim
957
-
958
- def set_dim(self, dim: Union[DataDim, str]):
959
- """Addhoc modification of dim independantly of the real data shape,
960
- should be used with extra care"""
961
- self._dim = enum_checker(DataDim, dim)
962
-
963
- @property
964
- def source(self):
965
- """DataSource: the enum representing the source of the data"""
966
- return self._source
967
-
968
- @source.setter
969
- def source(self, source_type: Union[str, DataSource]):
970
- """DataSource: the enum representing the source of the data"""
971
- source_type = enum_checker(DataSource, source_type)
972
- self._source = source_type
973
-
974
- @property
975
- def distribution(self):
976
- """DataDistribution: the enum representing the distribution of the stored data"""
977
- return self._distribution
978
-
979
- @property
980
- def length(self):
981
- """The length of data. This is the length of the list containing the nd-arrays"""
982
- return self._length
983
-
984
    @property
    def labels(self) -> List[str]:
        """list of str: one label per stored nd-array (channel names)"""
        return self._labels
987
-
988
    @labels.setter
    def labels(self, labels: List['str']):
        # normalization (copy + padding with CHxx names) is delegated to _check_labels
        self._check_labels(labels)
991
-
992
- def _check_labels(self, labels: List['str']):
993
- if labels is None:
994
- labels = []
995
- else:
996
- labels = labels[:]
997
- while len(labels) < self.length:
998
- labels.append(f'CH{len(labels):02d}')
999
- self._labels = labels
1000
-
1001
    def get_data_index(self, index: int = 0) -> np.ndarray:
        """Get one nd-array by its index in the list, same as self[index]"""
        return self.data[index]
1004
-
1005
- @staticmethod
1006
- def _check_data_type(data: List[np.ndarray]) -> List[np.ndarray]:
1007
- """make sure data is a list of nd-arrays"""
1008
- is_valid = True
1009
- if data is None:
1010
- is_valid = False
1011
- if not isinstance(data, list):
1012
- # try to transform the data to regular type
1013
- if isinstance(data, np.ndarray):
1014
- warnings.warn(DataTypeWarning(f'Your data should be a list of numpy arrays not just a single numpy'
1015
- f' array, wrapping them with a list'))
1016
- data = [data]
1017
- elif isinstance(data, numbers.Number):
1018
- warnings.warn(DataTypeWarning(f'Your data should be a list of numpy arrays not just a single numpy'
1019
- f' array, wrapping them with a list'))
1020
- data = [np.array([data])]
1021
- else:
1022
- is_valid = False
1023
- if isinstance(data, list):
1024
- if len(data) == 0:
1025
- is_valid = False
1026
- elif not isinstance(data[0], np.ndarray):
1027
- is_valid = False
1028
- elif len(data[0].shape) == 0:
1029
- is_valid = False
1030
- if not is_valid:
1031
- raise TypeError(f'Data should be an non-empty list of non-empty numpy arrays')
1032
- return data
1033
-
1034
    def check_shape_from_data(self, data: List[np.ndarray]):
        """Cache the shape of the first nd-array as the reference data shape"""
        self._shape = data[0].shape
1036
-
1037
- @staticmethod
1038
- def _get_dim_from_data(data: List[np.ndarray]) -> DataDim:
1039
- shape = data[0].shape
1040
- size = data[0].size
1041
- if len(shape) == 1 and size == 1:
1042
- dim = DataDim['Data0D']
1043
- elif len(shape) == 1 and size > 1:
1044
- dim = DataDim['Data1D']
1045
- elif len(shape) == 2:
1046
- dim = DataDim['Data2D']
1047
- else:
1048
- dim = DataDim['DataND']
1049
- return dim
1050
-
1051
- def get_dim_from_data(self, data: List[np.ndarray]):
1052
- """Get the dimensionality DataDim from data"""
1053
- self.check_shape_from_data(data)
1054
- self._size = data[0].size
1055
- self._length = len(data)
1056
- if len(self._shape) == 1 and self._size == 1:
1057
- dim = DataDim['Data0D']
1058
- elif len(self._shape) == 1 and self._size > 1:
1059
- dim = DataDim['Data1D']
1060
- elif len(self._shape) == 2:
1061
- dim = DataDim['Data2D']
1062
- else:
1063
- dim = DataDim['DataND']
1064
- return dim
1065
-
1066
    def _check_shape_dim_consistency(self, data: List[np.ndarray]):
        """Process the dim from data or make sure data and DataDim are coherent.

        Side effect: get_dim_from_data refreshes the cached shape/size/length.
        If the declared dim contradicts the data shape, it is replaced (with a
        DataDimWarning).
        """
        dim = self.get_dim_from_data(data)
        if self._dim is None:
            self._dim = dim
        else:
            self._dim = enum_checker(DataDim, self._dim)
            if self._dim != dim:
                warnings.warn(
                    DataDimWarning('The specified dimensionality is not coherent with the data '
                                   'shape, replacing it'))
                self._dim = dim
1078
-
1079
- def _check_same_shape(self, data: List[np.ndarray]):
1080
- """Check that all nd-arrays have the same shape"""
1081
- for dat in data:
1082
- if dat.shape != self.shape:
1083
- raise DataShapeError('The shape of the ndarrays in data is not the same')
1084
-
1085
    @property
    def quantities(self) -> list[Q_]:
        """ Get the arrays as pint quantities (carrying self.units)"""
        return [Q_(array, self.units) for array in self.data]
1089
-
1090
    @property
    def data(self) -> List[np.ndarray]:
        """List[np.ndarray]: get/set (and check) the data the object is storing"""
        return self._data
1094
-
1095
    @data.setter
    def data(self, data: List[np.ndarray]):
        # validation order matters: coerce the container type first, then sync
        # the shape/dim caches, then enforce homogeneous shapes against the
        # freshly cached reference shape
        data = self._check_data_type(data)
        self._check_shape_dim_consistency(data)
        self._check_same_shape(data)
        self._data = data
1101
-
1102
- def to_dict(self):
1103
- """ Get the data arrays into dictionary whose keys are the labels"""
1104
- data_dict = OrderedDict([])
1105
- for ind in range(len(self)):
1106
- data_dict[self.labels[ind]] = self[ind]
1107
- return data_dict
1108
-
1109
    def to_dB(self) -> DataBase:
        """ Get a new data object expressed in decibels, each channel relative
        to its own maximum (0 dB at the peak).

        NOTE(review): uses 10*log10 (power ratio convention); for amplitude
        data 20*log10 would apply — confirm intended semantics.

        new in 4.3.0
        """
        new_data = copy.deepcopy(self)
        for ind_array in range(len(new_data)):
            new_data[ind_array] = 10 * np.log10(self[ind_array] / self[ind_array].max())
        new_data._units = 'dB'  # sets the private field directly, bypassing any units logic
        return new_data
1119
-
1120
-
1121
class AxesManagerBase:
    """Abstract base managing the Axis objects of a DataWithAxes together with
    the navigation/signal index bookkeeping.

    NOTE(review): @abstractmethod is used but the class does not inherit from
    abc.ABC, so instantiation is not actually prevented — confirm if intended.
    """

    def __init__(self, data_shape: Tuple[int], axes: List[Axis], nav_indexes=None, sig_indexes=None, **kwargs):
        self._data_shape = data_shape[:]  # initial shape needed for self._check_axis
        self._axes = axes[:]
        self._nav_indexes = nav_indexes
        self._sig_indexes = sig_indexes if sig_indexes is not None else self.compute_sig_indexes()

        self._check_axis(self._axes)
        self._manage_named_axes(self._axes, **kwargs)

    @property
    def axes(self):
        """list of Axis: the managed axes (a shallow copy is made on set)"""
        return self._axes

    @axes.setter
    def axes(self, axes: List[Axis]):
        self._axes = axes[:]
        self._check_axis(self._axes)

    @abstractmethod
    def _check_axis(self, axes):
        ...

    @abstractmethod
    def get_sorted_index(self, axis_index: int = 0, spread_index=0) -> Tuple[np.ndarray, Tuple[slice]]:
        """ Get the index to sort the specified axis

        Parameters
        ----------
        axis_index: int
            The index along which one should sort the data
        spread_index: int
            for spread data only, specifies which spread axis to use

        Returns
        -------
        np.ndarray: the sorted index from the specified axis
        tuple of slice:
            used to slice the underlying data
        """
        ...

    def compute_sig_indexes(self):
        """Signal indexes are all the data dimensions not declared as navigation."""
        indexes = list(np.arange(len(self._data_shape)))
        for index in self.nav_indexes:
            if index in indexes:
                indexes.pop(indexes.index(index))
        return tuple(indexes)

    def _has_get_axis_from_index(self, index: int):
        """Check if the axis referred by a given data dimensionality index is present

        Returns
        -------
        bool: True if the axis has been found else False
        Axis or None: return the axis instance if has the axis else None
        """
        # fixed off-by-one: index == len(shape) is out of range too (was '>')
        if index >= len(self._data_shape) or index < 0:
            raise IndexError('The specified index does not correspond to any data dimension')
        for axis in self.axes:
            if axis.index == index:
                return True, axis
        return False, None

    def _manage_named_axes(self, axes, x_axis=None, y_axis=None, nav_x_axis=None, nav_y_axis=None):
        """Make sure old style Data stays compatible, especially when using the
        legacy x_axis/y_axis/nav_x_axis/nav_y_axis keyword parameters."""
        modified = False
        if x_axis is not None:
            modified = True
            index = 0
            if len(self._data_shape) == 1 and not self._has_get_axis_from_index(0)[0]:
                # in case of Data1D the x_axis corresponds to the first data dim
                index = 0
            elif len(self._data_shape) == 2 and not self._has_get_axis_from_index(1)[0]:
                # in case of Data2D the x_axis corresponds to the second data dim (columns)
                index = 1
            axes.append(Axis(x_axis.label, x_axis.units, x_axis.data, index=index))

        if y_axis is not None:

            if len(self._data_shape) == 2 and not self._has_get_axis_from_index(0)[0]:
                modified = True
                # in case of Data2D the y_axis corresponds to the first data dim (lines)
                axes.append(Axis(y_axis.label, y_axis.units, y_axis.data, index=0))

        if nav_x_axis is not None:
            if len(self.nav_indexes) > 0:
                modified = True
                # nav_x_axis is attached to the first navigation index
                axes.append(Axis(nav_x_axis.label, nav_x_axis.units, nav_x_axis.data, index=self._nav_indexes[0]))

        if nav_y_axis is not None:
            if len(self.nav_indexes) > 1:
                modified = True
                # nav_y_axis is attached to the second navigation index
                axes.append(Axis(nav_y_axis.label, nav_y_axis.units, nav_y_axis.data, index=self._nav_indexes[1]))

        if modified:
            self._check_axis(axes)

    @property
    def shape(self) -> Tuple[int]:
        # self._data_shape = self.compute_shape_from_axes()
        return self._data_shape

    @abstractmethod
    def compute_shape_from_axes(self):
        ...

    @property
    def sig_shape(self) -> tuple:
        """tuple: the shape restricted to the signal dimensions"""
        return tuple([self.shape[ind] for ind in self.sig_indexes])

    @property
    def nav_shape(self) -> tuple:
        """tuple: the shape restricted to the navigation dimensions"""
        return tuple([self.shape[ind] for ind in self.nav_indexes])

    def append_axis(self, axis: Axis):
        self._axes.append(axis)
        self._check_axis([axis])

    @property
    def nav_indexes(self) -> IterableType[int]:
        return self._nav_indexes

    @nav_indexes.setter
    def nav_indexes(self, nav_indexes: IterableType[int]):
        if isinstance(nav_indexes, Iterable):
            nav_indexes = tuple(nav_indexes)
            valid = True
            for index in nav_indexes:
                if index not in self.get_axes_index():
                    logger.warning('Could not set the corresponding nav_index into the data object, not enough'
                                   ' Axis declared')
                    valid = False
                    break
            if valid:
                self._nav_indexes = nav_indexes
        else:
            # fixed message: this branch is about nav_indexes (was wrongly 'sig_indexes')
            logger.warning('Could not set the corresponding nav_indexes into the data object, should be an iterable')
        self.sig_indexes = self.compute_sig_indexes()
        # removed a stray no-op 'self.shape' statement that was here

    @property
    def sig_indexes(self) -> IterableType[int]:
        return self._sig_indexes

    @sig_indexes.setter
    def sig_indexes(self, sig_indexes: IterableType[int]):
        if isinstance(sig_indexes, Iterable):
            sig_indexes = tuple(sig_indexes)
            valid = True
            for index in sig_indexes:
                if index in self._nav_indexes:
                    logger.warning('Could not set the corresponding sig_index into the axis manager object, '
                                   'the axis is already affected to the navigation axis')
                    valid = False
                    break
                if index not in self.get_axes_index():
                    logger.warning('Could not set the corresponding nav_index into the data object, not enough'
                                   ' Axis declared')
                    valid = False
                    break
            if valid:
                self._sig_indexes = sig_indexes
        else:
            logger.warning('Could not set the corresponding sig_indexes into the data object, should be an iterable')

    @property
    def nav_axes(self) -> List[int]:
        deprecation_msg('nav_axes parameter should not be used anymore, use nav_indexes')
        return self._nav_indexes

    @nav_axes.setter
    def nav_axes(self, nav_indexes: List[int]):
        deprecation_msg('nav_axes parameter should not be used anymore, use nav_indexes')
        self.nav_indexes = nav_indexes

    def is_axis_signal(self, axis: Axis) -> bool:
        """Check if an axis is considered signal or navigation

        NOTE(review): returns True when the index belongs to the *navigation*
        indexes, which looks inverted with respect to the method name (idem for
        is_axis_navigation). Behavior kept as-is since callers may rely on it —
        confirm before fixing. (A second, identical duplicate definition of this
        pair later in the class was removed.)
        """
        return axis.index in self._nav_indexes

    def is_axis_navigation(self, axis: Axis) -> bool:
        """Check if an axis is considered signal or navigation (see note on is_axis_signal)."""
        return axis.index not in self._nav_indexes

    @abstractmethod
    def get_shape_from_index(self, index: int) -> int:
        """Get the data shape at the given index"""
        ...

    def get_axes_index(self) -> List[int]:
        """Get the index list from the axis objects"""
        return [axis.index for axis in self._axes]

    @abstractmethod
    def get_axis_from_index(self, index: int, create: bool = False) -> List[Axis]:
        ...

    def get_axis_from_index_spread(self, index: int, spread_order: int) -> Axis:
        """Only valid for Spread data: several nav axes may share the same index
        but differ by spread_order. Base implementation returns None.

        (The redundant @abstractmethod declaration of this method, which this
        concrete fallback silently overrode, was removed.)
        """
        ...

    def get_nav_axes(self) -> List[Axis]:
        """Get the navigation axes corresponding to the data

        Use get_axis_from_index for all index in self.nav_indexes, but in spread
        distribution, one index may correspond to multiple nav axes, see Spread
        data distribution
        """
        return list(mutils.flatten([copy.copy(self.get_axis_from_index(index, create=True))
                                    for index in self.nav_indexes]))

    def get_signal_axes(self):
        """Get the signal axes (size > 1 only), computing sig_indexes lazily if needed."""
        if self.sig_indexes is None:
            self._sig_indexes = tuple([int(axis.index) for axis in self.axes if axis.index not in self.nav_indexes])
        axes = []
        for index in self._sig_indexes:
            axes_tmp = copy.copy(self.get_axis_from_index(index, create=True))
            for ax in axes_tmp:
                if ax.size > 1:
                    axes.append(ax)
        return axes

    def __repr__(self):
        return self._get_dimension_str()

    @abstractmethod
    def _get_dimension_str(self):
        ...
1369
-
1370
-
1371
class AxesManagerUniform(AxesManagerBase):
    """Axes manager for uniformly distributed data: at most one Axis per data dimension."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def compute_shape_from_axes(self):
        """Recompute the data shape from the declared axes (falls back to the
        stored shape when no axes are declared).

        NOTE(review): uses range(len(self.axes)) as dimension indexes, which
        assumes one axis per dimension starting at 0 — confirm for partially
        axed data.
        """
        if len(self.axes) != 0:
            shape = []
            for ind in range(len(self.axes)):
                shape.append(len(self.get_axis_from_index(ind, create=True)[0]))
        else:
            shape = self._data_shape
        return tuple(shape)

    def get_shape_from_index(self, index: int) -> int:
        """Get the data shape at the given index"""
        # fixed off-by-one: index == len(shape) is also out of range (was '>')
        if index >= len(self._data_shape) or index < 0:
            raise IndexError('The specified index does not correspond to any data dimension')
        return self._data_shape[index]

    def _check_axis(self, axes: List[Axis]):
        """Check all axis to make sure of their type and make sure their data are properly referring to the data index

        See Also
        --------
        :py:meth:`Axis.create_linear_data`
        """
        for ind, axis in enumerate(axes):
            if not isinstance(axis, Axis):
                raise TypeError(f'An axis of {self.__class__.__name__} should be an Axis object')
            if self.get_shape_from_index(axis.index) != axis.size:
                warnings.warn(DataSizeWarning('The size of the axis is not coherent with the shape of the data. '
                                              'Replacing it with a linspaced version: np.array([0, 1, 2, ...])'))
                axis.size = self.get_shape_from_index(axis.index)
                axis.scaling = 1
                axis.offset = 0
                axes[ind] = axis
        self._axes = axes

    def get_axis_from_index(self, index: int, create: bool = False) -> List[Axis]:
        """Get the axis referred by a given data dimensionality index

        If the axis is absent, create a linear one to fit the data shape if parameter create is True

        Parameters
        ----------
        index: int
            The index referring to the data ndarray shape
        create: bool
            If True and the axis referred by index has not been found in axes, create one

        Returns
        -------
        List[Axis] or None: return the list of axis instance if Data has the axis (or it has been created) else None

        See Also
        --------
        :py:meth:`Axis.create_linear_data`
        """
        index = int(index)
        has_axis, axis = self._has_get_axis_from_index(index)
        if not has_axis:
            if create:
                warnings.warn(DataIndexWarning(f'The axis requested with index {index} is not present, '
                                               f'creating a linear one...'))
                axis = Axis(index=index, offset=0, scaling=1)
                axis.size = self.get_shape_from_index(index)
            else:
                warnings.warn(DataIndexWarning(f'The axis requested with index {index} is not present, returning None'))
        return [axis]

    def get_axis_from_index_spread(self, index: int, spread_order: int) -> Axis:
        """in spread mode, different nav axes have the same index (but not
        the same spread_order integer value); meaningless for uniform data.
        """
        return None

    def get_sorted_index(self, axis_index: int = 0, spread_index=0) -> Tuple[np.ndarray, Tuple[slice]]:
        """ Get the index to sort the specified axis

        Parameters
        ----------
        axis_index: int
            The index along which one should sort the data
        spread_index: int
            for spread data only, specifies which spread axis to use

        Returns
        -------
        np.ndarray: the sorted index from the specified axis
        tuple of slice:
            used to slice the underlying data
        """
        axes = self.get_axis_from_index(axis_index)
        if axes[0] is not None:
            sorted_index = np.argsort(axes[0].get_data())
            axes[0].data = axes[0].get_data()[sorted_index]
            slices = []
            for ind in range(len(self.shape)):
                if ind == axis_index:
                    slices.append(sorted_index)
                else:
                    # fixed: use slice(None) for untouched dimensions. A numpy
                    # index may contain at most ONE Ellipsis, so the previous
                    # per-dimension Ellipsis raised IndexError for ndim >= 3
                    slices.append(slice(None))
            slices = tuple(slices)
            return sorted_index, slices
        else:
            return None, None

    def _get_dimension_str(self):
        """Render the shape as '(nav..., | sig...)' e.g. '(10|20, 30)'."""
        string = "("
        for nav_index in self.nav_indexes:
            string += str(self._data_shape[nav_index]) + ", "
        string = string.rstrip(", ")
        string += "|"
        for sig_index in self.sig_indexes:
            string += str(self._data_shape[sig_index]) + ", "
        string = string.rstrip(", ")
        string += ")"
        return string
1491
-
1492
-
1493
class AxesManagerSpread(AxesManagerBase):
    """For this particular data category, some explanation is needed, see example below:

    Examples
    --------
    One takes images data (20x30) as a function of 2 parameters, say xaxis and yaxis non-linearly spaced on a
    regular grid.

    data.shape = (150, 20, 30)
    data.nav_indexes = (0,)

    The first dimension (150) corresponds to the navigation (there are 150 non uniform data points taken)
    The second and third could correspond to signal data, here an image of size (20x30)
    so:
    * nav_indexes is (0, )
    * sig_indexes are (1, 2)

    xaxis = Axis(name=xaxis, index=0, data...) length 150
    yaxis = Axis(name=yaxis, index=0, data...) length 150

    In fact from such a data shape the number of navigation axes is unknown. In our example, they are 2. To somehow
    keep track of some ordering in these navigation axes, one adds an attribute to the Axis object: the spread_order
    xaxis = Axis(name=xaxis, index=0, spread_order=0, data...) length 150
    yaxis = Axis(name=yaxis, index=0, spread_order=1, data...) length 150
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def _check_axis(self, axes: List[Axis]):
        """Check all axis to make sure of their type and make sure their data are properly referring to the data index
        """
        for axis in axes:
            if not isinstance(axis, Axis):
                raise TypeError(f'An axis of {self.__class__.__name__} should be an Axis object')
            elif len(self.nav_indexes) != 1:
                raise ValueError('Spread data should have only one specified index in self.nav_indexes')
            elif axis.index in self.nav_indexes:
                if axis.size != 1 and (axis.size != self._data_shape[self.nav_indexes[0]]):
                    raise DataLengthError('all navigation axes should have the same size')

    def compute_shape_from_axes(self):
        """Get data shape from axes

        First get the nav length from one of the navigation axes
        Then check for signal axes
        """
        if len(self.axes) != 0:

            axes = sorted(self.axes, key=lambda axis: axis.index)

            shape = []
            for axis in axes:
                if axis.index in self.nav_indexes:
                    shape.append(axis.size)
                    break  # all nav axes share the same length: one is enough
            for axis in axes:
                if axis.index not in self.nav_indexes:
                    shape.append(axis.size)
        else:
            shape = self._data_shape
        return tuple(shape)

    def get_shape_from_index(self, index: int) -> int:
        """Get the data shape at the given index"""
        # fixed off-by-one: index == len(shape) is also out of range (was '>')
        if index >= len(self._data_shape) or index < 0:
            raise IndexError('The specified index does not correspond to any data dimension')
        return self._data_shape[index]

    def get_axis_from_index(self, index: int, create: bool = False) -> List[Axis]:
        """in spread mode, different nav axes have the same index (but not
        the same spread_order integer value) so may return multiple axis

        No possible "linear" creation in this mode except if the index is a signal index
        """
        if index in self.nav_indexes:
            axes = []
            for axis in self.axes:
                if axis.index == index:
                    axes.append(axis)
            return axes
        else:
            index = int(index)
            try:
                has_axis, axis = self._has_get_axis_from_index(index)
            except IndexError:
                # out-of-range index: keep the historical lenient behavior
                axis = [None]
                has_axis = False
                return axis

            if not has_axis and index in self.sig_indexes:
                if create:
                    warnings.warn(DataIndexWarning(f'The axis requested with index {index} is not present, '
                                                   f'creating a linear one...'))
                    axis = Axis(index=index, offset=0, scaling=1)
                    axis.size = self.get_shape_from_index(index)
                else:
                    warnings.warn(DataIndexWarning(f'The axis requested with index {index} is not present, returning None'))

            return [axis]

    def get_axis_from_index_spread(self, index: int, spread_order: int) -> Axis:
        """in spread mode, different nav axes have the same index (but not
        the same spread_order integer value)

        Returns None when no matching axis exists. (A second identical
        definition of this method was removed.)
        """
        for axis in self.axes:
            if axis.index == index and axis.spread_order == spread_order:
                return axis

    def get_sorted_index(self, axis_index: int = 0, spread_index=0) -> Tuple[np.ndarray, Tuple[slice]]:
        """ Get the index to sort the specified axis

        Parameters
        ----------
        axis_index: int
            The index along which one should sort the data
        spread_index: int
            for spread data only, specifies which spread axis to use

        Returns
        -------
        np.ndarray: the sorted index from the specified axis
        tuple of slice:
            used to slice the underlying data
        """
        if axis_index in self.nav_indexes:
            axis = self.get_axis_from_index_spread(axis_index, spread_index)
        else:
            axis = self.get_axis_from_index(axis_index)[0]

        if axis is not None:
            sorted_index = np.argsort(axis.get_data())
            slices = []
            for ind in range(len(self.shape)):
                if ind == axis_index:
                    slices.append(sorted_index)
                else:
                    # fixed: the previous code read slices[-1] on a possibly
                    # empty list (IndexError) and its "only one ellipsis"
                    # condition was inverted; plain slice(None) per untouched
                    # dimension is always a valid numpy index
                    slices.append(slice(None))
            slices = tuple(slices)

            # keep every spread navigation axis consistent with the new order
            for nav_index in self.nav_indexes:
                for axis in self.get_axis_from_index(nav_index):
                    axis.data = axis.get_data()[sorted_index]

            return sorted_index, slices
        else:
            return None, None

    def _get_dimension_str(self):
        """Render the shape as '(nav|sig...)'; fall back to the raw shape on any error."""
        try:
            string = "("
            for nav_index in self.nav_indexes:
                string += str(self._data_shape[nav_index]) + ", "
                break  # spread data has a single navigation index
            string = string.rstrip(", ")
            string += "|"
            for sig_index in self.sig_indexes:
                string += str(self._data_shape[sig_index]) + ", "
            string = string.rstrip(", ")
            string += ")"
        except Exception:
            string = f'({self._data_shape})'
        # returning after the try/except (not inside finally) no longer
        # swallows BaseException such as KeyboardInterrupt
        return string
1667
-
1668
-
1669
- class DataWithAxes(DataBase):
1670
- """Data object with Axis objects corresponding to underlying data nd-arrays
1671
-
1672
- Parameters
1673
- ----------
1674
- axes: list of Axis
1675
- the list of Axis object for proper plotting, calibration ...
1676
- nav_indexes: tuple of int
1677
- highlight which Axis in axes is Signal or Navigation axis depending on the content:
1678
- For instance, nav_indexes = (2,), means that the axis with index 2 in a at least 3D ndarray data is the first
1679
- navigation axis
1680
- For instance, nav_indexes = (3,2), means that the axis with index 3 in a at least 4D ndarray data is the first
1681
- navigation axis while the axis with index 2 is the second navigation Axis. Axes with index 0 and 1 are signal
1682
- axes of 2D ndarray data
1683
- errors: list of ndarray.
1684
- The list should match the length of the data attribute while the ndarrays
1685
- should match the data ndarray
1686
- """
1687
-
1688
- def __init__(self, *args, axes: List[Axis] = [],
1689
- nav_indexes: Tuple[int] = (),
1690
- errors: Iterable[np.ndarray] = None,
1691
- **kwargs):
1692
-
1693
- if 'nav_axes' in kwargs:
1694
- deprecation_msg('nav_axes parameter should not be used anymore, use nav_indexes')
1695
- nav_indexes = kwargs.pop('nav_axes')
1696
-
1697
- x_axis = kwargs.pop('x_axis') if 'x_axis' in kwargs else None
1698
- y_axis = kwargs.pop('y_axis') if 'y_axis' in kwargs else None
1699
-
1700
- nav_x_axis = kwargs.pop('nav_x_axis') if 'nav_x_axis' in kwargs else None
1701
- nav_y_axis = kwargs.pop('nav_y_axis') if 'nav_y_axis' in kwargs else None
1702
-
1703
- super().__init__(*args, **kwargs)
1704
-
1705
- self._axes = axes
1706
-
1707
- other_kwargs = dict(x_axis=x_axis, y_axis=y_axis, nav_x_axis=nav_x_axis, nav_y_axis=nav_y_axis)
1708
-
1709
- self.set_axes_manager(self.shape, axes=axes, nav_indexes=nav_indexes, **other_kwargs)
1710
-
1711
- self.inav: Iterable[DataWithAxes] = SpecialSlicersData(self, True)
1712
- self.isig: Iterable[DataWithAxes] = SpecialSlicersData(self, False)
1713
-
1714
- self.get_dim_from_data_axes() # in DataBase, dim is processed from the shape of data, but if axes are provided
1715
- #then use get_dim_from axes
1716
- self._check_errors(errors)
1717
-
1718
- def _check_errors(self, errors: Iterable[np.ndarray]):
1719
- """ Make sure the errors object is adapted to the len/shape of the dwa object
1720
-
1721
- new in 4.2.0
1722
- """
1723
- check = False
1724
- if errors is None:
1725
- self._errors = None
1726
- return
1727
- if isinstance(errors, (tuple, list)) and len(errors) == len(self):
1728
- if np.all([isinstance(error, np.ndarray) for error in errors]):
1729
- if np.all([error_array.shape == self.shape for error_array in errors]):
1730
- check = True
1731
- else:
1732
- logger.warning(f'All error objects should have the same shape as the data'
1733
- f'objects')
1734
- else:
1735
- logger.warning(f'All error objects should be np.ndarray')
1736
-
1737
- if not check:
1738
- logger.warning('the errors field is incompatible with the structure of the data')
1739
- self._errors = None
1740
- else:
1741
- self._errors = errors
1742
-
1743
    @property
    def errors(self):
        """ Get/Set the error bar values as a list of np.ndarray (or None)

        new in 4.2.0
        """
        return self._errors
1750
-
1751
    @errors.setter
    def errors(self, errors: Iterable[np.ndarray]):
        # validation (structure and shapes) is delegated to _check_errors
        self._check_errors(errors)
1754
-
1755
- def get_error(self, index):
1756
- """ Get a particular error ndarray at the given index in the list
1757
-
1758
- new in 4.2.0
1759
- """
1760
- if self._errors is not None: #because to the initial check we know it is a list of ndarrays
1761
- return self._errors[index]
1762
- else:
1763
- return np.array([0]) # this could be added to any numpy array of any shape
1764
-
1765
    def errors_as_dwa(self):
        """ Get a dwa from self replacing the data content with the error attribute (if not None)

        Raises
        ------
        ValueError
            if no errors are defined on this object

        New in 4.2.0
        """
        if self.errors is not None:
            dwa = self.deepcopy_with_new_data(self.errors)
            dwa.name = f'{self.name}_errors'
            dwa.errors = None  # the error arrays are now the data; drop them as errors
            return dwa
        else:
            raise ValueError(f'Cannot create a dwa from a None, should be a list of ndarray')
1777
-
1778
    def plot(self, plotter_backend: str = config('plotting', 'backend'), *args, viewer=None,
             **kwargs):
        """ Call a plotter factory and its plot method over the actual data

        NOTE: the default backend is read from the config once, at function
        definition time (default-argument evaluation), not at each call.
        """
        return plotter_factory.get(plotter_backend).plot(self, *args, viewer=viewer, **kwargs)
1782
-
1783
- def set_axes_manager(self, data_shape, axes, nav_indexes, **kwargs):
1784
- if self.distribution.name == 'uniform' or len(nav_indexes) == 0:
1785
- self._distribution = DataDistribution['uniform']
1786
- self.axes_manager = AxesManagerUniform(data_shape=data_shape, axes=axes,
1787
- nav_indexes=nav_indexes,
1788
- **kwargs)
1789
- elif self.distribution.name == 'spread':
1790
- self.axes_manager = AxesManagerSpread(data_shape=data_shape, axes=axes,
1791
- nav_indexes=nav_indexes,
1792
- **kwargs)
1793
- else:
1794
- raise ValueError(f'Such a data distribution ({data.distribution}) has no AxesManager')
1795
-
1796
    def __eq__(self, other):
        """ Equality: base DataBase comparison plus axes (per declared index) and errors.

        NOTE(review): when both errors are set, assumes other.errors has at
        least as many entries as self.errors — a length mismatch would raise
        IndexError; confirm upstream guarantees.
        """
        is_equal = super().__eq__(other)
        if not is_equal:
            return is_equal
        if isinstance(other, DataWithAxes):
            # compare the axes attached to every navigation and signal index
            for ind in list(self.nav_indexes) + list(self.sig_indexes):
                axes_self = self.get_axis_from_index(ind)
                axes_other = other.get_axis_from_index(ind)
                if len(axes_other) != len(axes_self):
                    return False
                for ind_ax in range(len(axes_self)):
                    if axes_self[ind_ax] != axes_other[ind_ax]:
                        return False
            if self.errors is None:
                is_equal = is_equal and other.errors is None
            else:
                for ind_error in range(len(self.errors)):
                    if not np.allclose(self.errors[ind_error], other.errors[ind_error]):
                        return False
        return is_equal
1816
-
1817
    def __repr__(self):
        """Short description: class, name, units, channel count and axes-manager layout."""
        return (f'<{self.__class__.__name__}: {self.name} '
                f'<u: {self.units}> '
                f'<len:{self.length}> {self._am}>')
1821
-
1822
    def sort_data(self, axis_index: int = 0, spread_index=0, inplace=False) -> DataWithAxes:
        """ Sort data along a given axis, default is 0

        Parameters
        ----------
        axis_index: int
            The index along which one should sort the data
        spread_index: int
            for spread data only, specifies which spread axis to use
        inplace: bool
            modify in place or not the data (and its axes)

        Returns
        -------
        DataWithAxes
        """
        if inplace:
            data = self
        else:
            data = self.deepcopy()
        # the axes manager sorts its own axes and returns the matching data slices
        sorted_index, slices = data._am.get_sorted_index(axis_index, spread_index)
        if sorted_index is not None:
            for ind in range(len(data)):
                data.data[ind] = data.data[ind][slices]
        return data
1847
-
1848
- def transpose(self):
1849
- """replace the data by their transposed version
1850
-
1851
- Valid only for 2D data
1852
- """
1853
- if self.dim == 'Data2D':
1854
- self.data[:] = [data.T for data in self.data]
1855
- for axis in self.axes:
1856
- axis.index = 0 if axis.index == 1 else 1
1857
-
1858
- def crop_at_along(self, coordinates_tuple: Tuple):
1859
- slices = []
1860
- for coordinates in coordinates_tuple:
1861
- axis = self.get_axis_from_index(0)[0]
1862
- indexes = axis.find_indexes(coordinates)
1863
- slices.append(slice(indexes))
1864
-
1865
- return self._slicer(slices, False)
1866
-
1867
- def mean(self, axis: int = 0) -> DataWithAxes:
1868
- """Process the mean of the data on the specified axis and returns the new data
1869
-
1870
- Parameters
1871
- ----------
1872
- axis: int
1873
-
1874
- Returns
1875
- -------
1876
- DataWithAxes
1877
- """
1878
- dat_mean = []
1879
- for dat in self.data:
1880
- mean = np.mean(dat, axis=axis)
1881
- if isinstance(mean, numbers.Number):
1882
- mean = np.array([mean])
1883
- dat_mean.append(mean)
1884
- return self.deepcopy_with_new_data(dat_mean, remove_axes_index=axis)
1885
-
1886
- def sum(self, axis: int = 0) -> DataWithAxes:
1887
- """Process the sum of the data on the specified axis and returns the new data
1888
-
1889
- Parameters
1890
- ----------
1891
- axis: int
1892
-
1893
- Returns
1894
- -------
1895
- DataWithAxes
1896
- """
1897
- dat_sum = []
1898
- for dat in self.data:
1899
- dat_sum.append(np.sum(dat, axis=axis))
1900
- return self.deepcopy_with_new_data(dat_sum, remove_axes_index=axis)
1901
-
1902
- def interp(self, new_axis_data: Union[Axis, np.ndarray], **kwargs) -> DataWithAxes:
1903
- """Performs linear interpolation for 1D data only.
1904
-
1905
- For more complex ones, see :py:meth:`scipy.interpolate`
1906
-
1907
- Parameters
1908
- ----------
1909
- new_axis_data: Union[Axis, np.ndarray]
1910
- The coordinates over which to do the interpolation
1911
- kwargs: dict
1912
- extra named parameters to be passed to the :py:meth:`~numpy.interp` method
1913
-
1914
- Returns
1915
- -------
1916
- DataWithAxes
1917
-
1918
- See Also
1919
- --------
1920
- :py:meth:`~numpy.interp`
1921
- :py:meth:`~scipy.interpolate`
1922
- """
1923
- if self.dim != DataDim['Data1D']:
1924
- raise ValueError('For basic interpolation, only 1D data are supported')
1925
-
1926
- data_interpolated = []
1927
- axis_obj = self.get_axis_from_index(0)[0]
1928
- if isinstance(new_axis_data, np.ndarray):
1929
- new_axis_data = Axis(axis_obj.label, axis_obj.units, data=new_axis_data)
1930
-
1931
- for dat in self.data:
1932
- data_interpolated.append(np.interp(new_axis_data.get_data(), axis_obj.get_data(), dat,
1933
- **kwargs))
1934
- new_data = DataCalculated(f'{self.name}_interp', data=data_interpolated,
1935
- axes=[new_axis_data],
1936
- labels=self.labels)
1937
- return new_data
1938
-
1939
- def ft(self, axis: int = 0) -> DataWithAxes:
1940
- """Process the Fourier Transform of the data on the specified axis and returns the new data
1941
-
1942
- Parameters
1943
- ----------
1944
- axis: int
1945
-
1946
- Returns
1947
- -------
1948
- DataWithAxes
1949
-
1950
- See Also
1951
- --------
1952
- :py:meth:`~pymodaq.utils.math_utils.ft`, :py:meth:`~numpy.fft.fft`
1953
- """
1954
- dat_ft = []
1955
- axis_obj = self.get_axis_from_index(axis)[0].copy()
1956
- omega_grid, time_grid = mutils.ftAxis_time(len(axis_obj),
1957
- np.abs(axis_obj.max() - axis_obj.min()))
1958
- for dat in self.data:
1959
- dat_ft.append(mutils.ft(dat, dim=axis))
1960
- new_data = self.deepcopy_with_new_data(dat_ft)
1961
- axis_obj = new_data.get_axis_from_index(axis)[0]
1962
- axis_obj.data = omega_grid
1963
- axis_obj.label = f'ft({axis_obj.label})'
1964
- axis_obj.units = f'rad/{axis_obj.units}'
1965
- return new_data
1966
-
1967
- def ift(self, axis: int = 0) -> DataWithAxes:
1968
- """Process the inverse Fourier Transform of the data on the specified axis and returns the
1969
- new data
1970
-
1971
- Parameters
1972
- ----------
1973
- axis: int
1974
-
1975
- Returns
1976
- -------
1977
- DataWithAxes
1978
-
1979
- See Also
1980
- --------
1981
- :py:meth:`~pymodaq.utils.math_utils.ift`, :py:meth:`~numpy.fft.ifft`
1982
- """
1983
- dat_ift = []
1984
- axis_obj = self.get_axis_from_index(axis)[0].copy()
1985
- omega_grid, time_grid = mutils.ftAxis_time(len(axis_obj),
1986
- np.abs(axis_obj.max() - axis_obj.min()))
1987
- for dat in self.data:
1988
- dat_ift.append(mutils.ift(dat, dim=axis))
1989
- new_data = self.deepcopy_with_new_data(dat_ift)
1990
- axis_obj = new_data.get_axis_from_index(axis)[0]
1991
- axis_obj.data = omega_grid
1992
- axis_obj.label = f'ift({axis_obj.label})'
1993
- axis_obj.units = str(Unit(f'rad/({axis_obj.units})'))
1994
- return new_data
1995
-
1996
- def fit(self, function: Callable, initial_guess: IterableType, data_index: int = None,
1997
- axis_index: int = 0, **kwargs) -> DataCalculated:
1998
- """ Apply 1D curve fitting using the scipy optimization package
1999
-
2000
- Parameters
2001
- ----------
2002
- function: Callable
2003
- a callable to be used for the fit
2004
- initial_guess: Iterable
2005
- The initial parameters for the fit
2006
- data_index: int
2007
- The index of the data over which to do the fit, if None apply the fit to all
2008
- axis_index: int
2009
- the axis index to use for the fit (if multiple) but there should be only one
2010
- kwargs: dict
2011
- extra named parameters applied to the curve_fit scipy method
2012
-
2013
- Returns
2014
- -------
2015
- DataCalculated containing the evaluation of the fit on the specified axis
2016
-
2017
- See Also
2018
- --------
2019
- :py:meth:`~scipy.optimize.curve_fit`
2020
- """
2021
- import scipy.optimize as opt
2022
- if self.dim != DataDim['Data1D']:
2023
- raise ValueError('Integrated fitting only works for 1D data')
2024
- axis = self.get_axis_from_index(axis_index)[0].copy()
2025
- axis_array = axis.get_data()
2026
- if data_index is None:
2027
- datalist_to_fit = self.data
2028
- labels = [f'{label}_fit' for label in self.labels]
2029
- else:
2030
- datalist_to_fit = [self.data[data_index]]
2031
- labels = [f'{self.labels[data_index]}_fit']
2032
-
2033
- datalist_fitted = []
2034
- fit_coeffs = []
2035
- for data_array in datalist_to_fit:
2036
- popt, pcov = opt.curve_fit(function, axis_array, data_array, p0=initial_guess, **kwargs)
2037
- datalist_fitted.append(function(axis_array, *popt))
2038
- fit_coeffs.append(popt)
2039
-
2040
- return DataCalculated(f'{self.name}_fit', data=datalist_fitted,
2041
- labels=labels,
2042
- axes=[axis], fit_coeffs=fit_coeffs)
2043
-
2044
- def find_peaks(self, height=None, threshold=None, **kwargs) -> DataToExport:
2045
- """ Apply the scipy find_peaks method to 1D data
2046
-
2047
- Parameters
2048
- ----------
2049
- height: number or ndarray or sequence, optional
2050
- threshold: number or ndarray or sequence, optional
2051
- kwargs: dict
2052
- extra named parameters applied to the find_peaks scipy method
2053
-
2054
- Returns
2055
- -------
2056
- DataCalculated
2057
-
2058
- See Also
2059
- --------
2060
- :py:meth:`~scipy.optimize.find_peaks`
2061
- """
2062
- if self.dim != DataDim['Data1D']:
2063
- raise ValueError('Finding peaks only works for 1D data')
2064
- from scipy.signal import find_peaks
2065
- peaks_indices = []
2066
- dte = DataToExport('peaks')
2067
- for ind in range(len(self)):
2068
- peaks, properties = find_peaks(self[ind], height, threshold, **kwargs)
2069
- peaks_indices.append(peaks)
2070
-
2071
- dte.append(DataCalculated(f'{self.labels[ind]}',
2072
- data=[self[ind][peaks_indices[-1]],
2073
- peaks_indices[-1]
2074
- ],
2075
- labels=['peak value', 'peak indexes'],
2076
- axes=[Axis('peak position', self.axes[0].units,
2077
- data=self.axes[0].get_data_at(peaks_indices[-1]))])
2078
- )
2079
- return dte
2080
-
2081
- def get_dim_from_data_axes(self) -> DataDim:
2082
- """Get the dimensionality DataDim from data taking into account nav indexes
2083
- """
2084
- if len(self.axes) != len(self.shape):
2085
- self._dim = self.get_dim_from_data(self.data)
2086
- else:
2087
- if len(self.nav_indexes) > 0:
2088
- self._dim = DataDim['DataND']
2089
- else:
2090
- if len(self.axes) == 0:
2091
- self._dim = DataDim['Data0D']
2092
- elif len(self.axes) == 1:
2093
- self._dim = DataDim['Data1D']
2094
- elif len(self.axes) == 2:
2095
- self._dim = DataDim['Data2D']
2096
- if len(self.nav_indexes) > 0:
2097
- self._dim = DataDim['DataND']
2098
- return self._dim
2099
-
2100
- @property
2101
- def n_axes(self):
2102
- """Get the number of axes (even if not specified)"""
2103
- return len(self.axes)
2104
-
2105
- @property
2106
- def axes(self):
2107
- """convenience property to fetch attribute from axis_manager"""
2108
- return self._am.axes
2109
-
2110
- @axes.setter
2111
- def axes(self, axes: List[Axis]):
2112
- """convenience property to set attribute from axis_manager"""
2113
- self.set_axes_manager(self.shape, axes=axes, nav_indexes=self.nav_indexes)
2114
-
2115
- def axes_limits(self, axes_indexes: List[int] = None) -> List[Tuple[float, float]]:
2116
- """Get the limits of specified axes (all if axes_indexes is None)"""
2117
- if axes_indexes is None:
2118
- return [(axis.min(), axis.max()) for axis in self.axes]
2119
- else:
2120
- return [(axis.min(), axis.max()) for axis in self.axes if axis.index in axes_indexes]
2121
-
2122
- @property
2123
- def sig_indexes(self):
2124
- """convenience property to fetch attribute from axis_manager"""
2125
- return self._am.sig_indexes
2126
-
2127
- @property
2128
- def nav_indexes(self):
2129
- """convenience property to fetch attribute from axis_manager"""
2130
- return self._am.nav_indexes
2131
-
2132
- @nav_indexes.setter
2133
- def nav_indexes(self, indexes: List[int]):
2134
- """create new axis manager with new navigation indexes"""
2135
- self.set_axes_manager(self.shape, axes=self.axes, nav_indexes=indexes)
2136
- self.get_dim_from_data_axes()
2137
-
2138
- def get_nav_axes(self) -> List[Axis]:
2139
- return self._am.get_nav_axes()
2140
-
2141
- def get_sig_index(self) -> List[Axis]:
2142
- return self._am.get_signal_axes()
2143
-
2144
- def get_nav_axes_with_data(self) -> List[Axis]:
2145
- """Get the data's navigation axes making sure there is data in the data field"""
2146
- axes = self.get_nav_axes()
2147
- for axis in axes:
2148
- if axis.get_data() is None:
2149
- axis.create_linear_data(self.shape[axis.index])
2150
- return axes
2151
-
2152
- def get_axis_indexes(self) -> List[int]:
2153
- """Get all present different axis indexes"""
2154
- return sorted(list(set([axis.index for axis in self.axes])))
2155
-
2156
- def get_axis_from_index(self, index, create=False):
2157
- return self._am.get_axis_from_index(index, create)
2158
-
2159
- def get_axis_from_index_spread(self, index: int, spread: int):
2160
- return self._am.get_axis_from_index_spread(index, spread)
2161
-
2162
- def get_axis_from_label(self, label: str) -> Axis:
2163
- """Get the axis referred by a given label
2164
-
2165
- Parameters
2166
- ----------
2167
- label: str
2168
- The label of the axis
2169
-
2170
- Returns
2171
- -------
2172
- Axis or None: return the axis instance if it has the right label else None
2173
- """
2174
- for axis in self.axes:
2175
- if axis.label == label:
2176
- return axis
2177
-
2178
- def create_missing_axes(self):
2179
- """Check if given the data shape, some axes are missing to properly define the data
2180
- (especially for plotting)"""
2181
- axes = self.axes[:]
2182
- for index in self.nav_indexes + self.sig_indexes:
2183
- if (len(self.get_axis_from_index(index)) != 0 and
2184
- self.get_axis_from_index(index)[0] is None):
2185
- axes_tmp = self.get_axis_from_index(index, create=True)
2186
- for ax in axes_tmp:
2187
- if ax.size > 1:
2188
- axes.append(ax)
2189
- self.axes = axes
2190
-
2191
- def _compute_slices(self, slices, is_navigation=True):
2192
- """Compute the total slice to apply to the data
2193
-
2194
- Filling in Ellipsis when no slicing should be done
2195
- """
2196
- if isinstance(slices, numbers.Number) or isinstance(slices, slice):
2197
- slices = [slices]
2198
- if is_navigation:
2199
- indexes = self._am.nav_indexes
2200
- else:
2201
- indexes = self._am.sig_indexes
2202
- total_slices = []
2203
- slices = list(slices)
2204
- for ind in range(len(self.shape)):
2205
- if ind in indexes:
2206
- total_slices.append(slices.pop(0))
2207
- elif len(total_slices) == 0:
2208
- total_slices.append(Ellipsis)
2209
- elif not (Ellipsis in total_slices and total_slices[-1] is Ellipsis):
2210
- total_slices.append(slice(None))
2211
- total_slices = tuple(total_slices)
2212
- return total_slices
2213
-
2214
- def check_squeeze(self, total_slices: List[slice], is_navigation: bool):
2215
-
2216
- do_squeeze = True
2217
- if 1 in self.data[0][total_slices].shape:
2218
- if not is_navigation and self.data[0][total_slices].shape.index(1) in self.nav_indexes:
2219
- do_squeeze = False
2220
- elif is_navigation and self.data[0][total_slices].shape.index(1) in self.sig_indexes:
2221
- do_squeeze = False
2222
- return do_squeeze
2223
-
2224
- def _slicer(self, slices, is_navigation=True):
2225
- """Apply a given slice to the data either navigation or signal dimension
2226
-
2227
- Parameters
2228
- ----------
2229
- slices: tuple of slice or int
2230
- the slices to apply to the data
2231
- is_navigation: bool
2232
- if True apply the slices to the navigation dimension else to the signal ones
2233
-
2234
- Returns
2235
- -------
2236
- DataWithAxes
2237
- Object of the same type as the initial data, derived from DataWithAxes. But with lower
2238
- data size due to the slicing and with eventually less axes.
2239
- """
2240
-
2241
- if isinstance(slices, numbers.Number) or isinstance(slices, slice):
2242
- slices = [slices]
2243
- total_slices = self._compute_slices(slices, is_navigation)
2244
-
2245
- do_squeeze = self.check_squeeze(total_slices, is_navigation)
2246
- new_arrays_data = [squeeze(dat[total_slices], do_squeeze) for dat in self.data]
2247
- tmp_axes = self._am.get_signal_axes() if is_navigation else self._am.get_nav_axes()
2248
- axes_to_append = [copy.deepcopy(axis) for axis in tmp_axes]
2249
-
2250
- # axes_to_append are the axes to append to the new produced data
2251
- # (basically the ones to keep)
2252
-
2253
- indexes_to_get = self.nav_indexes if is_navigation else self.sig_indexes
2254
- # indexes_to_get are the indexes of the axes where the slice should be applied
2255
-
2256
- _indexes = list(self.nav_indexes)
2257
- _indexes.extend(self.sig_indexes)
2258
- lower_indexes = dict(zip(_indexes, [0 for _ in range(len(_indexes))]))
2259
- # lower_indexes will store for each *axis index* how much the index should be reduced
2260
- # because one axis has
2261
- # been removed
2262
-
2263
- axes = []
2264
- nav_indexes = [] if is_navigation else list(self._am.nav_indexes)
2265
- for ind_slice, _slice in enumerate(slices):
2266
- if ind_slice < len(indexes_to_get):
2267
- ax = self._am.get_axis_from_index(indexes_to_get[ind_slice])
2268
- if len(ax) != 0 and ax[0] is not None:
2269
- for ind in range(len(ax)):
2270
- ax[ind] = ax[ind].iaxis[_slice]
2271
-
2272
- if not(ax[0] is None or ax[0].size <= 1): # means the slice kept part of the axis
2273
- if is_navigation:
2274
- nav_indexes.append(self._am.nav_indexes[ind_slice])
2275
- axes.extend(ax)
2276
- else:
2277
- for axis in axes_to_append: # means we removed one of the axes (and data dim),
2278
- # hence axis index above current index should be lowered by 1
2279
- if axis.index > indexes_to_get[ind_slice]:
2280
- lower_indexes[axis.index] += 1
2281
- for index in indexes_to_get[ind_slice+1:]:
2282
- lower_indexes[index] += 1
2283
-
2284
- axes.extend(axes_to_append)
2285
- for axis in axes:
2286
- axis.index -= lower_indexes[axis.index]
2287
- for ind in range(len(nav_indexes)):
2288
- nav_indexes[ind] -= lower_indexes[nav_indexes[ind]]
2289
-
2290
- if len(nav_indexes) != 0:
2291
- distribution = self.distribution
2292
- else:
2293
- distribution = DataDistribution['uniform']
2294
-
2295
- data = DataWithAxes(self.name, data=new_arrays_data, nav_indexes=tuple(nav_indexes),
2296
- axes=axes,
2297
- source='calculated', origin=self.origin,
2298
- labels=self.labels[:],
2299
- distribution=distribution)
2300
- return data
2301
-
2302
- def deepcopy_with_new_data(self, data: List[np.ndarray] = None,
2303
- remove_axes_index: Union[int, List[int]] = None,
2304
- source: DataSource = 'calculated',
2305
- keep_dim=False) -> DataWithAxes:
2306
- """deepcopy without copying the initial data (saving memory)
2307
-
2308
- The new data, may have some axes stripped as specified in remove_axes_index
2309
-
2310
- Parameters
2311
- ----------
2312
- data: list of numpy ndarray
2313
- The new data
2314
- remove_axes_index: tuple of int
2315
- indexes of the axis to be removed
2316
- source: DataSource
2317
- keep_dim: bool
2318
- if False (the default) will calculate the new dim based on the data shape
2319
- else keep the same (be aware it could lead to issues)
2320
-
2321
- Returns
2322
- -------
2323
- DataWithAxes
2324
- """
2325
- try:
2326
- old_data = self.data
2327
- self._data = None
2328
- new_data = self.deepcopy()
2329
- new_data._data = data
2330
- new_data.get_dim_from_data(data)
2331
-
2332
- if source is not None:
2333
- source = enum_checker(DataSource, source)
2334
- new_data._source = source
2335
-
2336
- if remove_axes_index is not None:
2337
- if not isinstance(remove_axes_index, Iterable):
2338
- remove_axes_index = [remove_axes_index]
2339
-
2340
- lower_indexes = dict(zip(new_data.get_axis_indexes(),
2341
- [0 for _ in range(len(new_data.get_axis_indexes()))]))
2342
- # lower_indexes will store for each *axis index* how much the index should be reduced because one axis has
2343
- # been removed
2344
-
2345
- nav_indexes = list(new_data.nav_indexes)
2346
- sig_indexes = list(new_data.sig_indexes)
2347
- for index in remove_axes_index:
2348
- for axis in new_data.get_axis_from_index(index):
2349
- if axis is not None:
2350
- new_data.axes.remove(axis)
2351
-
2352
- if index in new_data.nav_indexes:
2353
- nav_indexes.pop(nav_indexes.index(index))
2354
- if index in new_data.sig_indexes:
2355
- sig_indexes.pop(sig_indexes.index(index))
2356
-
2357
- # for ind, nav_ind in enumerate(nav_indexes):
2358
- # if nav_ind > index and nav_ind not in remove_axes_index:
2359
- # nav_indexes[ind] -= 1
2360
-
2361
- # for ind, sig_ind in enumerate(sig_indexes):
2362
- # if sig_ind > index:
2363
- # sig_indexes[ind] -= 1
2364
- for axis in new_data.axes:
2365
- if axis.index > index and axis.index not in remove_axes_index:
2366
- lower_indexes[axis.index] += 1
2367
-
2368
- for axis in new_data.axes:
2369
- axis.index -= lower_indexes[axis.index]
2370
- for ind in range(len(nav_indexes)):
2371
- nav_indexes[ind] -= lower_indexes[nav_indexes[ind]]
2372
-
2373
- new_data.nav_indexes = tuple(nav_indexes)
2374
- # new_data._am.sig_indexes = tuple(sig_indexes)
2375
-
2376
- new_data._shape = data[0].shape
2377
- if not keep_dim:
2378
- new_data._dim = self._get_dim_from_data(data)
2379
- return new_data
2380
-
2381
- except Exception as e:
2382
- pass
2383
- finally:
2384
- self._data = old_data
2385
-
2386
- @property
2387
- def _am(self) -> AxesManagerBase:
2388
- return self.axes_manager
2389
-
2390
- def get_data_dimension(self) -> str:
2391
- return str(self._am)
1
+ import numpy as np
2
+ import numbers
3
+ import warnings
4
+ import copy
2392
5
 
2393
- def get_data_as_dwa(self, index: int = 0) -> DataWithAxes:
2394
- """ Get the underlying data selected from the list at index, returned as a DataWithAxes"""
2395
- return self.deepcopy_with_new_data([self[index]])
6
+ from typing import List
2396
7
 
8
+ from pymodaq_utils.warnings import deprecation_msg, user_warning
2397
9
 
2398
- class DataRaw(DataWithAxes):
2399
- """Specialized DataWithAxes set with source as 'raw'. To be used for raw data"""
2400
- def __init__(self, *args, **kwargs):
2401
- if 'source' in kwargs:
2402
- kwargs.pop('source')
2403
- super().__init__(*args, source=DataSource['raw'], **kwargs)
10
+ from pymodaq_data.data import (DataRaw, DataWithAxes, DataToExport, DataCalculated, DataDim,
11
+ DataSource, DataBase, Axis, NavAxis, DataDistribution, Q_, Unit,
12
+ ) # imported here for backcompatibility
2404
13
 
2405
14
 
2406
15
  class DataActuator(DataRaw):
@@ -2421,48 +30,14 @@ class DataActuator(DataRaw):
2421
30
  else:
2422
31
  return f'<{self.__class__.__name__} ({self.shape} {self.units})>'
2423
32
 
2424
- def value(self, units: str = None) -> float:
2425
- """Returns the underlying float value (of the first elt in the data list) if this data
2426
- holds only a float otherwise returns a mean of the underlying data
2427
-
2428
- Parameters
2429
- ----------
2430
-
2431
- units: str
2432
- if unit is compatible with self.units, convert the data to these new units before
2433
- getting the value
2434
-
2435
-
2436
- """
2437
- if self.length == 1 and self.size == 1:
2438
- if units is not None:
2439
- data = Q_(float(self.data[0][0]), self.units)
2440
- return data.m_as(units)
2441
- else:
2442
- return float(self.data[0][0])
2443
- else:
2444
- if units is not None:
2445
- data = Q_(float(np.mean(self.data[0])), self.units)
2446
- return data.m_as(units)
2447
- else:
2448
- return float(np.mean(self.data[0]))
33
+ def __add__(self, other: object):
34
+ if isinstance(other, numbers.Number) and self.length == 1 and self.size == 1:
35
+ new_data = copy.deepcopy(self)
36
+ new_data = new_data + DataActuator(data=other)
37
+ return new_data
2449
38
 
2450
- def values(self, units: str = None) -> List[float]:
2451
- """Returns the underlying float value (for each data array in the data list) if this data
2452
- holds only a float otherwise returns a mean of the underlying data"""
2453
- if self.length == 1 and self.size == 1:
2454
- if units is not None:
2455
- return [float(Q_(data_array[0], self.units).m_as(units))
2456
- for data_array in self.data]
2457
- else:
2458
- return [float(data_array[0])
2459
- for data_array in self.data]
2460
39
  else:
2461
- if units is not None:
2462
- return [float(Q_(np.mean(data_array), self.units).m_as(units))
2463
- for data_array in self.data]
2464
- else:
2465
- return [float(np.mean(data_array)) for data_array in self.data]
40
+ return super().__add__(other)
2466
41
 
2467
42
 
2468
43
  class DataFromPlugins(DataRaw):
@@ -2508,489 +83,6 @@ class DataFromPlugins(DataRaw):
2508
83
  super().__init__(*args, **kwargs)
2509
84
 
2510
85
 
2511
- class DataCalculated(DataWithAxes):
2512
- """Specialized DataWithAxes set with source as 'calculated'. To be used for processed/calculated data"""
2513
- def __init__(self, *args, axes=[], **kwargs):
2514
- if 'source' in kwargs:
2515
- kwargs.pop('source')
2516
- super().__init__(*args, source=DataSource['calculated'], axes=axes, **kwargs)
2517
-
2518
-
2519
- class DataFromRoi(DataCalculated):
2520
- """Specialized DataWithAxes set with source as 'calculated'.To be used for processed data from region of interest"""
2521
- def __init__(self, *args, axes=[], **kwargs):
2522
- super().__init__(*args, axes=axes, **kwargs)
2523
-
2524
-
2525
- class DataToExport(DataLowLevel):
2526
- """Object to store all raw and calculated DataWithAxes data for later exporting, saving, sending signal...
2527
-
2528
- Includes methods to retrieve data from dim, source...
2529
- Stored data have a unique identifier their name. If some data is appended with an existing name, it will replace
2530
- the existing data. So if you want to append data that has the same name
2531
-
2532
- Parameters
2533
- ----------
2534
- name: str
2535
- The identifier of the exporting object
2536
- data: list of DataWithAxes
2537
- All the raw and calculated data to be exported
2538
-
2539
- Attributes
2540
- ----------
2541
- name
2542
- timestamp
2543
- data
2544
- """
2545
-
2546
- def __init__(self, name: str, data: List[DataWithAxes] = [], **kwargs):
2547
- """
2548
-
2549
- Parameters
2550
- ----------
2551
- name
2552
- data
2553
- """
2554
- super().__init__(name)
2555
- if not isinstance(data, list):
2556
- raise TypeError('Data stored in a DataToExport object should be as a list of objects'
2557
- ' inherited from DataWithAxis')
2558
- self._data = []
2559
-
2560
- self.data = data
2561
- for key in kwargs:
2562
- setattr(self, key, kwargs[key])
2563
-
2564
- def plot(self, plotter_backend: str = config('plotting', 'backend'), *args, **kwargs):
2565
- """ Call a plotter factory and its plot method over the actual data"""
2566
- return plotter_factory.get(plotter_backend).plot(self, *args, **kwargs)
2567
-
2568
- def affect_name_to_origin_if_none(self):
2569
- """Affect self.name to all DataWithAxes children's attribute origin if this origin is not defined"""
2570
- for dat in self.data:
2571
- if dat.origin is None or dat.origin == '':
2572
- dat.origin = self.name
2573
-
2574
- def __sub__(self, other: object):
2575
- if isinstance(other, DataToExport) and len(other) == len(self):
2576
- new_data = copy.deepcopy(self)
2577
- for ind_dfp in range(len(self)):
2578
- new_data[ind_dfp] = self[ind_dfp] - other[ind_dfp]
2579
- return new_data
2580
- else:
2581
- raise TypeError(f'Could not substract a {other.__class__.__name__} or a {self.__class__.__name__} '
2582
- f'of a different length')
2583
-
2584
- def __add__(self, other: object):
2585
- if isinstance(other, DataToExport) and len(other) == len(self):
2586
- new_data = copy.deepcopy(self)
2587
- for ind_dfp in range(len(self)):
2588
- new_data[ind_dfp] = self[ind_dfp] + other[ind_dfp]
2589
- return new_data
2590
- else:
2591
- raise TypeError(f'Could not add a {other.__class__.__name__} or a {self.__class__.__name__} '
2592
- f'of a different length')
2593
-
2594
- def __mul__(self, other: object):
2595
- if isinstance(other, numbers.Number):
2596
- new_data = copy.deepcopy(self)
2597
- for ind_dfp in range(len(self)):
2598
- new_data[ind_dfp] = self[ind_dfp] * other
2599
- return new_data
2600
- else:
2601
- raise TypeError(f'Could not multiply a {other.__class__.__name__} with a {self.__class__.__name__} '
2602
- f'of a different length')
2603
-
2604
- def __truediv__(self, other: object):
2605
- if isinstance(other, numbers.Number):
2606
- return self * (1 / other)
2607
- else:
2608
- raise TypeError(f'Could not divide a {other.__class__.__name__} with a {self.__class__.__name__} '
2609
- f'of a different length')
2610
-
2611
- def average(self, other: DataToExport, weight: int) -> DataToExport:
2612
- """ Compute the weighted average between self and other DataToExport and attributes it to self
2613
-
2614
- Parameters
2615
- ----------
2616
- other: DataToExport
2617
- weight: int
2618
- The weight the 'other_data' holds with respect to self
2619
-
2620
- """
2621
- if isinstance(other, DataToExport) and len(other) == len(self):
2622
- new_data = copy.copy(self)
2623
- for ind_dfp in range(len(self)):
2624
- new_data[ind_dfp] = self[ind_dfp].average(other[ind_dfp], weight)
2625
- return new_data
2626
- else:
2627
- raise TypeError(f'Could not average a {other.__class__.__name__} with a {self.__class__.__name__} '
2628
- f'of a different length')
2629
-
2630
- def merge_as_dwa(self, dim: Union[str, DataDim], name: str = None) -> DataRaw:
2631
- """ attempt to merge filtered dwa into one
2632
-
2633
- Only possible if all filtered dwa and underlying data have same shape
2634
-
2635
- Parameters
2636
- ----------
2637
- dim: DataDim or str
2638
- will only try to merge dwa having this dimensionality
2639
- name: str
2640
- The new name of the returned dwa
2641
- """
2642
- dim = enum_checker(DataDim, dim)
2643
-
2644
- filtered_data = self.get_data_from_dim(dim)
2645
- if len(filtered_data) != 0:
2646
- dwa = filtered_data[0].deepcopy()
2647
- for dwa_tmp in filtered_data[1:]:
2648
- if dwa_tmp.shape == dwa.shape and dwa_tmp.distribution == dwa.distribution:
2649
- dwa.append(dwa_tmp)
2650
- if name is None:
2651
- name = self.name
2652
- dwa.name = name
2653
- return dwa
2654
-
2655
- def __repr__(self):
2656
- repr = f'{self.__class__.__name__}: {self.name} <len:{len(self)}>\n'
2657
- for dwa in self:
2658
- repr += f' * {str(dwa)}\n'
2659
- return repr
2660
-
2661
- def __len__(self):
2662
- return len(self.data)
2663
-
2664
- def __iter__(self):
2665
- self._iter_index = 0
2666
- return self
2667
-
2668
- def __next__(self) -> DataWithAxes:
2669
- if self._iter_index < len(self):
2670
- self._iter_index += 1
2671
- return self.data[self._iter_index-1]
2672
- else:
2673
- raise StopIteration
2674
-
2675
- def __getitem__(self, item) -> Union[DataWithAxes, DataToExport]:
2676
- if isinstance(item, int) and 0 <= item < len(self):
2677
- return self.data[item]
2678
- elif isinstance(item, slice):
2679
- return DataToExport(self.name, data=[self[ind] for ind in list(range(len(self))[item])])
2680
- else:
2681
- raise IndexError(f'The index should be a positive integer lower than the data length')
2682
-
2683
- def __setitem__(self, key, value: DataWithAxes):
2684
- if isinstance(key, int) and 0 <= key < len(self) and isinstance(value, DataWithAxes):
2685
- self.data[key] = value
2686
- else:
2687
- raise IndexError(f'The index should be a positive integer lower than the data length')
2688
-
2689
- def get_names(self, dim: DataDim = None) -> List[str]:
2690
- """Get the names of the stored DataWithAxes, eventually filtered by dim
2691
-
2692
- Parameters
2693
- ----------
2694
- dim: DataDim or str
2695
-
2696
- Returns
2697
- -------
2698
- list of str: the names of the (filtered) DataWithAxes data
2699
- """
2700
- if dim is None:
2701
- return [data.name for data in self.data]
2702
- else:
2703
- return [data.name for data in self.get_data_from_dim(dim).data]
2704
-
2705
- def get_full_names(self, dim: DataDim = None):
2706
- """Get the ful names including the origin attribute into the returned value, eventually filtered by dim
2707
-
2708
- Parameters
2709
- ----------
2710
- dim: DataDim or str
2711
-
2712
- Returns
2713
- -------
2714
- list of str: the names of the (filtered) DataWithAxes data constructed as : origin/name
2715
-
2716
- Examples
2717
- --------
2718
- d0 = DataWithAxes(name='datafromdet0', origin='det0')
2719
- """
2720
- if dim is None:
2721
- return [data.get_full_name() for data in self.data]
2722
- else:
2723
- return [data.get_full_name() for data in self.get_data_from_dim(dim).data]
2724
-
2725
- def get_origins(self, dim: DataDim = None):
2726
- """Get the origins of the underlying data into the returned value, eventually filtered by dim
2727
-
2728
- Parameters
2729
- ----------
2730
- dim: DataDim or str
2731
-
2732
- Returns
2733
- -------
2734
- list of str: the origins of the (filtered) DataWithAxes data
2735
-
2736
- Examples
2737
- --------
2738
- d0 = DataWithAxes(name='datafromdet0', origin='det0')
2739
- """
2740
- if dim is None:
2741
- return list({dwa.origin for dwa in self.data})
2742
- else:
2743
- return list({dwa.origin for dwa in self.get_data_from_dim(dim).data})
2744
-
2745
-
2746
- def get_data_from_full_name(self, full_name: str, deepcopy=False) -> DataWithAxes:
2747
- """Get the DataWithAxes with matching full name"""
2748
- if deepcopy:
2749
- data = self.get_data_from_name_origin(full_name.split('/')[1], full_name.split('/')[0]).deepcopy()
2750
- else:
2751
- data = self.get_data_from_name_origin(full_name.split('/')[1], full_name.split('/')[0])
2752
- return data
2753
-
2754
- def get_data_from_full_names(self, full_names: List[str], deepcopy=False) -> DataToExport:
2755
- data = [self.get_data_from_full_name(full_name, deepcopy) for full_name in full_names]
2756
- return DataToExport(name=self.name, data=data)
2757
-
2758
- def get_dim_presents(self) -> List[str]:
2759
- dims = []
2760
- for dim in DataDim.names():
2761
- if len(self.get_data_from_dim(dim)) != 0:
2762
- dims.append(dim)
2763
-
2764
- return dims
2765
-
2766
- def get_data_from_source(self, source: DataSource, deepcopy=False) -> DataToExport:
2767
- """Get the data matching the given DataSource
2768
-
2769
- Returns
2770
- -------
2771
- DataToExport: filtered with data matching the dimensionality
2772
- """
2773
- source = enum_checker(DataSource, source)
2774
- return self.get_data_from_attribute('source', source, deepcopy=deepcopy)
2775
-
2776
- def get_data_from_missing_attribute(self, attribute: str, deepcopy=False) -> DataToExport:
2777
- """ Get the data matching a given attribute value
2778
-
2779
- Parameters
2780
- ----------
2781
- attribute: str
2782
- a string of a possible attribute
2783
- deepcopy: bool
2784
- if True the returned DataToExport will contain deepcopies of the DataWithAxes
2785
- Returns
2786
- -------
2787
- DataToExport: filtered with data missing the given attribute
2788
- """
2789
- if deepcopy:
2790
- return DataToExport(self.name, data=[dwa.deepcopy() for dwa in self if not hasattr(dwa, attribute)])
2791
- else:
2792
- return DataToExport(self.name, data=[dwa for dwa in self if not hasattr(dwa, attribute)])
2793
-
2794
- def get_data_from_attribute(self, attribute: str, attribute_value: Any, deepcopy=False) -> DataToExport:
2795
- """Get the data matching a given attribute value
2796
-
2797
- Returns
2798
- -------
2799
- DataToExport: filtered with data matching the attribute presence and value
2800
- """
2801
- selection = find_objects_in_list_from_attr_name_val(self.data, attribute, attribute_value,
2802
- return_first=False)
2803
- selection.sort(key=lambda elt: elt[0].name)
2804
- if deepcopy:
2805
- data = [sel[0].deepcopy() for sel in selection]
2806
- else:
2807
- data = [sel[0] for sel in selection]
2808
- return DataToExport(name=self.name, data=data)
2809
-
2810
- def get_data_from_dim(self, dim: DataDim, deepcopy=False) -> DataToExport:
2811
- """Get the data matching the given DataDim
2812
-
2813
- Returns
2814
- -------
2815
- DataToExport: filtered with data matching the dimensionality
2816
- """
2817
- dim = enum_checker(DataDim, dim)
2818
- return self.get_data_from_attribute('dim', dim, deepcopy=deepcopy)
2819
-
2820
- def get_data_from_dims(self, dims: List[DataDim], deepcopy=False) -> DataToExport:
2821
- """Get the data matching the given DataDim
2822
-
2823
- Returns
2824
- -------
2825
- DataToExport: filtered with data matching the dimensionality
2826
- """
2827
- data = DataToExport(name=self.name)
2828
- for dim in dims:
2829
- data.append(self.get_data_from_dim(dim, deepcopy=deepcopy))
2830
- return data
2831
-
2832
- def get_data_from_sig_axes(self, Naxes: int, deepcopy: bool = False) -> DataToExport:
2833
- """Get the data matching the given number of signal axes
2834
-
2835
- Parameters
2836
- ----------
2837
- Naxes: int
2838
- Number of signal axes in the DataWithAxes objects
2839
-
2840
- Returns
2841
- -------
2842
- DataToExport: filtered with data matching the number of signal axes
2843
- """
2844
- data = DataToExport(name=self.name)
2845
- for _data in self:
2846
- if len(_data.sig_indexes) == Naxes:
2847
- if deepcopy:
2848
- data.append(_data.deepcopy())
2849
- else:
2850
- data.append(_data)
2851
- return data
2852
-
2853
- def get_data_from_Naxes(self, Naxes: int, deepcopy: bool = False) -> DataToExport:
2854
- """Get the data matching the given number of axes
2855
-
2856
- Parameters
2857
- ----------
2858
- Naxes: int
2859
- Number of axes in the DataWithAxes objects
2860
-
2861
- Returns
2862
- -------
2863
- DataToExport: filtered with data matching the number of axes
2864
- """
2865
- data = DataToExport(name=self.name)
2866
- for _data in self:
2867
- if len(_data.shape) == Naxes:
2868
- if deepcopy:
2869
- data.append(_data.deepcopy())
2870
- else:
2871
- data.append(_data)
2872
- return data
2873
-
2874
- def get_data_with_naxes_lower_than(self, n_axes=2, deepcopy: bool = False) -> DataToExport:
2875
- """Get the data with n axes lower than the given number
2876
-
2877
- Parameters
2878
- ----------
2879
- Naxes: int
2880
- Number of axes in the DataWithAxes objects
2881
-
2882
- Returns
2883
- -------
2884
- DataToExport: filtered with data matching the number of axes
2885
- """
2886
- data = DataToExport(name=self.name)
2887
- for _data in self:
2888
- if _data.n_axes <= n_axes:
2889
- if deepcopy:
2890
- data.append(_data.deepcopy())
2891
- else:
2892
- data.append(_data)
2893
- return data
2894
-
2895
- def get_data_from_name(self, name: str) -> DataWithAxes:
2896
- """Get the data matching the given name"""
2897
- data, _ = find_objects_in_list_from_attr_name_val(self.data, 'name', name, return_first=True)
2898
- return data
2899
-
2900
- def get_data_from_names(self, names: List[str]) -> DataToExport:
2901
- return DataToExport(self.name, data=[dwa for dwa in self if dwa.name in names])
2902
-
2903
- def get_data_from_name_origin(self, name: str, origin: str = '') -> DataWithAxes:
2904
- """Get the data matching the given name and the given origin"""
2905
- if origin == '':
2906
- data, _ = find_objects_in_list_from_attr_name_val(self.data, 'name', name, return_first=True)
2907
- else:
2908
- selection = find_objects_in_list_from_attr_name_val(self.data, 'name', name, return_first=False)
2909
- selection = [sel[0] for sel in selection]
2910
- data, _ = find_objects_in_list_from_attr_name_val(selection, 'origin', origin)
2911
- return data
2912
-
2913
- def index(self, data: DataWithAxes):
2914
- return self.data.index(data)
2915
-
2916
- def index_from_name_origin(self, name: str, origin: str = '') -> List[DataWithAxes]:
2917
- """Get the index of a given DataWithAxes within the list of data"""
2918
- """Get the data matching the given name and the given origin"""
2919
- if origin == '':
2920
- _, index = find_objects_in_list_from_attr_name_val(self.data, 'name', name, return_first=True)
2921
- else:
2922
- selection = find_objects_in_list_from_attr_name_val(self.data, 'name', name, return_first=False)
2923
- data_selection = [sel[0] for sel in selection]
2924
- index_selection = [sel[1] for sel in selection]
2925
- _, index = find_objects_in_list_from_attr_name_val(data_selection, 'origin', origin)
2926
- index = index_selection[index]
2927
- return index
2928
-
2929
- def pop(self, index: int) -> DataWithAxes:
2930
- """return and remove the DataWithAxes referred by its index
2931
-
2932
- Parameters
2933
- ----------
2934
- index: int
2935
- index as returned by self.index_from_name_origin
2936
-
2937
- See Also
2938
- --------
2939
- index_from_name_origin
2940
- """
2941
- return self.data.pop(index)
2942
-
2943
- def remove(self, dwa: DataWithAxes):
2944
- return self.pop(self.data.index(dwa))
2945
-
2946
- @property
2947
- def data(self) -> List[DataWithAxes]:
2948
- """List[DataWithAxes]: get the data contained in the object"""
2949
- return self._data
2950
-
2951
- @data.setter
2952
- def data(self, new_data: List[DataWithAxes]):
2953
- for dat in new_data:
2954
- self._check_data_type(dat)
2955
- self._data[:] = [dat for dat in new_data] # shallow copyto make sure that if the original
2956
- # list is changed, the change will not be applied in here
2957
-
2958
- self.affect_name_to_origin_if_none()
2959
-
2960
- @staticmethod
2961
- def _check_data_type(data: DataWithAxes):
2962
- """Make sure data is a DataWithAxes object or inherited"""
2963
- if not isinstance(data, DataWithAxes):
2964
- raise TypeError('Data stored in a DataToExport object should be objects inherited from DataWithAxis')
2965
-
2966
- def deepcopy(self):
2967
- return DataToExport('Copy', data=[data.deepcopy() for data in self])
2968
-
2969
- @dispatch(list)
2970
- def append(self, data_list: List[DataWithAxes]):
2971
- for dwa in data_list:
2972
- self.append(dwa)
2973
-
2974
- @dispatch(DataWithAxes)
2975
- def append(self, dwa: DataWithAxes):
2976
- """Append/replace DataWithAxes object to the data attribute
2977
-
2978
- Make sure only one DataWithAxes object with a given name is in the list except if they don't have the same
2979
- origin identifier
2980
- """
2981
- dwa = dwa.deepcopy()
2982
- self._check_data_type(dwa)
2983
- obj = self.get_data_from_name_origin(dwa.name, dwa.origin)
2984
- if obj is not None:
2985
- self._data.pop(self.data.index(obj))
2986
- self._data.append(dwa)
2987
-
2988
- @dispatch(object)
2989
- def append(self, dte: DataToExport):
2990
- if isinstance(dte, DataToExport):
2991
- self.append(dte.data)
2992
-
2993
-
2994
86
  class DataScan(DataToExport):
2995
87
  """Specialized DataToExport.To be used for data to be saved """
2996
88
  def __init__(self, name: str, data: List[DataWithAxes] = [], **kwargs):
@@ -3015,7 +107,8 @@ class DataToActuators(DataToExport):
3015
107
 
3016
108
  def __init__(self, *args, mode='rel', **kwargs):
3017
109
  if mode not in ['rel', 'abs']:
3018
- warnings.warn('Incorrect mode for the actuators, switching to default relative mode: rel')
110
+ user_warning('Incorrect mode for the actuators, '
111
+ 'switching to default relative mode: rel')
3019
112
  mode = 'rel'
3020
113
  kwargs.update({'mode': mode})
3021
114
  super().__init__(*args, **kwargs)
@@ -3023,38 +116,3 @@ class DataToActuators(DataToExport):
3023
116
  def __repr__(self):
3024
117
  return f'{super().__repr__()}: {self.mode}'
3025
118
 
3026
-
3027
-
3028
- if __name__ == '__main__':
3029
- d = DataRaw('hjk', units='m', data=[np.array([0, 1, 2])])
3030
- dm = DataRaw('hjk', units='mm', data=[np.array([0, 1, 2])])
3031
- d + d
3032
- d - d
3033
-
3034
- d1 = DataFromRoi(name=f'Hlineout_', data=[np.zeros((24,))],
3035
- x_axis=Axis(data=np.zeros((24,)), units='myunits', label='mylabel1'))
3036
- d2 = DataFromRoi(name=f'Hlineout_', data=[np.zeros((12,))],
3037
- x_axis=Axis(data=np.zeros((12,)),
3038
- units='myunits2',
3039
- label='mylabel2'))
3040
-
3041
- Nsig = 200
3042
- Nnav = 10
3043
- x = np.linspace(-Nsig/2, Nsig/2-1, Nsig)
3044
-
3045
- dat = np.zeros((Nnav, Nsig))
3046
- for ind in range(Nnav):
3047
- dat[ind] = mutils.gauss1D(x, 50 * (ind -Nnav / 2), 25 / np.sqrt(2))
3048
-
3049
- data = DataRaw('mydata', data=[dat], nav_indexes=(0,),
3050
- axes=[Axis('nav', data=np.linspace(0, Nnav-1, Nnav), index=0),
3051
- Axis('sig', data=x, index=1)])
3052
-
3053
- data + data
3054
-
3055
- data2 = copy.copy(data)
3056
-
3057
- data3 = data.deepcopy_with_new_data([np.sum(dat, 1)], remove_axes_index=(1,))
3058
-
3059
- print('done')
3060
-