pymodaq 4.2.3__py3-none-any.whl → 5.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pymodaq might be problematic. Click here for more details.

Files changed (399)
  1. pymodaq/__init__.py +30 -23
  2. pymodaq/control_modules/daq_move.py +27 -14
  3. pymodaq/control_modules/daq_move_ui.py +33 -16
  4. pymodaq/control_modules/daq_viewer.py +38 -28
  5. pymodaq/control_modules/daq_viewer_ui.py +6 -6
  6. pymodaq/control_modules/mocks.py +1 -1
  7. pymodaq/control_modules/move_utility_classes.py +19 -10
  8. pymodaq/control_modules/utils.py +18 -12
  9. pymodaq/control_modules/viewer_utility_classes.py +13 -4
  10. pymodaq/dashboard.py +166 -118
  11. pymodaq/examples/custom_app.py +13 -16
  12. pymodaq/examples/custom_viewer.py +7 -7
  13. pymodaq/examples/function_plotter.py +13 -12
  14. pymodaq/examples/parameter_ex.py +50 -25
  15. pymodaq/examples/tcp_client.py +1 -1
  16. pymodaq/extensions/__init__.py +1 -1
  17. pymodaq/extensions/bayesian/bayesian_optimisation.py +15 -12
  18. pymodaq/extensions/bayesian/utils.py +10 -10
  19. pymodaq/extensions/console.py +10 -13
  20. pymodaq/extensions/{daq_logger.py → daq_logger/daq_logger.py} +36 -56
  21. pymodaq/{utils/db/db_logger → extensions/daq_logger/db}/db_logger.py +16 -15
  22. pymodaq/{utils/db/db_logger → extensions/daq_logger/db}/db_logger_models.py +2 -0
  23. pymodaq/{utils/h5modules → extensions/daq_logger}/h5logging.py +7 -8
  24. pymodaq/extensions/daq_scan.py +153 -247
  25. pymodaq/extensions/daq_scan_ui.py +11 -9
  26. pymodaq/extensions/h5browser.py +8 -8
  27. pymodaq/extensions/pid/__init__.py +6 -3
  28. pymodaq/extensions/pid/daq_move_PID.py +4 -2
  29. pymodaq/extensions/pid/pid_controller.py +15 -12
  30. pymodaq/extensions/pid/utils.py +10 -5
  31. pymodaq/extensions/utils.py +5 -3
  32. pymodaq/post_treatment/load_and_plot.py +10 -7
  33. pymodaq/resources/preset_default.xml +1 -1
  34. pymodaq/utils/array_manipulation.py +4 -384
  35. pymodaq/utils/calibration_camera.py +12 -9
  36. pymodaq/utils/chrono_timer.py +7 -5
  37. pymodaq/utils/config.py +3 -450
  38. pymodaq/utils/daq_utils.py +6 -664
  39. pymodaq/utils/data.py +9 -2774
  40. pymodaq/utils/exceptions.py +0 -4
  41. pymodaq/utils/gui_utils/__init__.py +8 -8
  42. pymodaq/utils/gui_utils/loader_utils.py +38 -0
  43. pymodaq/utils/gui_utils/utils.py +6 -138
  44. pymodaq/utils/h5modules/__init__.py +0 -4
  45. pymodaq/utils/h5modules/module_saving.py +15 -8
  46. pymodaq/utils/leco/__init__.py +2 -2
  47. pymodaq/utils/leco/daq_move_LECODirector.py +2 -2
  48. pymodaq/utils/leco/daq_xDviewer_LECODirector.py +2 -2
  49. pymodaq/utils/leco/director_utils.py +2 -2
  50. pymodaq/utils/leco/leco_director.py +3 -3
  51. pymodaq/utils/leco/pymodaq_listener.py +2 -2
  52. pymodaq/utils/leco/utils.py +1 -1
  53. pymodaq/utils/logger.py +4 -76
  54. pymodaq/utils/managers/batchscan_manager.py +16 -19
  55. pymodaq/utils/managers/modules_manager.py +10 -7
  56. pymodaq/utils/managers/overshoot_manager.py +3 -5
  57. pymodaq/utils/managers/preset_manager.py +37 -15
  58. pymodaq/utils/managers/preset_manager_utils.py +11 -9
  59. pymodaq/utils/managers/remote_manager.py +12 -10
  60. pymodaq/utils/math_utils.py +4 -572
  61. pymodaq/utils/parameter/__init__.py +4 -11
  62. pymodaq/utils/parameter/utils.py +4 -299
  63. pymodaq/utils/scanner/scan_config.py +1 -1
  64. pymodaq/utils/scanner/scan_factory.py +16 -12
  65. pymodaq/utils/{plotting → scanner}/scan_selector.py +19 -20
  66. pymodaq/utils/scanner/scanner.py +10 -8
  67. pymodaq/utils/scanner/scanners/_1d_scanners.py +8 -5
  68. pymodaq/utils/scanner/scanners/_2d_scanners.py +5 -5
  69. pymodaq/utils/scanner/scanners/sequential.py +8 -8
  70. pymodaq/utils/scanner/scanners/tabular.py +9 -9
  71. pymodaq/utils/scanner/utils.py +6 -4
  72. pymodaq/utils/svg/svg_viewer2D.py +3 -4
  73. pymodaq/utils/tcp_ip/serializer.py +64 -16
  74. pymodaq/utils/tcp_ip/tcp_server_client.py +10 -8
  75. {pymodaq-4.2.3.dist-info → pymodaq-5.0.0.dist-info}/METADATA +5 -3
  76. pymodaq-5.0.0.dist-info/RECORD +123 -0
  77. {pymodaq-4.2.3.dist-info → pymodaq-5.0.0.dist-info}/WHEEL +1 -1
  78. pymodaq/post_treatment/process_to_scalar.py +0 -263
  79. pymodaq/resources/QtDesigner_Ressources/Icon_Library/1d.png +0 -0
  80. pymodaq/resources/QtDesigner_Ressources/Icon_Library/2d.png +0 -0
  81. pymodaq/resources/QtDesigner_Ressources/Icon_Library/3d.png +0 -0
  82. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Add2.png +0 -0
  83. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Add_Step.png +0 -0
  84. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Approve.png +0 -0
  85. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Approve_All.png +0 -0
  86. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Browse_Dir_Path.png +0 -0
  87. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Calculator.png +0 -0
  88. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ChnGroup.png +0 -0
  89. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ChnNum.png +0 -0
  90. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ChnText.png +0 -0
  91. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ChnTime.png +0 -0
  92. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ChnWave.png +0 -0
  93. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Close3.png +0 -0
  94. pymodaq/resources/QtDesigner_Ressources/Icon_Library/CollapseAll.png +0 -0
  95. pymodaq/resources/QtDesigner_Ressources/Icon_Library/CollapseAll_32.png +0 -0
  96. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ColorPicker.png +0 -0
  97. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Contract.png +0 -0
  98. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Create.png +0 -0
  99. pymodaq/resources/QtDesigner_Ressources/Icon_Library/DeleteLayer.png +0 -0
  100. pymodaq/resources/QtDesigner_Ressources/Icon_Library/EditOpen.png +0 -0
  101. pymodaq/resources/QtDesigner_Ressources/Icon_Library/EditRedo.png +0 -0
  102. pymodaq/resources/QtDesigner_Ressources/Icon_Library/EditUndo.png +0 -0
  103. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Ellipse.png +0 -0
  104. pymodaq/resources/QtDesigner_Ressources/Icon_Library/EllipseFilled.png +0 -0
  105. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Error.png +0 -0
  106. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ErrorMessage.png +0 -0
  107. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Error_16.png +0 -0
  108. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Exit.png +0 -0
  109. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Expand.png +0 -0
  110. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ExpandAll.png +0 -0
  111. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ExpandAll_32.png +0 -0
  112. pymodaq/resources/QtDesigner_Ressources/Icon_Library/FFT.png +0 -0
  113. pymodaq/resources/QtDesigner_Ressources/Icon_Library/HLM.ico +0 -0
  114. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Help.png +0 -0
  115. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Help_32.png +0 -0
  116. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Histogram.png +0 -0
  117. pymodaq/resources/QtDesigner_Ressources/Icon_Library/LUT_LookUpTable.png +0 -0
  118. pymodaq/resources/QtDesigner_Ressources/Icon_Library/MagnifyingGlass.png +0 -0
  119. pymodaq/resources/QtDesigner_Ressources/Icon_Library/MagnifyingGlass_24.png +0 -0
  120. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Marker.png +0 -0
  121. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Math.png +0 -0
  122. pymodaq/resources/QtDesigner_Ressources/Icon_Library/MeasurementStudio_32.png +0 -0
  123. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Move.png +0 -0
  124. pymodaq/resources/QtDesigner_Ressources/Icon_Library/MoveDown.png +0 -0
  125. pymodaq/resources/QtDesigner_Ressources/Icon_Library/MoveUp.png +0 -0
  126. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Multiply.png +0 -0
  127. pymodaq/resources/QtDesigner_Ressources/Icon_Library/NewFile.png +0 -0
  128. pymodaq/resources/QtDesigner_Ressources/Icon_Library/NewLayer.png +0 -0
  129. pymodaq/resources/QtDesigner_Ressources/Icon_Library/New_File.png +0 -0
  130. pymodaq/resources/QtDesigner_Ressources/Icon_Library/New_Folder.png +0 -0
  131. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Open.png +0 -0
  132. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Open_1D.png +0 -0
  133. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Open_2D.png +0 -0
  134. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Open_File.png +0 -0
  135. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Open_File_32.png +0 -0
  136. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Open_sim.png +0 -0
  137. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Options.png +0 -0
  138. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Oscilloscope.png +0 -0
  139. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Oscilloscope_16.png +0 -0
  140. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Pass.png +0 -0
  141. pymodaq/resources/QtDesigner_Ressources/Icon_Library/RGB.png +0 -0
  142. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Rectangle.png +0 -0
  143. pymodaq/resources/QtDesigner_Ressources/Icon_Library/RectangleFilled.png +0 -0
  144. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Redo.png +0 -0
  145. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Refresh.png +0 -0
  146. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Refresh2.png +0 -0
  147. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Refresh_32.png +0 -0
  148. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Region.png +0 -0
  149. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Rendezvous.png +0 -0
  150. pymodaq/resources/QtDesigner_Ressources/Icon_Library/SELECT.png +0 -0
  151. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Save.png +0 -0
  152. pymodaq/resources/QtDesigner_Ressources/Icon_Library/SaveAll.png +0 -0
  153. pymodaq/resources/QtDesigner_Ressources/Icon_Library/SaveAll_32.png +0 -0
  154. pymodaq/resources/QtDesigner_Ressources/Icon_Library/SaveAs.png +0 -0
  155. pymodaq/resources/QtDesigner_Ressources/Icon_Library/SaveAs_32.png +0 -0
  156. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Save_24.png +0 -0
  157. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Save_32.png +0 -0
  158. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Search.png +0 -0
  159. pymodaq/resources/QtDesigner_Ressources/Icon_Library/SelectPolygon.png +0 -0
  160. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Select_24.png +0 -0
  161. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Settings.png +0 -0
  162. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Snap&Save.png +0 -0
  163. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Snapshot.png +0 -0
  164. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Snapshot2.png +0 -0
  165. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Snapshot2_16.png +0 -0
  166. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Snapshot2_32.png +0 -0
  167. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Spreadsheet.png +0 -0
  168. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Statistics.png +0 -0
  169. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Statistics2.png +0 -0
  170. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Status.png +0 -0
  171. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Subtract.png +0 -0
  172. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Vision.png +0 -0
  173. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Volts.png +0 -0
  174. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Wait2.png +0 -0
  175. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Zoom_1_1.png +0 -0
  176. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Zoom_in.png +0 -0
  177. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Zoom_out.png +0 -0
  178. pymodaq/resources/QtDesigner_Ressources/Icon_Library/Zoom_to_Selection.png +0 -0
  179. pymodaq/resources/QtDesigner_Ressources/Icon_Library/abort.png +0 -0
  180. pymodaq/resources/QtDesigner_Ressources/Icon_Library/advanced2.png +0 -0
  181. pymodaq/resources/QtDesigner_Ressources/Icon_Library/autoscale.png +0 -0
  182. pymodaq/resources/QtDesigner_Ressources/Icon_Library/b_icon.png +0 -0
  183. pymodaq/resources/QtDesigner_Ressources/Icon_Library/back.png +0 -0
  184. pymodaq/resources/QtDesigner_Ressources/Icon_Library/bg_icon.png +0 -0
  185. pymodaq/resources/QtDesigner_Ressources/Icon_Library/camera.png +0 -0
  186. pymodaq/resources/QtDesigner_Ressources/Icon_Library/camera_snap.png +0 -0
  187. pymodaq/resources/QtDesigner_Ressources/Icon_Library/cartesian.png +0 -0
  188. pymodaq/resources/QtDesigner_Ressources/Icon_Library/clear2.png +0 -0
  189. pymodaq/resources/QtDesigner_Ressources/Icon_Library/clear_ROI.png +0 -0
  190. pymodaq/resources/QtDesigner_Ressources/Icon_Library/close2.png +0 -0
  191. pymodaq/resources/QtDesigner_Ressources/Icon_Library/cluster2.png +0 -0
  192. pymodaq/resources/QtDesigner_Ressources/Icon_Library/color.png +0 -0
  193. pymodaq/resources/QtDesigner_Ressources/Icon_Library/color2.png +0 -0
  194. pymodaq/resources/QtDesigner_Ressources/Icon_Library/continuous.png +0 -0
  195. pymodaq/resources/QtDesigner_Ressources/Icon_Library/data.png +0 -0
  196. pymodaq/resources/QtDesigner_Ressources/Icon_Library/delay.png +0 -0
  197. pymodaq/resources/QtDesigner_Ressources/Icon_Library/download.png +0 -0
  198. pymodaq/resources/QtDesigner_Ressources/Icon_Library/download2.png +0 -0
  199. pymodaq/resources/QtDesigner_Ressources/Icon_Library/error2.png +0 -0
  200. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ethernet.png +0 -0
  201. pymodaq/resources/QtDesigner_Ressources/Icon_Library/exit2.png +0 -0
  202. pymodaq/resources/QtDesigner_Ressources/Icon_Library/fan.png +0 -0
  203. pymodaq/resources/QtDesigner_Ressources/Icon_Library/filter2.png +0 -0
  204. pymodaq/resources/QtDesigner_Ressources/Icon_Library/g_icon.png +0 -0
  205. pymodaq/resources/QtDesigner_Ressources/Icon_Library/gear2.png +0 -0
  206. pymodaq/resources/QtDesigner_Ressources/Icon_Library/go_to.png +0 -0
  207. pymodaq/resources/QtDesigner_Ressources/Icon_Library/go_to_1.png +0 -0
  208. pymodaq/resources/QtDesigner_Ressources/Icon_Library/go_to_2.png +0 -0
  209. pymodaq/resources/QtDesigner_Ressources/Icon_Library/grab.png +0 -0
  210. pymodaq/resources/QtDesigner_Ressources/Icon_Library/graph.png +0 -0
  211. pymodaq/resources/QtDesigner_Ressources/Icon_Library/greenLight2.png +0 -0
  212. pymodaq/resources/QtDesigner_Ressources/Icon_Library/greenLight2_32.png +0 -0
  213. pymodaq/resources/QtDesigner_Ressources/Icon_Library/green_light.png +0 -0
  214. pymodaq/resources/QtDesigner_Ressources/Icon_Library/grey_icon.png +0 -0
  215. pymodaq/resources/QtDesigner_Ressources/Icon_Library/greyscale.png +0 -0
  216. pymodaq/resources/QtDesigner_Ressources/Icon_Library/help1.png +0 -0
  217. pymodaq/resources/QtDesigner_Ressources/Icon_Library/help1_32.png +0 -0
  218. pymodaq/resources/QtDesigner_Ressources/Icon_Library/home2.png +0 -0
  219. pymodaq/resources/QtDesigner_Ressources/Icon_Library/information2.png +0 -0
  220. pymodaq/resources/QtDesigner_Ressources/Icon_Library/ini.png +0 -0
  221. pymodaq/resources/QtDesigner_Ressources/Icon_Library/integrator.png +0 -0
  222. pymodaq/resources/QtDesigner_Ressources/Icon_Library/joystick.png +0 -0
  223. pymodaq/resources/QtDesigner_Ressources/Icon_Library/light_green.png +0 -0
  224. pymodaq/resources/QtDesigner_Ressources/Icon_Library/light_green_16.png +0 -0
  225. pymodaq/resources/QtDesigner_Ressources/Icon_Library/light_red.png +0 -0
  226. pymodaq/resources/QtDesigner_Ressources/Icon_Library/light_red_16.png +0 -0
  227. pymodaq/resources/QtDesigner_Ressources/Icon_Library/light_yellow.png +0 -0
  228. pymodaq/resources/QtDesigner_Ressources/Icon_Library/light_yellow_16.png +0 -0
  229. pymodaq/resources/QtDesigner_Ressources/Icon_Library/limiter.png +0 -0
  230. pymodaq/resources/QtDesigner_Ressources/Icon_Library/load_ROI.png +0 -0
  231. pymodaq/resources/QtDesigner_Ressources/Icon_Library/meshPlot.png +0 -0
  232. pymodaq/resources/QtDesigner_Ressources/Icon_Library/meter.png +0 -0
  233. pymodaq/resources/QtDesigner_Ressources/Icon_Library/meter2.png +0 -0
  234. pymodaq/resources/QtDesigner_Ressources/Icon_Library/meter_32.png +0 -0
  235. pymodaq/resources/QtDesigner_Ressources/Icon_Library/move_contour.png +0 -0
  236. pymodaq/resources/QtDesigner_Ressources/Icon_Library/move_straight_line.png +0 -0
  237. pymodaq/resources/QtDesigner_Ressources/Icon_Library/movie.png +0 -0
  238. pymodaq/resources/QtDesigner_Ressources/Icon_Library/multi_point.png +0 -0
  239. pymodaq/resources/QtDesigner_Ressources/Icon_Library/multiplexer.png +0 -0
  240. pymodaq/resources/QtDesigner_Ressources/Icon_Library/new.png +0 -0
  241. pymodaq/resources/QtDesigner_Ressources/Icon_Library/openArrow.png +0 -0
  242. pymodaq/resources/QtDesigner_Ressources/Icon_Library/openTree.png +0 -0
  243. pymodaq/resources/QtDesigner_Ressources/Icon_Library/oscilloscope2.png +0 -0
  244. pymodaq/resources/QtDesigner_Ressources/Icon_Library/oscilloscope3.png +0 -0
  245. pymodaq/resources/QtDesigner_Ressources/Icon_Library/overlay.png +0 -0
  246. pymodaq/resources/QtDesigner_Ressources/Icon_Library/pass2.png +0 -0
  247. pymodaq/resources/QtDesigner_Ressources/Icon_Library/pass2_16.png +0 -0
  248. pymodaq/resources/QtDesigner_Ressources/Icon_Library/pass_32.png +0 -0
  249. pymodaq/resources/QtDesigner_Ressources/Icon_Library/pause.png +0 -0
  250. pymodaq/resources/QtDesigner_Ressources/Icon_Library/permute.png +0 -0
  251. pymodaq/resources/QtDesigner_Ressources/Icon_Library/phase.png +0 -0
  252. pymodaq/resources/QtDesigner_Ressources/Icon_Library/play.png +0 -0
  253. pymodaq/resources/QtDesigner_Ressources/Icon_Library/polar.png +0 -0
  254. pymodaq/resources/QtDesigner_Ressources/Icon_Library/pole_zero.png +0 -0
  255. pymodaq/resources/QtDesigner_Ressources/Icon_Library/powerMeter.png +0 -0
  256. pymodaq/resources/QtDesigner_Ressources/Icon_Library/powerSwitch.png +0 -0
  257. pymodaq/resources/QtDesigner_Ressources/Icon_Library/powerSwitch_16.png +0 -0
  258. pymodaq/resources/QtDesigner_Ressources/Icon_Library/print2.png +0 -0
  259. pymodaq/resources/QtDesigner_Ressources/Icon_Library/print2_32.png +0 -0
  260. pymodaq/resources/QtDesigner_Ressources/Icon_Library/properties.png +0 -0
  261. pymodaq/resources/QtDesigner_Ressources/Icon_Library/r_icon.png +0 -0
  262. pymodaq/resources/QtDesigner_Ressources/Icon_Library/radiocontrolbutton.png +0 -0
  263. pymodaq/resources/QtDesigner_Ressources/Icon_Library/read2.png +0 -0
  264. pymodaq/resources/QtDesigner_Ressources/Icon_Library/red_light.png +0 -0
  265. pymodaq/resources/QtDesigner_Ressources/Icon_Library/remove.png +0 -0
  266. pymodaq/resources/QtDesigner_Ressources/Icon_Library/reset.png +0 -0
  267. pymodaq/resources/QtDesigner_Ressources/Icon_Library/rgb_icon.png +0 -0
  268. pymodaq/resources/QtDesigner_Ressources/Icon_Library/robot.png +0 -0
  269. pymodaq/resources/QtDesigner_Ressources/Icon_Library/rotation2.png +0 -0
  270. pymodaq/resources/QtDesigner_Ressources/Icon_Library/run2.png +0 -0
  271. pymodaq/resources/QtDesigner_Ressources/Icon_Library/run_all.png +0 -0
  272. pymodaq/resources/QtDesigner_Ressources/Icon_Library/saturation.png +0 -0
  273. pymodaq/resources/QtDesigner_Ressources/Icon_Library/saveTree.png +0 -0
  274. pymodaq/resources/QtDesigner_Ressources/Icon_Library/save_ROI.png +0 -0
  275. pymodaq/resources/QtDesigner_Ressources/Icon_Library/scale_horizontally.png +0 -0
  276. pymodaq/resources/QtDesigner_Ressources/Icon_Library/scale_vertically.png +0 -0
  277. pymodaq/resources/QtDesigner_Ressources/Icon_Library/search2.png +0 -0
  278. pymodaq/resources/QtDesigner_Ressources/Icon_Library/select2.png +0 -0
  279. pymodaq/resources/QtDesigner_Ressources/Icon_Library/select_all.png +0 -0
  280. pymodaq/resources/QtDesigner_Ressources/Icon_Library/select_all2.png +0 -0
  281. pymodaq/resources/QtDesigner_Ressources/Icon_Library/select_none.png +0 -0
  282. pymodaq/resources/QtDesigner_Ressources/Icon_Library/sequence.png +0 -0
  283. pymodaq/resources/QtDesigner_Ressources/Icon_Library/sequence2.png +0 -0
  284. pymodaq/resources/QtDesigner_Ressources/Icon_Library/snap.png +0 -0
  285. pymodaq/resources/QtDesigner_Ressources/Icon_Library/sort_ascend.png +0 -0
  286. pymodaq/resources/QtDesigner_Ressources/Icon_Library/spectrumAnalyzer.png +0 -0
  287. pymodaq/resources/QtDesigner_Ressources/Icon_Library/start.png +0 -0
  288. pymodaq/resources/QtDesigner_Ressources/Icon_Library/status_cancelled.png +0 -0
  289. pymodaq/resources/QtDesigner_Ressources/Icon_Library/stop.png +0 -0
  290. pymodaq/resources/QtDesigner_Ressources/Icon_Library/stop2.png +0 -0
  291. pymodaq/resources/QtDesigner_Ressources/Icon_Library/stop3.png +0 -0
  292. pymodaq/resources/QtDesigner_Ressources/Icon_Library/stop_all.png +0 -0
  293. pymodaq/resources/QtDesigner_Ressources/Icon_Library/sum.png +0 -0
  294. pymodaq/resources/QtDesigner_Ressources/Icon_Library/surfacePlot.png +0 -0
  295. pymodaq/resources/QtDesigner_Ressources/Icon_Library/tree.png +0 -0
  296. pymodaq/resources/QtDesigner_Ressources/Icon_Library/updateTree.png +0 -0
  297. pymodaq/resources/QtDesigner_Ressources/Icon_Library/utility2.png +0 -0
  298. pymodaq/resources/QtDesigner_Ressources/Icon_Library/utility_small.png +0 -0
  299. pymodaq/resources/QtDesigner_Ressources/Icon_Library/vector.png +0 -0
  300. pymodaq/resources/QtDesigner_Ressources/Icon_Library/verify.png +0 -0
  301. pymodaq/resources/QtDesigner_Ressources/Icon_Library/video.png +0 -0
  302. pymodaq/resources/QtDesigner_Ressources/Icon_Library/wait.png +0 -0
  303. pymodaq/resources/QtDesigner_Ressources/Icon_Library/waterfallPlot.png +0 -0
  304. pymodaq/resources/QtDesigner_Ressources/Icon_Library/watershed.png +0 -0
  305. pymodaq/resources/QtDesigner_Ressources/Icon_Library/yellow_light.png +0 -0
  306. pymodaq/resources/QtDesigner_Ressources/Icon_Library/zip_file.png +0 -0
  307. pymodaq/resources/QtDesigner_Ressources/Icon_Library/zoomAuto.png +0 -0
  308. pymodaq/resources/QtDesigner_Ressources/Icon_Library/zoomReset.png +0 -0
  309. pymodaq/resources/QtDesigner_Ressources/QtDesigner_ressources.bat +0 -2
  310. pymodaq/resources/QtDesigner_Ressources/QtDesigner_ressources.qrc +0 -234
  311. pymodaq/resources/QtDesigner_Ressources/QtDesigner_ressources_rc.py +0 -127150
  312. pymodaq/resources/QtDesigner_Ressources/icons.svg +0 -142
  313. pymodaq/resources/VERSION +0 -1
  314. pymodaq/resources/config_template.toml +0 -90
  315. pymodaq/resources/triangulation_data.npy +0 -0
  316. pymodaq/utils/abstract/__init__.py +0 -48
  317. pymodaq/utils/db/__init__.py +0 -0
  318. pymodaq/utils/db/db_logger/__init__.py +0 -0
  319. pymodaq/utils/enums.py +0 -76
  320. pymodaq/utils/factory.py +0 -82
  321. pymodaq/utils/gui_utils/custom_app.py +0 -133
  322. pymodaq/utils/gui_utils/dock.py +0 -107
  323. pymodaq/utils/gui_utils/file_io.py +0 -93
  324. pymodaq/utils/gui_utils/layout.py +0 -34
  325. pymodaq/utils/gui_utils/list_picker.py +0 -38
  326. pymodaq/utils/gui_utils/widgets/__init__.py +0 -5
  327. pymodaq/utils/gui_utils/widgets/label.py +0 -24
  328. pymodaq/utils/gui_utils/widgets/lcd.py +0 -111
  329. pymodaq/utils/gui_utils/widgets/push.py +0 -149
  330. pymodaq/utils/gui_utils/widgets/qled.py +0 -62
  331. pymodaq/utils/gui_utils/widgets/spinbox.py +0 -24
  332. pymodaq/utils/gui_utils/widgets/table.py +0 -263
  333. pymodaq/utils/gui_utils/widgets/tree_layout.py +0 -188
  334. pymodaq/utils/gui_utils/widgets/tree_toml.py +0 -102
  335. pymodaq/utils/h5modules/backends.py +0 -1022
  336. pymodaq/utils/h5modules/browsing.py +0 -625
  337. pymodaq/utils/h5modules/data_saving.py +0 -1101
  338. pymodaq/utils/h5modules/exporter.py +0 -119
  339. pymodaq/utils/h5modules/exporters/__init__.py +0 -0
  340. pymodaq/utils/h5modules/exporters/base.py +0 -111
  341. pymodaq/utils/h5modules/exporters/flimj.py +0 -63
  342. pymodaq/utils/h5modules/exporters/hyperspy.py +0 -143
  343. pymodaq/utils/h5modules/saving.py +0 -866
  344. pymodaq/utils/h5modules/utils.py +0 -115
  345. pymodaq/utils/managers/action_manager.py +0 -489
  346. pymodaq/utils/managers/parameter_manager.py +0 -282
  347. pymodaq/utils/managers/roi_manager.py +0 -726
  348. pymodaq/utils/messenger.py +0 -66
  349. pymodaq/utils/parameter/ioxml.py +0 -542
  350. pymodaq/utils/parameter/pymodaq_ptypes/__init__.py +0 -38
  351. pymodaq/utils/parameter/pymodaq_ptypes/bool.py +0 -31
  352. pymodaq/utils/parameter/pymodaq_ptypes/date.py +0 -126
  353. pymodaq/utils/parameter/pymodaq_ptypes/filedir.py +0 -143
  354. pymodaq/utils/parameter/pymodaq_ptypes/itemselect.py +0 -265
  355. pymodaq/utils/parameter/pymodaq_ptypes/led.py +0 -44
  356. pymodaq/utils/parameter/pymodaq_ptypes/list.py +0 -150
  357. pymodaq/utils/parameter/pymodaq_ptypes/numeric.py +0 -18
  358. pymodaq/utils/parameter/pymodaq_ptypes/pixmap.py +0 -175
  359. pymodaq/utils/parameter/pymodaq_ptypes/slide.py +0 -145
  360. pymodaq/utils/parameter/pymodaq_ptypes/table.py +0 -135
  361. pymodaq/utils/parameter/pymodaq_ptypes/tableview.py +0 -149
  362. pymodaq/utils/parameter/pymodaq_ptypes/text.py +0 -142
  363. pymodaq/utils/plotting/__init__.py +0 -0
  364. pymodaq/utils/plotting/data_viewers/__init__.py +0 -10
  365. pymodaq/utils/plotting/data_viewers/base.py +0 -286
  366. pymodaq/utils/plotting/data_viewers/viewer.py +0 -274
  367. pymodaq/utils/plotting/data_viewers/viewer0D.py +0 -298
  368. pymodaq/utils/plotting/data_viewers/viewer1D.py +0 -820
  369. pymodaq/utils/plotting/data_viewers/viewer1Dbasic.py +0 -231
  370. pymodaq/utils/plotting/data_viewers/viewer2D.py +0 -1118
  371. pymodaq/utils/plotting/data_viewers/viewer2D_basic.py +0 -146
  372. pymodaq/utils/plotting/data_viewers/viewerND.py +0 -800
  373. pymodaq/utils/plotting/gant_chart.py +0 -123
  374. pymodaq/utils/plotting/image_viewer.py +0 -97
  375. pymodaq/utils/plotting/items/__init__.py +0 -0
  376. pymodaq/utils/plotting/items/axis_scaled.py +0 -93
  377. pymodaq/utils/plotting/items/crosshair.py +0 -94
  378. pymodaq/utils/plotting/items/image.py +0 -388
  379. pymodaq/utils/plotting/navigator.py +0 -353
  380. pymodaq/utils/plotting/plotter/plotter.py +0 -94
  381. pymodaq/utils/plotting/plotter/plotters/__init__.py +0 -0
  382. pymodaq/utils/plotting/plotter/plotters/matplotlib_plotters.py +0 -134
  383. pymodaq/utils/plotting/plotter/plotters/qt_plotters.py +0 -78
  384. pymodaq/utils/plotting/utils/__init__.py +0 -0
  385. pymodaq/utils/plotting/utils/axes_viewer.py +0 -88
  386. pymodaq/utils/plotting/utils/filter.py +0 -585
  387. pymodaq/utils/plotting/utils/lineout.py +0 -226
  388. pymodaq/utils/plotting/utils/plot_utils.py +0 -579
  389. pymodaq/utils/plotting/utils/signalND.py +0 -1347
  390. pymodaq/utils/plotting/widgets.py +0 -76
  391. pymodaq/utils/qvariant.py +0 -12
  392. pymodaq/utils/slicing.py +0 -63
  393. pymodaq/utils/units.py +0 -216
  394. pymodaq-4.2.3.dist-info/RECORD +0 -438
  395. /pymodaq/{post_treatment/daq_analysis → extensions/daq_logger}/__init__.py +0 -0
  396. /pymodaq/{utils/abstract/logger.py → extensions/daq_logger/abstract.py} +0 -0
  397. /pymodaq/{resources/QtDesigner_Ressources → extensions/daq_logger/db}/__init__.py +0 -0
  398. {pymodaq-4.2.3.dist-info → pymodaq-5.0.0.dist-info}/entry_points.txt +0 -0
  399. {pymodaq-4.2.3.dist-info → pymodaq-5.0.0.dist-info}/licenses/LICENSE +0 -0
pymodaq/utils/data.py CHANGED
@@ -1,2266 +1,13 @@
1
- # -*- coding: utf-8 -*-
2
- """
3
- Created the 28/10/2022
4
-
5
- @author: Sebastien Weber
6
- """
7
- from __future__ import annotations
8
-
9
- from abc import ABCMeta, abstractmethod, abstractproperty
10
- import numbers
11
- import numpy as np
12
- from typing import List, Tuple, Union, Any, Callable
13
- from typing import Iterable as IterableType
14
- from collections.abc import Iterable
15
- from collections import OrderedDict
16
- import logging
17
-
18
- import warnings
19
- from time import time
20
- import copy
21
-
22
- from multipledispatch import dispatch
23
- from pymodaq.utils.enums import BaseEnum, enum_checker
24
- from pymodaq.utils.messenger import deprecation_msg
25
- from pymodaq.utils.daq_utils import find_objects_in_list_from_attr_name_val
26
- from pymodaq.utils.logger import set_logger, get_module_name
27
- from pymodaq.utils.slicing import SpecialSlicersData
28
- from pymodaq.utils import math_utils as mutils
29
- from pymodaq.utils.config import Config
30
- from pymodaq.utils.plotting.plotter.plotter import PlotterFactory
31
-
32
- config = Config()
33
- plotter_factory = PlotterFactory()
34
- logger = set_logger(get_module_name(__file__))
35
-
36
-
37
- def squeeze(data_array: np.ndarray, do_squeeze=True, squeeze_indexes: Tuple[int]=None) -> np.ndarray:
38
- """ Squeeze numpy arrays return at least 1D arrays except if do_squeeze is False"""
39
- if do_squeeze:
40
- return np.atleast_1d(np.squeeze(data_array, axis=squeeze_indexes))
41
- else:
42
- return np.atleast_1d(data_array)
43
-
44
-
45
- class DataIndexWarning(Warning):
46
- pass
47
-
48
-
49
- class DataTypeWarning(Warning):
50
- pass
51
-
52
-
53
- class DataDimWarning(Warning):
54
- pass
55
-
56
-
57
- class DataSizeWarning(Warning):
58
- pass
59
-
60
-
61
- WARNINGS = [DataIndexWarning, DataTypeWarning, DataDimWarning, DataSizeWarning]
62
-
63
- if logging.getLevelName(logger.level) == 'DEBUG':
64
- for warning in WARNINGS:
65
- warnings.filterwarnings('default', category=warning)
66
- else:
67
- for warning in WARNINGS:
68
- warnings.filterwarnings('ignore', category=warning)
69
-
70
-
71
- class DataShapeError(Exception):
72
- pass
73
-
74
-
75
- class DataLengthError(Exception):
76
- pass
77
-
78
-
79
- class DataDimError(Exception):
80
- pass
81
-
82
-
83
- class DwaType(BaseEnum):
84
- DataWithAxes = 0
85
- DataRaw = 1
86
- DataActuator = 2
87
- DataFromPlugins = 3
88
- DataCalculated = 4
89
-
90
-
91
- class DataDim(BaseEnum):
92
- """Enum for dimensionality representation of data"""
93
- Data0D = 0
94
- Data1D = 1
95
- Data2D = 2
96
- DataND = 3
97
-
98
- def __le__(self, other_dim: 'DataDim'):
99
- other_dim = enum_checker(DataDim, other_dim)
100
- return self.value.__le__(other_dim.value)
101
-
102
- def __lt__(self, other_dim: 'DataDim'):
103
- other_dim = enum_checker(DataDim, other_dim)
104
- return self.value.__lt__(other_dim.value)
105
-
106
- def __ge__(self, other_dim: 'DataDim'):
107
- other_dim = enum_checker(DataDim, other_dim)
108
- return self.value.__ge__(other_dim.value)
109
-
110
- def __gt__(self, other_dim: 'DataDim'):
111
- other_dim = enum_checker(DataDim, other_dim)
112
- return self.value.__gt__(other_dim.value)
113
-
114
- @property
115
- def dim_index(self):
116
- return self.value
117
-
118
- @staticmethod
119
- def from_data_array(data_array: np.ndarray):
120
- if len(data_array.shape) == 1 and data_array.size == 1:
121
- return DataDim['Data0D']
122
- elif len(data_array.shape) == 1 and data_array.size > 1:
123
- return DataDim['Data1D']
124
- elif len(data_array.shape) == 2:
125
- return DataDim['Data2D']
126
- else:
127
- return DataDim['DataND']
128
-
129
-
130
- class DataSource(BaseEnum):
131
- """Enum for source of data"""
132
- raw = 0
133
- calculated = 1
134
-
135
-
136
- class DataDistribution(BaseEnum):
137
- """Enum for distribution of data"""
138
- uniform = 0
139
- spread = 1
140
-
141
-
142
- class Axis:
143
- """Object holding info and data about physical axis of some data
144
-
145
- In case the axis's data is linear, store the info as a scale and offset else store the data
146
-
147
- Parameters
148
- ----------
149
- label: str
150
- The label of the axis, for instance 'time' for a temporal axis
151
- units: str
152
- The units of the data in the object, for instance 's' for seconds
153
- data: ndarray
154
- A 1D ndarray holding the data of the axis
155
- index: int
156
- an integer representing the index of the Data object this axis is related to
157
- scaling: float
158
- The scaling to apply to a linspace version in order to obtain the proper scaling
159
- offset: float
160
- The offset to apply to a linspace/scaled version in order to obtain the proper axis
161
- size: int
162
- The size of the axis array (to be specified if data is None)
163
- spread_order: int
164
- An integer needed in the case where data has a spread DataDistribution. It refers to the index along the data's
165
- spread_index dimension
166
-
167
- Examples
168
- --------
169
- >>> axis = Axis('myaxis', units='seconds', data=np.array([1,2,3,4,5]), index=0)
170
- """
171
-
172
- def __init__(self, label: str = '', units: str = '', data: np.ndarray = None, index: int = 0,
173
- scaling=None, offset=None, size=None, spread_order: int = 0):
174
- super().__init__()
175
-
176
- self.iaxis: Axis = SpecialSlicersData(self, False)
177
-
178
- self._size = size
179
- self._data = None
180
- self._index = None
181
- self._label = None
182
- self._units = None
183
- self._scaling = scaling
184
- self._offset = offset
185
-
186
- self.units = units
187
- self.label = label
188
- self.data = data
189
- self.index = index
190
- self.spread_order = spread_order
191
- if (scaling is None or offset is None or size is None) and data is not None:
192
- self.get_scale_offset_from_data(data)
193
-
194
- def copy(self):
195
- return copy.copy(self)
196
-
197
- def as_dwa(self) -> DataWithAxes:
198
- dwa = DataRaw(self.label, data=[self.get_data()],
199
- labels=[f'{self.label}_{self.units}'])
200
- dwa.create_missing_axes()
201
- return dwa
202
-
203
- @property
204
- def label(self) -> str:
205
- """str: get/set the label of this axis"""
206
- return self._label
207
-
208
- @label.setter
209
- def label(self, lab: str):
210
- if not isinstance(lab, str):
211
- raise TypeError('label for the Axis class should be a string')
212
- self._label = lab
213
-
214
- @property
215
- def units(self) -> str:
216
- """str: get/set the units for this axis"""
217
- return self._units
218
-
219
- @units.setter
220
- def units(self, units: str):
221
- if not isinstance(units, str):
222
- raise TypeError('units for the Axis class should be a string')
223
- self._units = units
224
-
225
- @property
226
- def index(self) -> int:
227
- """int: get/set the index this axis corresponds to in a DataWithAxis object"""
228
- return self._index
229
-
230
- @index.setter
231
- def index(self, ind: int):
232
- self._check_index_valid(ind)
233
- self._index = ind
234
-
235
- @property
236
- def data(self):
237
- """np.ndarray: get/set the data of Axis"""
238
- return self._data
239
-
240
- @data.setter
241
- def data(self, data: np.ndarray):
242
- if data is not None:
243
- self._check_data_valid(data)
244
- self.get_scale_offset_from_data(data)
245
- self._size = data.size
246
- elif self.size is None:
247
- self._size = 0
248
- self._data = data
249
-
250
- def get_data(self) -> np.ndarray:
251
- """Convenience method to obtain the axis data (usually None because scaling and offset are used)"""
252
- return self._data if self._data is not None else self._linear_data(self.size)
253
-
254
- def get_data_at(self, indexes: Union[int, IterableType, slice]) -> np.ndarray:
255
- """ Get data at specified indexes
256
-
257
- Parameters
258
- ----------
259
- indexes:
260
- """
261
- if not (isinstance(indexes, np.ndarray) or isinstance(indexes, slice) or
262
- isinstance(indexes, int)):
263
- indexes = np.array(indexes)
264
- return self.get_data()[indexes]
265
-
266
- def get_scale_offset_from_data(self, data: np.ndarray = None):
267
- """Get the scaling and offset from the axis's data
268
-
269
- If data is not None, extract the scaling and offset
270
-
271
- Parameters
272
- ----------
273
- data: ndarray
274
- """
275
- if data is None and self._data is not None:
276
- data = self._data
277
-
278
- if self.is_axis_linear(data):
279
- if len(data) == 1:
280
- self._scaling = 1
281
- else:
282
- self._scaling = np.mean(np.diff(data))
283
- self._offset = data[0]
284
- self._data = None
285
-
286
- def is_axis_linear(self, data=None):
287
- if data is None:
288
- data = self.get_data()
289
- if data is not None:
290
- return np.allclose(np.diff(data), np.mean(np.diff(data)))
291
- else:
292
- return False
293
-
294
- @property
295
- def scaling(self):
296
- return self._scaling
297
-
298
- @scaling.setter
299
- def scaling(self, _scaling: float):
300
- self._scaling = _scaling
301
-
302
- @property
303
- def offset(self):
304
- return self._offset
305
-
306
- @offset.setter
307
- def offset(self, _offset: float):
308
- self._offset = _offset
309
-
310
- @property
311
- def size(self) -> int:
312
- """int: get/set the size/length of the 1D ndarray"""
313
- return self._size
314
-
315
- @size.setter
316
- def size(self, _size: int):
317
- if self._data is None:
318
- self._size = _size
319
-
320
- @staticmethod
321
- def _check_index_valid(index: int):
322
- if not isinstance(index, int):
323
- raise TypeError('index for the Axis class should be a positive integer')
324
- elif index < 0:
325
- raise ValueError('index for the Axis class should be a positive integer')
326
-
327
- @staticmethod
328
- def _check_data_valid(data):
329
- if not isinstance(data, np.ndarray):
330
- raise TypeError(f'data for the Axis class should be a 1D numpy array')
331
- elif len(data.shape) != 1:
332
- raise ValueError(f'data for the Axis class should be a 1D numpy array')
333
-
334
- def _linear_data(self, nsteps: int):
335
- """create axis data with a linear version using scaling and offset"""
336
- return self._offset + self._scaling * np.linspace(0, nsteps-1, nsteps)
337
-
338
- def create_linear_data(self, nsteps:int):
339
- """replace the axis data with a linear version using scaling and offset"""
340
- self.data = self._linear_data(nsteps)
341
-
342
- @staticmethod
343
- def create_simple_linear_data(nsteps: int):
344
- return np.linspace(0, nsteps-1, nsteps)
345
-
346
- def __len__(self):
347
- return self.size
348
-
349
- def _compute_slices(self, slices, *ignored, **ignored_also):
350
- return slices
351
-
352
- def _slicer(self, _slice, *ignored, **ignored_also):
353
- ax: Axis = copy.deepcopy(self)
354
- if isinstance(_slice, int):
355
- ax.data = np.array([ax.get_data()[_slice]])
356
- return ax
357
- elif _slice is Ellipsis:
358
- return ax
359
- elif isinstance(_slice, slice):
360
- if ax._data is not None:
361
- ax.data = ax._data.__getitem__(_slice)
362
- return ax
363
- else:
364
- start = _slice.start if _slice.start is not None else 0
365
- stop = _slice.stop if _slice.stop is not None else self.size
366
-
367
- ax._offset = ax.offset + start * ax.scaling
368
- ax._size = stop - start
369
- return ax
370
-
371
- def __getitem__(self, item):
372
- if hasattr(self, item):
373
- # for when axis was a dict
374
- deprecation_msg('attributes from an Axis object should not be fetched using __getitem__')
375
- return getattr(self, item)
376
-
377
- def __repr__(self):
378
- return f'{self.__class__.__name__}: <label: {self.label}> - <units: {self.units}> - <index: {self.index}>'
379
-
380
- def __mul__(self, scale: numbers.Real):
381
- if isinstance(scale, numbers.Real):
382
- ax = copy.deepcopy(self)
383
- if self.data is not None:
384
- ax.data *= scale
385
- else:
386
- ax._offset *= scale
387
- ax._scaling *= scale
388
- return ax
389
-
390
- def __add__(self, offset: numbers.Real):
391
- if isinstance(offset, numbers.Real):
392
- ax = copy.deepcopy(self)
393
- if self.data is not None:
394
- ax.data += offset
395
- else:
396
- ax._offset += offset
397
- return ax
398
-
399
- def __eq__(self, other: Axis):
400
- if isinstance(other, Axis):
401
- eq = self.label == other.label
402
- eq = eq and (self.units == other.units)
403
- eq = eq and (self.index == other.index)
404
- if self.data is not None and other.data is not None:
405
- eq = eq and (np.allclose(self.data, other.data))
406
- else:
407
- eq = eq and self.offset == other.offset
408
- eq = eq and self.scaling == other.scaling
409
-
410
- return eq
411
- else:
412
- return False
413
-
414
- def mean(self):
415
- if self._data is not None:
416
- return np.mean(self._data)
417
- else:
418
- return self.offset + self.size / 2 * self.scaling
419
-
420
- def min(self):
421
- if self._data is not None:
422
- return np.min(self._data)
423
- else:
424
- return self.offset + (self.size * self.scaling if self.scaling < 0 else 0)
425
-
426
- def max(self):
427
- if self._data is not None:
428
- return np.max(self._data)
429
- else:
430
- return self.offset + (self.size * self.scaling if self.scaling > 0 else 0)
431
-
432
- def find_index(self, threshold: float) -> int:
433
- """find the index of the threshold value within the axis"""
434
- if threshold < self.min():
435
- return 0
436
- elif threshold > self.max():
437
- return len(self) - 1
438
- elif self._data is not None:
439
- return mutils.find_index(self._data, threshold)[0][0]
440
- else:
441
- return int((threshold - self.offset) / self.scaling)
442
-
443
- def find_indexes(self, thresholds: IterableType[float]) -> IterableType[int]:
444
- if isinstance(thresholds, numbers.Number):
445
- thresholds = [thresholds]
446
- return [self.find_index(threshold) for threshold in thresholds]
447
-
448
-
449
- class NavAxis(Axis):
450
- def __init__(self, *args, **kwargs):
451
- super().__init__(*args, **kwargs)
452
- deprecation_msg('NavAxis should not be used anymore, please use Axis object with correct index.'
453
- 'The navigation index should be specified in the Data object')
454
-
455
-
456
- class DataLowLevel:
457
- """Abstract object for all Data Object
458
-
459
- Parameters
460
- ----------
461
- name: str
462
- the identifier of the data
463
-
464
- Attributes
465
- ----------
466
- name: str
467
- timestamp: float
468
- Time in seconds since epoch. See method time.time()
469
- """
470
-
471
- def __init__(self, name: str):
472
- self._timestamp = time()
473
- self._name = name
474
-
475
- @property
476
- def name(self):
477
- """Get/Set the identifier of the data"""
478
- return self._name
479
-
480
- @name.setter
481
- def name(self, other_name: str):
482
- self._name = other_name
483
-
484
- @property
485
- def timestamp(self):
486
- """Get/Set the timestamp of when the object has been created"""
487
- return self._timestamp
488
-
489
- @timestamp.setter
490
- def timestamp(self, timestamp: float):
491
- """The timestamp of when the object has been created"""
492
- self._timestamp = timestamp
493
-
494
-
495
- class DataBase(DataLowLevel):
496
- """Base object to store homogeneous data and metadata generated by pymodaq's objects.
497
-
498
- To be inherited for real data
499
-
500
- Parameters
501
- ----------
502
- name: str
503
- the identifier of these data
504
- source: DataSource or str
505
- Enum specifying if data are raw or processed (for instance from roi)
506
- dim: DataDim or str
507
- The identifier of the data type
508
- distribution: DataDistribution or str
509
- The distribution type of the data: uniform if distributed on a regular grid or spread if on specific
510
- unordered points
511
- data: list of ndarray
512
- The data the object is storing
513
- labels: list of str
514
- The labels of the data nd-arrays
515
- origin: str
516
- An identifier of the element where the data originated, for instance the DAQ_Viewer's name. Used when appending
517
- DataToExport in DAQ_Scan to disintricate from which origin data comes from when scanning multiple detectors.
518
- kwargs: named parameters
519
- All other parameters are stored dynamically using the name/value pair. The name of these extra parameters are
520
- added into the extra_attributes attribute
521
-
522
- Attributes
523
- ----------
524
- name: str
525
- the identifier of these data
526
- source: DataSource or str
527
- Enum specifying if data are raw or processed (for instance from roi)
528
- dim: DataDim or str
529
- The identifier of the data type
530
- distribution: DataDistribution or str
531
- The distribution type of the data: uniform if distributed on a regular grid or spread if on specific
532
- unordered points
533
- data: list of ndarray
534
- The data the object is storing
535
- labels: list of str
536
- The labels of the data nd-arrays
537
- origin: str
538
- An identifier of the element where the data originated, for instance the DAQ_Viewer's name. Used when appending
539
- DataToExport in DAQ_Scan to disintricate from which origin data comes from when scanning multiple detectors.
540
- shape: Tuple[int]
541
- The shape of the underlying data
542
- size: int
543
- The size of the ndarrays stored in the object
544
- length: int
545
- The number of ndarrays stored in the object
546
- extra_attributes: List[str]
547
- list of string giving identifiers of the attributes added dynamically at the initialization (for instance
548
- to save extra metadata using the DataSaverLoader
549
-
550
- See Also
551
- --------
552
- DataWithAxes, DataFromPlugins, DataRaw, DataSaverLoader
553
-
554
- Examples
555
- --------
556
- >>> import numpy as np
557
- >>> from pymodaq.utils.data import DataBase, DataSource, DataDim, DataDistribution
558
- >>> data = DataBase('mydata', source=DataSource['raw'], dim=DataDim['Data1D'], \
559
- distribution=DataDistribution['uniform'], data=[np.array([1.,2.,3.]), np.array([4.,5.,6.])],\
560
- labels=['channel1', 'channel2'], origin='docutils code')
561
- >>> data.dim
562
- <DataDim.Data1D: 1>
563
- >>> data.source
564
- <DataSource.raw: 0>
565
- >>> data.shape
566
- (3,)
567
- >>> data.length
568
- 2
569
- >>> data.size
570
- 3
571
- """
572
-
573
- def __init__(self, name: str, source: DataSource = None, dim: DataDim = None,
574
- distribution: DataDistribution = DataDistribution['uniform'],
575
- data: List[np.ndarray] = None,
576
- labels: List[str] = None, origin: str = '',
577
- **kwargs):
578
-
579
- super().__init__(name=name)
580
- self._iter_index = 0
581
- self._shape = None
582
- self._size = None
583
- self._data = None
584
- self._length = None
585
- self._labels = None
586
- self._dim = dim
587
- self._errors = None
588
- self.origin = origin
589
-
590
- source = enum_checker(DataSource, source)
591
- self._source = source
592
-
593
- distribution = enum_checker(DataDistribution, distribution)
594
- self._distribution = distribution
595
-
596
- self.data = data # dim consistency is actually checked within the setter method
597
-
598
- self._check_labels(labels)
599
- self.extra_attributes = []
600
- self.add_extra_attribute(**kwargs)
601
-
602
- def as_dte(self, name: str = 'mydte') -> DataToExport:
603
- """Convenience method to wrap the DataWithAxes object into a DataToExport"""
604
- return DataToExport(name, data=[self])
605
-
606
- def add_extra_attribute(self, **kwargs):
607
- for key in kwargs:
608
- if key not in self.extra_attributes:
609
- self.extra_attributes.append(key)
610
- setattr(self, key, kwargs[key])
611
-
612
- def get_full_name(self) -> str:
613
- """Get the data ful name including the origin attribute into the returned value
614
-
615
- Returns
616
- -------
617
- str: the name of the ataWithAxes data constructed as : origin/name
618
-
619
- Examples
620
- --------
621
- d0 = DataBase(name='datafromdet0', origin='det0')
622
- """
623
- return f'{self.origin}/{self.name}'
624
-
625
- def __repr__(self):
626
- return f'{self.__class__.__name__} <{self.name}> <{self.dim}> <{self.source}> <{self.shape}>'
627
-
628
- def __len__(self):
629
- return self.length
630
-
631
- def __iter__(self):
632
- self._iter_index = 0
633
- return self
634
-
635
- def __next__(self):
636
- if self._iter_index < len(self):
637
- self._iter_index += 1
638
- return self.data[self._iter_index-1]
639
- else:
640
- raise StopIteration
641
-
642
- def __getitem__(self, item) -> np.ndarray:
643
- if (isinstance(item, int) and item < len(self)) or isinstance(item, slice):
644
- return self.data[item]
645
- else:
646
- raise IndexError(f'The index should be an integer lower than the data length')
647
-
648
- def __setitem__(self, key, value):
649
- if isinstance(key, int) and key < len(self) and isinstance(value, np.ndarray) and value.shape == self.shape:
650
- self.data[key] = value
651
- else:
652
- raise IndexError(f'The index should be an positive integer lower than the data length')
653
-
654
- def __add__(self, other: object):
655
- if isinstance(other, DataBase) and len(other) == len(self):
656
- new_data = copy.deepcopy(self)
657
- for ind_array in range(len(new_data)):
658
- if self[ind_array].shape != other[ind_array].shape:
659
- raise ValueError('The shapes of arrays stored into the data are not consistent')
660
- new_data[ind_array] = self[ind_array] + other[ind_array]
661
- return new_data
662
- elif isinstance(other, numbers.Number) and self.length == 1 and self.size == 1:
663
- new_data = copy.deepcopy(self)
664
- new_data = new_data + DataActuator(data=other)
665
- return new_data
666
- else:
667
- raise TypeError(f'Could not add a {other.__class__.__name__} or a {self.__class__.__name__} '
668
- f'of a different length')
669
-
670
- def __sub__(self, other: object):
671
- if isinstance(other, DataBase) and len(other) == len(self):
672
- new_data = copy.deepcopy(self)
673
- for ind_array in range(len(new_data)):
674
- new_data[ind_array] = self[ind_array] - other[ind_array]
675
- return new_data
676
- elif isinstance(other, numbers.Number) and self.length == 1 and self.size == 1:
677
- new_data = copy.deepcopy(self)
678
- new_data = new_data - DataActuator(data=other)
679
- return new_data
680
- else:
681
- raise TypeError(f'Could not substract a {other.__class__.__name__} or a {self.__class__.__name__} '
682
- f'of a different length')
683
-
684
- def __mul__(self, other):
685
- if isinstance(other, numbers.Number):
686
- new_data = copy.deepcopy(self)
687
- for ind_array in range(len(new_data)):
688
- new_data[ind_array] = self[ind_array] * other
689
- return new_data
690
- else:
691
- raise TypeError(f'Could not multiply a {other.__class__.__name__} and a {self.__class__.__name__} '
692
- f'of a different length')
693
-
694
- def __truediv__(self, other):
695
- if isinstance(other, numbers.Number):
696
- return self * (1 / other)
697
- else:
698
- raise TypeError(f'Could not divide a {other.__class__.__name__} and a {self.__class__.__name__} '
699
- f'of a different length')
700
-
701
- def _comparison_common(self, other, operator='__eq__'):
702
- if isinstance(other, DataBase):
703
- if not(self.name == other.name and len(self) == len(other)):
704
- return False
705
- if self.dim != other.dim:
706
- return False
707
- eq = True
708
- for ind in range(len(self)):
709
- if self[ind].shape != other[ind].shape:
710
- eq = False
711
- break
712
- eq = eq and np.all(getattr(self[ind], operator)(other[ind]))
713
- # extra attributes are not relevant as they may contain module specific data...
714
- # eq = eq and (self.extra_attributes == other.extra_attributes)
715
- # for attribute in self.extra_attributes:
716
- # eq = eq and (getattr(self, attribute) == getattr(other, attribute))
717
- return eq
718
- elif isinstance(other, numbers.Number):
719
- return np.all(getattr(self[0], operator)(other))
720
- else:
721
- raise TypeError()
722
-
723
- def __eq__(self, other):
724
- return self._comparison_common(other, '__eq__')
725
-
726
- def __le__(self, other):
727
- return self._comparison_common(other, '__le__')
728
-
729
- def __lt__(self, other):
730
- return self._comparison_common(other, '__lt__')
731
-
732
- def __ge__(self, other):
733
- return self._comparison_common(other, '__ge__')
734
-
735
- def __gt__(self, other):
736
- return self._comparison_common(other, '__gt__')
737
-
738
- def deepcopy(self):
739
- return copy.deepcopy(self)
740
-
741
- def average(self, other: 'DataBase', weight: int) -> 'DataBase':
742
- """ Compute the weighted average between self and other DataBase
743
-
744
- Parameters
745
- ----------
746
- other_data: DataBase
747
- weight: int
748
- The weight the 'other' holds with respect to self
749
- Returns
750
- -------
751
- DataBase: the averaged DataBase object
752
- """
753
- if isinstance(other, DataBase) and len(other) == len(self) and isinstance(weight, numbers.Number):
754
- return (other * weight + self) / (weight + 1)
755
- else:
756
- raise TypeError(f'Could not average a {other.__class__.__name__} or a {self.__class__.__name__} '
757
- f'of a different length')
758
-
759
- def abs(self):
760
- """ Take the absolute value of itself"""
761
- new_data = copy.copy(self)
762
- new_data.data = [np.abs(dat) for dat in new_data]
763
- return new_data
764
-
765
- def real(self):
766
- """ Take the real part of itself"""
767
- new_data = copy.copy(self)
768
- new_data.data = [np.real(dat) for dat in new_data]
769
- return new_data
770
-
771
- def imag(self):
772
- """ Take the imaginary part of itself"""
773
- new_data = copy.copy(self)
774
- new_data.data = [np.imag(dat) for dat in new_data]
775
- return new_data
776
-
777
- def flipud(self):
778
- """Reverse the order of elements along axis 0 (up/down)"""
779
- new_data = copy.copy(self)
780
- new_data.data = [np.flipud(dat) for dat in new_data]
781
- return new_data
782
-
783
- def fliplr(self):
784
- """Reverse the order of elements along axis 1 (left/right)"""
785
- new_data = copy.copy(self)
786
- new_data.data = [np.fliplr(dat) for dat in new_data]
787
- return new_data
788
-
789
- def append(self, data: DataWithAxes):
790
- for dat in data:
791
- if dat.shape != self.shape:
792
- raise DataShapeError('Cannot append those ndarrays, they don\'t have the same shape as self')
793
- self.data += data.data
794
- self.labels.extend(data.labels)
795
-
796
- def pop(self, index: int) -> DataBase:
797
- """ Returns a copy of self but with data taken at the specified index"""
798
- dwa = self.deepcopy()
799
- dwa.data = [dwa.data[index]]
800
- dwa.labels = [dwa.labels[index]]
801
- return dwa
802
-
803
- @property
804
- def shape(self):
805
- """The shape of the nd-arrays"""
806
- return self._shape
807
-
808
- def stack_as_array(self, axis=0, dtype=None) -> np.ndarray:
809
- """ Stack all data arrays in a single numpy array
810
-
811
- Parameters
812
- ----------
813
- axis: int
814
- The new stack axis index, default 0
815
- dtype: str or np.dtype
816
- the dtype of the stacked array
817
-
818
- Returns
819
- -------
820
- np.ndarray
821
-
822
- See Also
823
- --------
824
- :meth:`np.stack`
825
- """
826
-
827
- return np.stack(self.data, axis=axis, dtype=dtype)
828
-
829
- @property
830
- def size(self):
831
- """The size of the nd-arrays"""
832
- return self._size
833
-
834
- @property
835
- def dim(self):
836
- """DataDim: the enum representing the dimensionality of the stored data"""
837
- return self._dim
838
-
839
- def set_dim(self, dim: Union[DataDim, str]):
840
- """Addhoc modification of dim independantly of the real data shape, should be used with extra care"""
841
- self._dim = enum_checker(DataDim, dim)
842
-
843
- @property
844
- def source(self):
845
- """DataSource: the enum representing the source of the data"""
846
- return self._source
847
-
848
- @source.setter
849
- def source(self, source_type: Union[str, DataSource]):
850
- """DataSource: the enum representing the source of the data"""
851
- source_type = enum_checker(DataSource, source_type)
852
- self._source = source_type
853
-
854
- @property
855
- def distribution(self):
856
- """DataDistribution: the enum representing the distribution of the stored data"""
857
- return self._distribution
858
-
859
- @property
860
- def length(self):
861
- """The length of data. This is the length of the list containing the nd-arrays"""
862
- return self._length
863
-
864
- @property
865
- def labels(self):
866
- return self._labels
867
-
868
- @labels.setter
869
- def labels(self, labels: List['str']):
870
- self._check_labels(labels)
871
-
872
- def _check_labels(self, labels: List['str']):
873
- if labels is None:
874
- labels = []
875
- else:
876
- labels = labels[:]
877
- while len(labels) < self.length:
878
- labels.append(f'CH{len(labels):02d}')
879
- self._labels = labels
880
-
881
- def get_data_index(self, index: int = 0) -> np.ndarray:
882
- """Get the data by its index in the list, same as self[index]"""
883
- return self.data[index]
884
-
885
- @staticmethod
886
- def _check_data_type(data: List[np.ndarray]) -> List[np.ndarray]:
887
- """make sure data is a list of nd-arrays"""
888
- is_valid = True
889
- if data is None:
890
- is_valid = False
891
- if not isinstance(data, list):
892
- # try to transform the data to regular type
893
- if isinstance(data, np.ndarray):
894
- warnings.warn(DataTypeWarning(f'Your data should be a list of numpy arrays not just a single numpy'
895
- f' array, wrapping them with a list'))
896
- data = [data]
897
- elif isinstance(data, numbers.Number):
898
- warnings.warn(DataTypeWarning(f'Your data should be a list of numpy arrays not just a single numpy'
899
- f' array, wrapping them with a list'))
900
- data = [np.array([data])]
901
- else:
902
- is_valid = False
903
- if isinstance(data, list):
904
- if len(data) == 0:
905
- is_valid = False
906
- elif not isinstance(data[0], np.ndarray):
907
- is_valid = False
908
- elif len(data[0].shape) == 0:
909
- is_valid = False
910
- if not is_valid:
911
- raise TypeError(f'Data should be an non-empty list of non-empty numpy arrays')
912
- return data
913
-
914
- def check_shape_from_data(self, data: List[np.ndarray]):
915
- self._shape = data[0].shape
916
-
917
- @staticmethod
918
- def _get_dim_from_data(data: List[np.ndarray]) -> DataDim:
919
- shape = data[0].shape
920
- size = data[0].size
921
- if len(shape) == 1 and size == 1:
922
- dim = DataDim['Data0D']
923
- elif len(shape) == 1 and size > 1:
924
- dim = DataDim['Data1D']
925
- elif len(shape) == 2:
926
- dim = DataDim['Data2D']
927
- else:
928
- dim = DataDim['DataND']
929
- return dim
930
-
931
- def get_dim_from_data(self, data: List[np.ndarray]):
932
- """Get the dimensionality DataDim from data"""
933
- self.check_shape_from_data(data)
934
- self._size = data[0].size
935
- self._length = len(data)
936
- if len(self._shape) == 1 and self._size == 1:
937
- dim = DataDim['Data0D']
938
- elif len(self._shape) == 1 and self._size > 1:
939
- dim = DataDim['Data1D']
940
- elif len(self._shape) == 2:
941
- dim = DataDim['Data2D']
942
- else:
943
- dim = DataDim['DataND']
944
- return dim
945
-
946
- def _check_shape_dim_consistency(self, data: List[np.ndarray]):
947
- """Process the dim from data or make sure data and DataDim are coherent"""
948
- dim = self.get_dim_from_data(data)
949
- if self._dim is None:
950
- self._dim = dim
951
- else:
952
- self._dim = enum_checker(DataDim, self._dim)
953
- if self._dim != dim:
954
- warnings.warn(DataDimWarning('The specified dimensionality is not coherent with the data shape, '
955
- 'replacing it'))
956
- self._dim = dim
957
-
958
- def _check_same_shape(self, data: List[np.ndarray]):
959
- """Check that all nd-arrays have the same shape"""
960
- for dat in data:
961
- if dat.shape != self.shape:
962
- raise DataShapeError('The shape of the ndarrays in data is not the same')
963
-
964
- @property
965
- def data(self) -> List[np.ndarray]:
966
- """List[np.ndarray]: get/set (and check) the data the object is storing"""
967
- return self._data
968
-
969
- @data.setter
970
- def data(self, data: List[np.ndarray]):
971
- data = self._check_data_type(data)
972
- #data = [squeeze(data_array) for data_array in data]
973
- self._check_shape_dim_consistency(data)
974
- self._check_same_shape(data)
975
- self._data = data
976
-
977
- def to_dict(self):
978
- data_dict = OrderedDict([])
979
- for ind in range(len(self)):
980
- data_dict[self.labels[ind]] = self[ind]
981
- return data_dict
982
-
983
-
984
- class AxesManagerBase:
985
- def __init__(self, data_shape: Tuple[int], axes: List[Axis], nav_indexes=None, sig_indexes=None, **kwargs):
986
- self._data_shape = data_shape[:] # initial shape needed for self._check_axis
987
- self._axes = axes[:]
988
- self._nav_indexes = nav_indexes
989
- self._sig_indexes = sig_indexes if sig_indexes is not None else self.compute_sig_indexes()
990
-
991
- self._check_axis(self._axes)
992
- self._manage_named_axes(self._axes, **kwargs)
993
-
994
- @property
995
- def axes(self):
996
- return self._axes
997
-
998
- @axes.setter
999
- def axes(self, axes: List[Axis]):
1000
- self._axes = axes[:]
1001
- self._check_axis(self._axes)
1002
-
1003
- @abstractmethod
1004
- def _check_axis(self, axes):
1005
- ...
1006
-
1007
- @abstractmethod
1008
- def get_sorted_index(self, axis_index: int = 0, spread_index=0) -> Tuple[np.ndarray, Tuple[slice]]:
1009
- """ Get the index to sort the specified axis
1010
-
1011
- Parameters
1012
- ----------
1013
- axis_index: int
1014
- The index along which one should sort the data
1015
- spread_index: int
1016
- for spread data only, specifies which spread axis to use
1017
-
1018
- Returns
1019
- -------
1020
- np.ndarray: the sorted index from the specified axis
1021
- tuple of slice:
1022
- used to slice the underlying data
1023
- """
1024
- ...
1025
-
1026
- @abstractmethod
1027
- def get_axis_from_index_spread(self, index: int, spread_order: int) -> Axis:
1028
- """in spread mode, different nav axes have the same index (but not
1029
- the same spread_order integer value)
1030
-
1031
- """
1032
- ...
1033
-
1034
- def compute_sig_indexes(self):
1035
- _shape = list(self._data_shape)
1036
- indexes = list(np.arange(len(self._data_shape)))
1037
- for index in self.nav_indexes:
1038
- if index in indexes:
1039
- indexes.pop(indexes.index(index))
1040
- return tuple(indexes)
1041
-
1042
- def _has_get_axis_from_index(self, index: int):
1043
- """Check if the axis referred by a given data dimensionality index is present
1044
-
1045
- Returns
1046
- -------
1047
- bool: True if the axis has been found else False
1048
- Axis or None: return the axis instance if has the axis else None
1049
- """
1050
- if index > len(self._data_shape) or index < 0:
1051
- raise IndexError('The specified index does not correspond to any data dimension')
1052
- for axis in self.axes:
1053
- if axis.index == index:
1054
- return True, axis
1055
- return False, None
1056
-
1057
- def _manage_named_axes(self, axes, x_axis=None, y_axis=None, nav_x_axis=None, nav_y_axis=None):
1058
- """This method make sur old style Data is still compatible, especially when using x_axis or y_axis parameters"""
1059
- modified = False
1060
- if x_axis is not None:
1061
- modified = True
1062
- index = 0
1063
- if len(self._data_shape) == 1 and not self._has_get_axis_from_index(0)[0]:
1064
- # in case of Data1D the x_axis corresponds to the first data dim
1065
- index = 0
1066
- elif len(self._data_shape) == 2 and not self._has_get_axis_from_index(1)[0]:
1067
- # in case of Data2D the x_axis corresponds to the second data dim (columns)
1068
- index = 1
1069
- axes.append(Axis(x_axis.label, x_axis.units, x_axis.data, index=index))
1070
-
1071
- if y_axis is not None:
1072
-
1073
- if len(self._data_shape) == 2 and not self._has_get_axis_from_index(0)[0]:
1074
- modified = True
1075
- # in case of Data2D the y_axis corresponds to the first data dim (lines)
1076
- axes.append(Axis(y_axis.label, y_axis.units, y_axis.data, index=0))
1077
-
1078
- if nav_x_axis is not None:
1079
- if len(self.nav_indexes) > 0:
1080
- modified = True
1081
- # in case of DataND the y_axis corresponds to the first data dim (lines)
1082
- axes.append(Axis(nav_x_axis.label, nav_x_axis.units, nav_x_axis.data, index=self._nav_indexes[0]))
1083
-
1084
- if nav_y_axis is not None:
1085
- if len(self.nav_indexes) > 1:
1086
- modified = True
1087
- # in case of Data2D the y_axis corresponds to the first data dim (lines)
1088
- axes.append(Axis(nav_y_axis.label, nav_y_axis.units, nav_y_axis.data, index=self._nav_indexes[1]))
1089
-
1090
- if modified:
1091
- self._check_axis(axes)
1092
-
1093
- @property
1094
- def shape(self) -> Tuple[int]:
1095
- # self._data_shape = self.compute_shape_from_axes()
1096
- return self._data_shape
1097
-
1098
- @abstractmethod
1099
- def compute_shape_from_axes(self):
1100
- ...
1101
-
1102
- @property
1103
- def sig_shape(self) -> tuple:
1104
- return tuple([self.shape[ind] for ind in self.sig_indexes])
1105
-
1106
- @property
1107
- def nav_shape(self) -> tuple:
1108
- return tuple([self.shape[ind] for ind in self.nav_indexes])
1109
-
1110
- def append_axis(self, axis: Axis):
1111
- self._axes.append(axis)
1112
- self._check_axis([axis])
1113
-
1114
- @property
1115
- def nav_indexes(self) -> IterableType[int]:
1116
- return self._nav_indexes
1117
-
1118
- @nav_indexes.setter
1119
- def nav_indexes(self, nav_indexes: IterableType[int]):
1120
- if isinstance(nav_indexes, Iterable):
1121
- nav_indexes = tuple(nav_indexes)
1122
- valid = True
1123
- for index in nav_indexes:
1124
- if index not in self.get_axes_index():
1125
- logger.warning('Could not set the corresponding nav_index into the data object, not enough'
1126
- ' Axis declared')
1127
- valid = False
1128
- break
1129
- if valid:
1130
- self._nav_indexes = nav_indexes
1131
- else:
1132
- logger.warning('Could not set the corresponding sig_indexes into the data object, should be an iterable')
1133
- self.sig_indexes = self.compute_sig_indexes()
1134
- self.shape
1135
-
1136
- @property
1137
- def sig_indexes(self) -> IterableType[int]:
1138
- return self._sig_indexes
1139
-
1140
- @sig_indexes.setter
1141
- def sig_indexes(self, sig_indexes: IterableType[int]):
1142
- if isinstance(sig_indexes, Iterable):
1143
- sig_indexes = tuple(sig_indexes)
1144
- valid = True
1145
- for index in sig_indexes:
1146
- if index in self._nav_indexes:
1147
- logger.warning('Could not set the corresponding sig_index into the axis manager object, '
1148
- 'the axis is already affected to the navigation axis')
1149
- valid = False
1150
- break
1151
- if index not in self.get_axes_index():
1152
- logger.warning('Could not set the corresponding nav_index into the data object, not enough'
1153
- ' Axis declared')
1154
- valid = False
1155
- break
1156
- if valid:
1157
- self._sig_indexes = sig_indexes
1158
- else:
1159
- logger.warning('Could not set the corresponding sig_indexes into the data object, should be an iterable')
1160
-
1161
- @property
1162
- def nav_axes(self) -> List[int]:
1163
- deprecation_msg('nav_axes parameter should not be used anymore, use nav_indexes')
1164
- return self._nav_indexes
1165
-
1166
- @nav_axes.setter
1167
- def nav_axes(self, nav_indexes: List[int]):
1168
- deprecation_msg('nav_axes parameter should not be used anymore, use nav_indexes')
1169
- self.nav_indexes = nav_indexes
1170
-
1171
- def is_axis_signal(self, axis: Axis) -> bool:
1172
- """Check if an axis is considered signal or navigation"""
1173
- return axis.index in self._nav_indexes
1174
-
1175
- def is_axis_navigation(self, axis: Axis) -> bool:
1176
- """Check if an axis is considered signal or navigation"""
1177
- return axis.index not in self._nav_indexes
1178
-
1179
- @abstractmethod
1180
- def get_shape_from_index(self, index: int) -> int:
1181
- """Get the data shape at the given index"""
1182
- ...
1183
-
1184
- def get_axes_index(self) -> List[int]:
1185
- """Get the index list from the axis objects"""
1186
- return [axis.index for axis in self._axes]
1187
-
1188
- @abstractmethod
1189
- def get_axis_from_index(self, index: int, create: bool = False) -> List[Axis]:
1190
- ...
1191
-
1192
- def get_axis_from_index_spread(self, index: int, spread_order: int) -> Axis:
1193
- """Only valid for Spread data"""
1194
- ...
1195
-
1196
- def get_nav_axes(self) -> List[Axis]:
1197
- """Get the navigation axes corresponding to the data
1198
-
1199
- Use get_axis_from_index for all index in self.nav_indexes, but in spread distribution, one index may
1200
- correspond to multiple nav axes, see Spread data distribution
1201
-
1202
-
1203
- """
1204
- return list(mutils.flatten([copy.copy(self.get_axis_from_index(index, create=True))
1205
- for index in self.nav_indexes]))
1206
-
1207
- def get_signal_axes(self):
1208
- if self.sig_indexes is None:
1209
- self._sig_indexes = tuple([int(axis.index) for axis in self.axes if axis.index not in self.nav_indexes])
1210
- axes = []
1211
- for index in self._sig_indexes:
1212
- axes_tmp = copy.copy(self.get_axis_from_index(index, create=True))
1213
- for ax in axes_tmp:
1214
- if ax.size > 1:
1215
- axes.append(ax)
1216
- return axes
1217
-
1218
- def is_axis_signal(self, axis: Axis) -> bool:
1219
- """Check if an axis is considered signal or navigation"""
1220
- return axis.index in self._nav_indexes
1221
-
1222
- def is_axis_navigation(self, axis: Axis) -> bool:
1223
- """Check if an axis is considered signal or navigation"""
1224
- return axis.index not in self._nav_indexes
1225
-
1226
- def __repr__(self):
1227
- return self._get_dimension_str()
1228
-
1229
- @abstractmethod
1230
- def _get_dimension_str(self):
1231
- ...
1232
-
1233
-
1234
- class AxesManagerUniform(AxesManagerBase):
1235
- def __init__(self, *args, **kwargs):
1236
- super().__init__(*args, **kwargs)
1237
-
1238
- def compute_shape_from_axes(self):
1239
- if len(self.axes) != 0:
1240
- shape = []
1241
- for ind in range(len(self.axes)):
1242
- shape.append(len(self.get_axis_from_index(ind, create=True)[0]))
1243
- else:
1244
- shape = self._data_shape
1245
- return tuple(shape)
1246
-
1247
- def get_shape_from_index(self, index: int) -> int:
1248
- """Get the data shape at the given index"""
1249
- if index > len(self._data_shape) or index < 0:
1250
- raise IndexError('The specified index does not correspond to any data dimension')
1251
- return self._data_shape[index]
1252
-
1253
- def _check_axis(self, axes: List[Axis]):
1254
- """Check all axis to make sure of their type and make sure their data are properly referring to the data index
1255
-
1256
- See Also
1257
- --------
1258
- :py:meth:`Axis.create_linear_data`
1259
- """
1260
- for ind, axis in enumerate(axes):
1261
- if not isinstance(axis, Axis):
1262
- raise TypeError(f'An axis of {self.__class__.__name__} should be an Axis object')
1263
- if self.get_shape_from_index(axis.index) != axis.size:
1264
- warnings.warn(DataSizeWarning('The size of the axis is not coherent with the shape of the data. '
1265
- 'Replacing it with a linspaced version: np.array([0, 1, 2, ...])'))
1266
- axis.size = self.get_shape_from_index(axis.index)
1267
- axis.scaling = 1
1268
- axis.offset = 0
1269
- axes[ind] = axis
1270
- self._axes = axes
1271
-
1272
- def get_axis_from_index(self, index: int, create: bool = False) -> List[Axis]:
1273
- """Get the axis referred by a given data dimensionality index
1274
-
1275
- If the axis is absent, create a linear one to fit the data shape if parameter create is True
1276
-
1277
- Parameters
1278
- ----------
1279
- index: int
1280
- The index referring to the data ndarray shape
1281
- create: bool
1282
- If True and the axis referred by index has not been found in axes, create one
1283
-
1284
- Returns
1285
- -------
1286
- List[Axis] or None: return the list of axis instance if Data has the axis (or it has been created) else None
1287
-
1288
- See Also
1289
- --------
1290
- :py:meth:`Axis.create_linear_data`
1291
- """
1292
- index = int(index)
1293
- has_axis, axis = self._has_get_axis_from_index(index)
1294
- if not has_axis:
1295
- if create:
1296
- warnings.warn(DataIndexWarning(f'The axis requested with index {index} is not present, '
1297
- f'creating a linear one...'))
1298
- axis = Axis(index=index, offset=0, scaling=1)
1299
- axis.size = self.get_shape_from_index(index)
1300
- else:
1301
- warnings.warn(DataIndexWarning(f'The axis requested with index {index} is not present, returning None'))
1302
- return [axis]
1303
-
1304
- def get_axis_from_index_spread(self, index: int, spread_order: int) -> Axis:
1305
- """in spread mode, different nav axes have the same index (but not
1306
- the same spread_order integer value)
1307
-
1308
- """
1309
- return None
1310
-
1311
- def get_sorted_index(self, axis_index: int = 0, spread_index=0) -> Tuple[np.ndarray, Tuple[slice]]:
1312
- """ Get the index to sort the specified axis
1313
-
1314
- Parameters
1315
- ----------
1316
- axis_index: int
1317
- The index along which one should sort the data
1318
- spread_index: int
1319
- for spread data only, specifies which spread axis to use
1320
-
1321
- Returns
1322
- -------
1323
- np.ndarray: the sorted index from the specified axis
1324
- tuple of slice:
1325
- used to slice the underlying data
1326
- """
1327
-
1328
- axes = self.get_axis_from_index(axis_index)
1329
- if axes[0] is not None:
1330
- sorted_index = np.argsort(axes[0].get_data())
1331
- axes[0].data = axes[0].get_data()[sorted_index]
1332
- slices = []
1333
- for ind in range(len(self.shape)):
1334
- if ind == axis_index:
1335
- slices.append(sorted_index)
1336
- else:
1337
- slices.append(Ellipsis)
1338
- slices = tuple(slices)
1339
- return sorted_index, slices
1340
- else:
1341
- return None, None
1342
-
1343
- def _get_dimension_str(self):
1344
- string = "("
1345
- for nav_index in self.nav_indexes:
1346
- string += str(self._data_shape[nav_index]) + ", "
1347
- string = string.rstrip(", ")
1348
- string += "|"
1349
- for sig_index in self.sig_indexes:
1350
- string += str(self._data_shape[sig_index]) + ", "
1351
- string = string.rstrip(", ")
1352
- string += ")"
1353
- return string
1354
-
1355
-
1356
- class AxesManagerSpread(AxesManagerBase):
1357
- """For this particular data category, some explanation is needed, see example below:
1358
-
1359
- Examples
1360
- --------
1361
- One take images data (20x30) as a function of 2 parameters, say xaxis and yaxis non-linearly spaced on a regular
1362
- grid.
1363
-
1364
- data.shape = (150, 20, 30)
1365
- data.nav_indexes = (0,)
1366
-
1367
- The first dimension (150) corresponds to the navigation (there are 150 non uniform data points taken)
1368
- The second and third could correspond to signal data, here an image of size (20x30)
1369
- so:
1370
- * nav_indexes is (0, )
1371
- * sig_indexes are (1, 2)
1372
-
1373
- xaxis = Axis(name=xaxis, index=0, data...) length 150
1374
- yaxis = Axis(name=yaxis, index=0, data...) length 150
1375
-
1376
- In fact from such a data shape the number of navigation axes in unknown . In our example, they are 2. To somehow
1377
- keep track of some ordering in these navigation axes, one adds an attribute to the Axis object: the spread_order
1378
- xaxis = Axis(name=xaxis, index=0, spread_order=0, data...) length 150
1379
- yaxis = Axis(name=yaxis, index=0, spread_order=1, data...) length 150
1380
- """
1381
-
1382
- def __init__(self, *args, **kwargs):
1383
- super().__init__(*args, **kwargs)
1384
-
1385
- def _check_axis(self, axes: List[Axis]):
1386
- """Check all axis to make sure of their type and make sure their data are properly referring to the data index
1387
-
1388
- """
1389
- for axis in axes:
1390
- if not isinstance(axis, Axis):
1391
- raise TypeError(f'An axis of {self.__class__.__name__} should be an Axis object')
1392
- elif len(self.nav_indexes) != 1:
1393
- raise ValueError('Spread data should have only one specified index in self.nav_indexes')
1394
- elif axis.index in self.nav_indexes:
1395
- if axis.size != 1 and (axis.size != self._data_shape[self.nav_indexes[0]]):
1396
- raise DataLengthError('all navigation axes should have the same size')
1397
-
1398
- def compute_shape_from_axes(self):
1399
- """Get data shape from axes
1400
-
1401
- First get the nav length from one of the navigation axes
1402
- Then check for signal axes
1403
- """
1404
- if len(self.axes) != 0:
1405
-
1406
- axes = sorted(self.axes, key=lambda axis: axis.index)
1407
-
1408
- shape = []
1409
- for axis in axes:
1410
- if axis.index in self.nav_indexes:
1411
- shape.append(axis.size)
1412
- break
1413
- for axis in axes:
1414
- if axis.index not in self.nav_indexes:
1415
- shape.append(axis.size)
1416
- else:
1417
- shape = self._data_shape
1418
- return tuple(shape)
1419
-
1420
- def get_shape_from_index(self, index: int) -> int:
1421
- """Get the data shape at the given index"""
1422
- if index > len(self._data_shape) or index < 0:
1423
- raise IndexError('The specified index does not correspond to any data dimension')
1424
- return self._data_shape[index]
1425
-
1426
- def get_axis_from_index(self, index: int, create: bool = False) -> List[Axis]:
1427
- """in spread mode, different nav axes have the same index (but not
1428
- the same spread_order integer value) so may return multiple axis
1429
-
1430
- No possible "linear" creation in this mode except if the index is a signal index
1431
-
1432
- """
1433
- if index in self.nav_indexes:
1434
- axes = []
1435
- for axis in self.axes:
1436
- if axis.index == index:
1437
- axes.append(axis)
1438
- return axes
1439
- else:
1440
- index = int(index)
1441
- try:
1442
- has_axis, axis = self._has_get_axis_from_index(index)
1443
- except IndexError:
1444
- axis = [None]
1445
- has_axis = False
1446
- return axis
1447
-
1448
- if not has_axis and index in self.sig_indexes:
1449
- if create:
1450
- warnings.warn(DataIndexWarning(f'The axis requested with index {index} is not present, '
1451
- f'creating a linear one...'))
1452
- axis = Axis(index=index, offset=0, scaling=1)
1453
- axis.size = self.get_shape_from_index(index)
1454
- else:
1455
- warnings.warn(DataIndexWarning(f'The axis requested with index {index} is not present, returning None'))
1456
-
1457
- return [axis]
1458
-
1459
- def get_axis_from_index_spread(self, index: int, spread_order: int) -> Axis:
1460
- """in spread mode, different nav axes have the same index (but not
1461
- the same spread_order integer value)
1462
-
1463
- """
1464
- for axis in self.axes:
1465
- if axis.index == index and axis.spread_order == spread_order:
1466
- return axis
1467
-
1468
- def get_sorted_index(self, axis_index: int = 0, spread_index=0) -> Tuple[np.ndarray, Tuple[slice]]:
1469
- """ Get the index to sort the specified axis
1470
-
1471
- Parameters
1472
- ----------
1473
- axis_index: int
1474
- The index along which one should sort the data
1475
- spread_index: int
1476
- for spread data only, specifies which spread axis to use
1477
-
1478
- Returns
1479
- -------
1480
- np.ndarray: the sorted index from the specified axis
1481
- tuple of slice:
1482
- used to slice the underlying data
1483
- """
1484
-
1485
- if axis_index in self.nav_indexes:
1486
- axis = self.get_axis_from_index_spread(axis_index, spread_index)
1487
- else:
1488
- axis = self.get_axis_from_index(axis_index)[0]
1489
-
1490
- if axis is not None:
1491
- sorted_index = np.argsort(axis.get_data())
1492
- slices = []
1493
- for ind in range(len(self.shape)):
1494
- if ind == axis_index:
1495
- slices.append(sorted_index)
1496
- else:
1497
- if slices[-1] is Ellipsis: # only one ellipsis
1498
- slices.append(Ellipsis)
1499
- slices = tuple(slices)
1500
-
1501
- for nav_index in self.nav_indexes:
1502
- for axis in self.get_axis_from_index(nav_index):
1503
- axis.data = axis.get_data()[sorted_index]
1504
-
1505
- return sorted_index, slices
1506
- else:
1507
- return None, None
1508
-
1509
- def get_axis_from_index_spread(self, index: int, spread_order: int) -> Axis:
1510
- for axis in self.axes:
1511
- if axis.index == index and axis.spread_order == spread_order:
1512
- return axis
1513
-
1514
- def _get_dimension_str(self):
1515
- try:
1516
- string = "("
1517
- for nav_index in self.nav_indexes:
1518
- string += str(self._data_shape[nav_index]) + ", "
1519
- break
1520
- string = string.rstrip(", ")
1521
- string += "|"
1522
- for sig_index in self.sig_indexes:
1523
- string += str(self._data_shape[sig_index]) + ", "
1524
- string = string.rstrip(", ")
1525
- string += ")"
1526
- except Exception as e:
1527
- string = f'({self._data_shape})'
1528
- finally:
1529
- return string
1530
-
1531
-
1532
- class DataWithAxes(DataBase):
1533
- """Data object with Axis objects corresponding to underlying data nd-arrays
1534
-
1535
- Parameters
1536
- ----------
1537
- axes: list of Axis
1538
- the list of Axis object for proper plotting, calibration ...
1539
- nav_indexes: tuple of int
1540
- highlight which Axis in axes is Signal or Navigation axis depending on the content:
1541
- For instance, nav_indexes = (2,), means that the axis with index 2 in a at least 3D ndarray data is the first
1542
- navigation axis
1543
- For instance, nav_indexes = (3,2), means that the axis with index 3 in a at least 4D ndarray data is the first
1544
- navigation axis while the axis with index 2 is the second navigation Axis. Axes with index 0 and 1 are signal
1545
- axes of 2D ndarray data
1546
- errors: list of ndarray.
1547
- The list should match the length of the data attribute while the ndarrays
1548
- should match the data ndarray
1549
- """
1550
-
1551
- def __init__(self, *args, axes: List[Axis] = [],
1552
- nav_indexes: Tuple[int] = (),
1553
- errors: Iterable[np.ndarray] = None,
1554
- **kwargs):
1555
-
1556
- if 'nav_axes' in kwargs:
1557
- deprecation_msg('nav_axes parameter should not be used anymore, use nav_indexes')
1558
- nav_indexes = kwargs.pop('nav_axes')
1559
-
1560
- x_axis = kwargs.pop('x_axis') if 'x_axis' in kwargs else None
1561
- y_axis = kwargs.pop('y_axis') if 'y_axis' in kwargs else None
1562
-
1563
- nav_x_axis = kwargs.pop('nav_x_axis') if 'nav_x_axis' in kwargs else None
1564
- nav_y_axis = kwargs.pop('nav_y_axis') if 'nav_y_axis' in kwargs else None
1565
-
1566
- super().__init__(*args, **kwargs)
1567
-
1568
- self._axes = axes
1569
-
1570
- other_kwargs = dict(x_axis=x_axis, y_axis=y_axis, nav_x_axis=nav_x_axis, nav_y_axis=nav_y_axis)
1571
-
1572
- self.set_axes_manager(self.shape, axes=axes, nav_indexes=nav_indexes, **other_kwargs)
1573
-
1574
- self.inav: Iterable[DataWithAxes] = SpecialSlicersData(self, True)
1575
- self.isig: Iterable[DataWithAxes] = SpecialSlicersData(self, False)
1576
-
1577
- self.get_dim_from_data_axes() # in DataBase, dim is processed from the shape of data, but if axes are provided
1578
- #then use get_dim_from axes
1579
- self._check_errors(errors)
1580
-
1581
- def _check_errors(self, errors: Iterable[np.ndarray]):
1582
- """ Make sure the errors object is adapted to the len/shape of the dwa object
1583
-
1584
- new in 4.2.0
1585
- """
1586
- check = False
1587
- if errors is None:
1588
- self._errors = None
1589
- return
1590
- if isinstance(errors, (tuple, list)) and len(errors) == len(self):
1591
- if np.all([isinstance(error, np.ndarray) for error in errors]):
1592
- if np.all([error_array.shape == self.shape for error_array in errors]):
1593
- check = True
1594
- else:
1595
- logger.warning(f'All error objects should have the same shape as the data'
1596
- f'objects')
1597
- else:
1598
- logger.warning(f'All error objects should be np.ndarray')
1599
-
1600
- if not check:
1601
- logger.warning('the errors field is incompatible with the structure of the data')
1602
- self._errors = None
1603
- else:
1604
- self._errors = errors
1605
-
1606
- @property
1607
- def errors(self):
1608
- """ Get/Set the errors bar values as a list of np.ndarray
1609
-
1610
- new in 4.2.0
1611
- """
1612
- return self._errors
1613
-
1614
- @errors.setter
1615
- def errors(self, errors: Iterable[np.ndarray]):
1616
- self._check_errors(errors)
1617
-
1618
- def get_error(self, index):
1619
- """ Get a particular error ndarray at the given index in the list
1620
-
1621
- new in 4.2.0
1622
- """
1623
- if self._errors is not None: #because to the initial check we know it is a list of ndarrays
1624
- return self._errors[index]
1625
- else:
1626
- return np.array([0]) # this could be added to any numpy array of any shape
1627
-
1628
- def errors_as_dwa(self):
1629
- """ Get a dwa from self replacing the data content with the error attribute (if not None)
1630
-
1631
- New in 4.2.0
1632
- """
1633
- if self.errors is not None:
1634
- dwa = self.deepcopy_with_new_data(self.errors)
1635
- dwa.name = f'{self.name}_errors'
1636
- dwa.errors = None
1637
- return dwa
1638
- else:
1639
- raise ValueError(f'Cannot create a dwa from a None, should be a list of ndarray')
1640
-
1641
- def plot(self, plotter_backend: str = config('plotting', 'backend'), *args, viewer=None,
1642
- **kwargs):
1643
- """ Call a plotter factory and its plot method over the actual data"""
1644
- return plotter_factory.get(plotter_backend).plot(self, *args, viewer=viewer, **kwargs)
1645
-
1646
- def set_axes_manager(self, data_shape, axes, nav_indexes, **kwargs):
1647
- if self.distribution.name == 'uniform' or len(nav_indexes) == 0:
1648
- self._distribution = DataDistribution['uniform']
1649
- self.axes_manager = AxesManagerUniform(data_shape=data_shape, axes=axes,
1650
- nav_indexes=nav_indexes,
1651
- **kwargs)
1652
- elif self.distribution.name == 'spread':
1653
- self.axes_manager = AxesManagerSpread(data_shape=data_shape, axes=axes,
1654
- nav_indexes=nav_indexes,
1655
- **kwargs)
1656
- else:
1657
- raise ValueError(f'Such a data distribution ({data.distribution}) has no AxesManager')
1658
-
1659
- def __eq__(self, other):
1660
- is_equal = super().__eq__(other)
1661
- if isinstance(other, DataWithAxes):
1662
- for ind in list(self.nav_indexes) + list(self.sig_indexes):
1663
- axes_self = self.get_axis_from_index(ind)
1664
- axes_other = other.get_axis_from_index(ind)
1665
- if len(axes_other) != len(axes_self):
1666
- return False
1667
- for ind_ax in range(len(axes_self)):
1668
- if axes_self[ind_ax] != axes_other[ind_ax]:
1669
- return False
1670
- if self.errors is None:
1671
- is_equal = is_equal and other.errors is None
1672
- else:
1673
- for ind_error in range(len(self.errors)):
1674
- if not np.allclose(self.errors[ind_error], other.errors[ind_error]):
1675
- return False
1676
- return is_equal
1677
-
1678
- def __repr__(self):
1679
- return f'<{self.__class__.__name__}: {self.name} <len:{self.length}> {self._am}>'
1680
-
1681
- def sort_data(self, axis_index: int = 0, spread_index=0, inplace=False) -> DataWithAxes:
1682
- """ Sort data along a given axis, default is 0
1683
-
1684
- Parameters
1685
- ----------
1686
- axis_index: int
1687
- The index along which one should sort the data
1688
- spread_index: int
1689
- for spread data only, specifies which spread axis to use
1690
- inplace: bool
1691
- modify in place or not the data (and its axes)
1692
-
1693
- Returns
1694
- -------
1695
- DataWithAxes
1696
- """
1697
- if inplace:
1698
- data = self
1699
- else:
1700
- data = self.deepcopy()
1701
- sorted_index, slices = data._am.get_sorted_index(axis_index, spread_index)
1702
- if sorted_index is not None:
1703
- for ind in range(len(data)):
1704
- data.data[ind] = data.data[ind][slices]
1705
- return data
1706
-
1707
- def transpose(self):
1708
- """replace the data by their transposed version
1709
-
1710
- Valid only for 2D data
1711
- """
1712
- if self.dim == 'Data2D':
1713
- self.data[:] = [data.T for data in self.data]
1714
- for axis in self.axes:
1715
- axis.index = 0 if axis.index == 1 else 1
1716
-
1717
- def crop_at_along(self, coordinates_tuple: Tuple):
1718
- slices = []
1719
- for coordinates in coordinates_tuple:
1720
- axis = self.get_axis_from_index(0)[0]
1721
- indexes = axis.find_indexes(coordinates)
1722
- slices.append(slice(indexes))
1723
-
1724
- return self._slicer(slices, False)
1725
-
1726
- def mean(self, axis: int = 0) -> DataWithAxes:
1727
- """Process the mean of the data on the specified axis and returns the new data
1728
-
1729
- Parameters
1730
- ----------
1731
- axis: int
1732
-
1733
- Returns
1734
- -------
1735
- DataWithAxes
1736
- """
1737
- dat_mean = []
1738
- for dat in self.data:
1739
- mean = np.mean(dat, axis=axis)
1740
- if isinstance(mean, numbers.Number):
1741
- mean = np.array([mean])
1742
- dat_mean.append(mean)
1743
- return self.deepcopy_with_new_data(dat_mean, remove_axes_index=axis)
1744
-
1745
- def sum(self, axis: int = 0) -> DataWithAxes:
1746
- """Process the sum of the data on the specified axis and returns the new data
1747
-
1748
- Parameters
1749
- ----------
1750
- axis: int
1751
-
1752
- Returns
1753
- -------
1754
- DataWithAxes
1755
- """
1756
- dat_sum = []
1757
- for dat in self.data:
1758
- dat_sum.append(np.sum(dat, axis=axis))
1759
- return self.deepcopy_with_new_data(dat_sum, remove_axes_index=axis)
1760
-
1761
- def interp(self, new_axis_data: Union[Axis, np.ndarray], **kwargs) -> DataWithAxes:
1762
- """Performs linear interpolation for 1D data only.
1763
-
1764
- For more complex ones, see :py:meth:`scipy.interpolate`
1765
-
1766
- Parameters
1767
- ----------
1768
- new_axis_data: Union[Axis, np.ndarray]
1769
- The coordinates over which to do the interpolation
1770
- kwargs: dict
1771
- extra named parameters to be passed to the :py:meth:`~numpy.interp` method
1772
-
1773
- Returns
1774
- -------
1775
- DataWithAxes
1776
-
1777
- See Also
1778
- --------
1779
- :py:meth:`~numpy.interp`
1780
- :py:meth:`~scipy.interpolate`
1781
- """
1782
- if self.dim != DataDim['Data1D']:
1783
- raise ValueError('For basic interpolation, only 1D data are supported')
1784
-
1785
- data_interpolated = []
1786
- axis_obj = self.get_axis_from_index(0)[0]
1787
- if isinstance(new_axis_data, np.ndarray):
1788
- new_axis_data = Axis(axis_obj.label, axis_obj.units, data=new_axis_data)
1789
-
1790
- for dat in self.data:
1791
- data_interpolated.append(np.interp(new_axis_data.get_data(), axis_obj.get_data(), dat,
1792
- **kwargs))
1793
- new_data = DataCalculated(f'{self.name}_interp', data=data_interpolated,
1794
- axes=[new_axis_data],
1795
- labels=self.labels)
1796
- return new_data
1797
-
1798
- def ft(self, axis: int = 0) -> DataWithAxes:
1799
- """Process the Fourier Transform of the data on the specified axis and returns the new data
1800
-
1801
- Parameters
1802
- ----------
1803
- axis: int
1804
-
1805
- Returns
1806
- -------
1807
- DataWithAxes
1808
-
1809
- See Also
1810
- --------
1811
- :py:meth:`~pymodaq.utils.math_utils.ft`, :py:meth:`~numpy.fft.fft`
1812
- """
1813
- dat_ft = []
1814
- axis_obj = self.get_axis_from_index(axis)[0]
1815
- omega_grid, time_grid = mutils.ftAxis_time(len(axis_obj),
1816
- np.abs(axis_obj.max() - axis_obj.min()))
1817
- for dat in self.data:
1818
- dat_ft.append(mutils.ft(dat, dim=axis))
1819
- new_data = self.deepcopy_with_new_data(dat_ft)
1820
- axis_obj = new_data.get_axis_from_index(axis)[0]
1821
- axis_obj.data = omega_grid
1822
- axis_obj.label = f'ft({axis_obj.label})'
1823
- axis_obj.units = f'2pi/{axis_obj.units}'
1824
- return new_data
1825
-
1826
- def ift(self, axis: int = 0) -> DataWithAxes:
1827
- """Process the inverse Fourier Transform of the data on the specified axis and returns the
1828
- new data
1829
-
1830
- Parameters
1831
- ----------
1832
- axis: int
1833
-
1834
- Returns
1835
- -------
1836
- DataWithAxes
1837
-
1838
- See Also
1839
- --------
1840
- :py:meth:`~pymodaq.utils.math_utils.ift`, :py:meth:`~numpy.fft.ifft`
1841
- """
1842
- dat_ift = []
1843
- axis_obj = self.get_axis_from_index(axis)[0]
1844
- omega_grid, time_grid = mutils.ftAxis_time(len(axis_obj),
1845
- np.abs(axis_obj.max() - axis_obj.min()))
1846
- for dat in self.data:
1847
- dat_ift.append(mutils.ift(dat, dim=axis))
1848
- new_data = self.deepcopy_with_new_data(dat_ift)
1849
- axis_obj.data = omega_grid
1850
- axis_obj.label = f'ift({axis_obj.label})'
1851
- axis_obj.units = f'2pi/{axis_obj.units}'
1852
- return new_data
1853
-
1854
- def fit(self, function: Callable, initial_guess: IterableType, data_index: int = None,
1855
- axis_index: int = 0, **kwargs) -> DataCalculated:
1856
- """ Apply 1D curve fitting using the scipy optimization package
1857
-
1858
- Parameters
1859
- ----------
1860
- function: Callable
1861
- a callable to be used for the fit
1862
- initial_guess: Iterable
1863
- The initial parameters for the fit
1864
- data_index: int
1865
- The index of the data over which to do the fit, if None apply the fit to all
1866
- axis_index: int
1867
- the axis index to use for the fit (if multiple) but there should be only one
1868
- kwargs: dict
1869
- extra named parameters applied to the curve_fit scipy method
1870
-
1871
- Returns
1872
- -------
1873
- DataCalculated containing the evaluation of the fit on the specified axis
1874
-
1875
- See Also
1876
- --------
1877
- :py:meth:`~scipy.optimize.curve_fit`
1878
- """
1879
- import scipy.optimize as opt
1880
- if self.dim != DataDim['Data1D']:
1881
- raise ValueError('Integrated fitting only works for 1D data')
1882
- axis = self.get_axis_from_index(axis_index)[0].copy()
1883
- axis_array = axis.get_data()
1884
- if data_index is None:
1885
- datalist_to_fit = self.data
1886
- labels = [f'{label}_fit' for label in self.labels]
1887
- else:
1888
- datalist_to_fit = [self.data[data_index]]
1889
- labels = [f'{self.labels[data_index]}_fit']
1890
-
1891
- datalist_fitted = []
1892
- fit_coeffs = []
1893
- for data_array in datalist_to_fit:
1894
- popt, pcov = opt.curve_fit(function, axis_array, data_array, p0=initial_guess, **kwargs)
1895
- datalist_fitted.append(function(axis_array, *popt))
1896
- fit_coeffs.append(popt)
1897
-
1898
- return DataCalculated(f'{self.name}_fit', data=datalist_fitted,
1899
- labels=labels,
1900
- axes=[axis], fit_coeffs=fit_coeffs)
1901
-
1902
- def find_peaks(self, height=None, threshold=None, **kwargs) -> DataToExport:
1903
- """ Apply the scipy find_peaks method to 1D data
1904
-
1905
- Parameters
1906
- ----------
1907
- height: number or ndarray or sequence, optional
1908
- threshold: number or ndarray or sequence, optional
1909
- kwargs: dict
1910
- extra named parameters applied to the find_peaks scipy method
1911
-
1912
- Returns
1913
- -------
1914
- DataCalculated
1915
-
1916
- See Also
1917
- --------
1918
- :py:meth:`~scipy.optimize.find_peaks`
1919
- """
1920
- if self.dim != DataDim['Data1D']:
1921
- raise ValueError('Finding peaks only works for 1D data')
1922
- from scipy.signal import find_peaks
1923
- peaks_indices = []
1924
- dte = DataToExport('peaks')
1925
- for ind in range(len(self)):
1926
- peaks, properties = find_peaks(self[ind], height, threshold, **kwargs)
1927
- peaks_indices.append(peaks)
1928
-
1929
- dte.append(DataCalculated(f'{self.labels[ind]}',
1930
- data=[self[ind][peaks_indices[-1]],
1931
- peaks_indices[-1]
1932
- ],
1933
- labels=['peak value', 'peak indexes'],
1934
- axes=[Axis('peak position', self.axes[0].units,
1935
- data=self.axes[0].get_data_at(peaks_indices[-1]))])
1936
- )
1937
- return dte
1938
-
1939
- def get_dim_from_data_axes(self) -> DataDim:
1940
- """Get the dimensionality DataDim from data taking into account nav indexes
1941
- """
1942
- if len(self.axes) != len(self.shape):
1943
- self._dim = self.get_dim_from_data(self.data)
1944
- else:
1945
- if len(self.nav_indexes) > 0:
1946
- self._dim = DataDim['DataND']
1947
- else:
1948
- if len(self.axes) == 0:
1949
- self._dim = DataDim['Data0D']
1950
- elif len(self.axes) == 1:
1951
- self._dim = DataDim['Data1D']
1952
- elif len(self.axes) == 2:
1953
- self._dim = DataDim['Data2D']
1954
- if len(self.nav_indexes) > 0:
1955
- self._dim = DataDim['DataND']
1956
- return self._dim
1957
-
1958
- @property
1959
- def n_axes(self):
1960
- """Get the number of axes (even if not specified)"""
1961
- return len(self.axes)
1962
-
1963
- @property
1964
- def axes(self):
1965
- """convenience property to fetch attribute from axis_manager"""
1966
- return self._am.axes
1967
-
1968
- @axes.setter
1969
- def axes(self, axes: List[Axis]):
1970
- """convenience property to set attribute from axis_manager"""
1971
- self.set_axes_manager(self.shape, axes=axes, nav_indexes=self.nav_indexes)
1972
-
1973
- def axes_limits(self, axes_indexes: List[int] = None) -> List[Tuple[float, float]]:
1974
- """Get the limits of specified axes (all if axes_indexes is None)"""
1975
- if axes_indexes is None:
1976
- return [(axis.min(), axis.max()) for axis in self.axes]
1977
- else:
1978
- return [(axis.min(), axis.max()) for axis in self.axes if axis.index in axes_indexes]
1979
-
1980
- @property
1981
- def sig_indexes(self):
1982
- """convenience property to fetch attribute from axis_manager"""
1983
- return self._am.sig_indexes
1984
-
1985
- @property
1986
- def nav_indexes(self):
1987
- """convenience property to fetch attribute from axis_manager"""
1988
- return self._am.nav_indexes
1989
-
1990
- @nav_indexes.setter
1991
- def nav_indexes(self, indexes: List[int]):
1992
- """create new axis manager with new navigation indexes"""
1993
- self.set_axes_manager(self.shape, axes=self.axes, nav_indexes=indexes)
1994
- self.get_dim_from_data_axes()
1995
-
1996
- def get_nav_axes(self) -> List[Axis]:
1997
- return self._am.get_nav_axes()
1998
-
1999
- def get_sig_index(self) -> List[Axis]:
2000
- return self._am.get_signal_axes()
2001
-
2002
- def get_nav_axes_with_data(self) -> List[Axis]:
2003
- """Get the data's navigation axes making sure there is data in the data field"""
2004
- axes = self.get_nav_axes()
2005
- for axis in axes:
2006
- if axis.get_data() is None:
2007
- axis.create_linear_data(self.shape[axis.index])
2008
- return axes
2009
-
2010
- def get_axis_indexes(self) -> List[int]:
2011
- """Get all present different axis indexes"""
2012
- return sorted(list(set([axis.index for axis in self.axes])))
2013
-
2014
- def get_axis_from_index(self, index, create=False):
2015
- return self._am.get_axis_from_index(index, create)
2016
-
2017
- def get_axis_from_index_spread(self, index: int, spread: int):
2018
- return self._am.get_axis_from_index_spread(index, spread)
2019
-
2020
- def get_axis_from_label(self, label: str) -> Axis:
2021
- """Get the axis referred by a given label
2022
-
2023
- Parameters
2024
- ----------
2025
- label: str
2026
- The label of the axis
2027
-
2028
- Returns
2029
- -------
2030
- Axis or None: return the axis instance if it has the right label else None
2031
- """
2032
- for axis in self.axes:
2033
- if axis.label == label:
2034
- return axis
2035
-
2036
- def create_missing_axes(self):
2037
- """Check if given the data shape, some axes are missing to properly define the data
2038
- (especially for plotting)"""
2039
- axes = self.axes[:]
2040
- for index in self.nav_indexes + self.sig_indexes:
2041
- if (len(self.get_axis_from_index(index)) != 0 and
2042
- self.get_axis_from_index(index)[0] is None):
2043
- axes_tmp = self.get_axis_from_index(index, create=True)
2044
- for ax in axes_tmp:
2045
- if ax.size > 1:
2046
- axes.append(ax)
2047
- self.axes = axes
2048
-
2049
- def _compute_slices(self, slices, is_navigation=True):
2050
- """Compute the total slice to apply to the data
2051
-
2052
- Filling in Ellipsis when no slicing should be done
2053
- """
2054
- if isinstance(slices, numbers.Number) or isinstance(slices, slice):
2055
- slices = [slices]
2056
- if is_navigation:
2057
- indexes = self._am.nav_indexes
2058
- else:
2059
- indexes = self._am.sig_indexes
2060
- total_slices = []
2061
- slices = list(slices)
2062
- for ind in range(len(self.shape)):
2063
- if ind in indexes:
2064
- total_slices.append(slices.pop(0))
2065
- elif len(total_slices) == 0:
2066
- total_slices.append(Ellipsis)
2067
- elif not (Ellipsis in total_slices and total_slices[-1] is Ellipsis):
2068
- total_slices.append(slice(None))
2069
- total_slices = tuple(total_slices)
2070
- return total_slices
2071
-
2072
- def check_squeeze(self, total_slices: List[slice], is_navigation: bool):
2073
-
2074
- do_squeeze = True
2075
- if 1 in self.data[0][total_slices].shape:
2076
- if not is_navigation and self.data[0][total_slices].shape.index(1) in self.nav_indexes:
2077
- do_squeeze = False
2078
- elif is_navigation and self.data[0][total_slices].shape.index(1) in self.sig_indexes:
2079
- do_squeeze = False
2080
- return do_squeeze
2081
-
2082
- def _slicer(self, slices, is_navigation=True):
2083
- """Apply a given slice to the data either navigation or signal dimension
2084
-
2085
- Parameters
2086
- ----------
2087
- slices: tuple of slice or int
2088
- the slices to apply to the data
2089
- is_navigation: bool
2090
- if True apply the slices to the navigation dimension else to the signal ones
2091
-
2092
- Returns
2093
- -------
2094
- DataWithAxes
2095
- Object of the same type as the initial data, derived from DataWithAxes. But with lower
2096
- data size due to the slicing and with eventually less axes.
2097
- """
2098
-
2099
- if isinstance(slices, numbers.Number) or isinstance(slices, slice):
2100
- slices = [slices]
2101
- total_slices = self._compute_slices(slices, is_navigation)
2102
-
2103
- do_squeeze = self.check_squeeze(total_slices, is_navigation)
2104
- new_arrays_data = [squeeze(dat[total_slices], do_squeeze) for dat in self.data]
2105
- tmp_axes = self._am.get_signal_axes() if is_navigation else self._am.get_nav_axes()
2106
- axes_to_append = [copy.deepcopy(axis) for axis in tmp_axes]
2107
-
2108
- # axes_to_append are the axes to append to the new produced data
2109
- # (basically the ones to keep)
2110
-
2111
- indexes_to_get = self.nav_indexes if is_navigation else self.sig_indexes
2112
- # indexes_to_get are the indexes of the axes where the slice should be applied
2113
-
2114
- _indexes = list(self.nav_indexes)
2115
- _indexes.extend(self.sig_indexes)
2116
- lower_indexes = dict(zip(_indexes, [0 for _ in range(len(_indexes))]))
2117
- # lower_indexes will store for each *axis index* how much the index should be reduced
2118
- # because one axis has
2119
- # been removed
2120
-
2121
- axes = []
2122
- nav_indexes = [] if is_navigation else list(self._am.nav_indexes)
2123
- for ind_slice, _slice in enumerate(slices):
2124
- if ind_slice < len(indexes_to_get):
2125
- ax = self._am.get_axis_from_index(indexes_to_get[ind_slice])
2126
- if len(ax) != 0 and ax[0] is not None:
2127
- for ind in range(len(ax)):
2128
- ax[ind] = ax[ind].iaxis[_slice]
2129
-
2130
- if not(ax[0] is None or ax[0].size <= 1): # means the slice kept part of the axis
2131
- if is_navigation:
2132
- nav_indexes.append(self._am.nav_indexes[ind_slice])
2133
- axes.extend(ax)
2134
- else:
2135
- for axis in axes_to_append: # means we removed one of the axes (and data dim),
2136
- # hence axis index above current index should be lowered by 1
2137
- if axis.index > indexes_to_get[ind_slice]:
2138
- lower_indexes[axis.index] += 1
2139
- for index in indexes_to_get[ind_slice+1:]:
2140
- lower_indexes[index] += 1
2141
-
2142
- axes.extend(axes_to_append)
2143
- for axis in axes:
2144
- axis.index -= lower_indexes[axis.index]
2145
- for ind in range(len(nav_indexes)):
2146
- nav_indexes[ind] -= lower_indexes[nav_indexes[ind]]
2147
-
2148
- if len(nav_indexes) != 0:
2149
- distribution = self.distribution
2150
- else:
2151
- distribution = DataDistribution['uniform']
2152
-
2153
- data = DataWithAxes(self.name, data=new_arrays_data, nav_indexes=tuple(nav_indexes),
2154
- axes=axes,
2155
- source='calculated', origin=self.origin,
2156
- labels=self.labels[:],
2157
- distribution=distribution)
2158
- return data
2159
-
2160
- def deepcopy_with_new_data(self, data: List[np.ndarray] = None,
2161
- remove_axes_index: Union[int, List[int]] = None,
2162
- source: DataSource = 'calculated',
2163
- keep_dim=False) -> DataWithAxes:
2164
- """deepcopy without copying the initial data (saving memory)
2165
-
2166
- The new data, may have some axes stripped as specified in remove_axes_index
2167
-
2168
- Parameters
2169
- ----------
2170
- data: list of numpy ndarray
2171
- The new data
2172
- remove_axes_index: tuple of int
2173
- indexes of the axis to be removed
2174
- source: DataSource
2175
- keep_dim: bool
2176
- if False (the default) will calculate the new dim based on the data shape
2177
- else keep the same (be aware it could lead to issues)
2178
-
2179
- Returns
2180
- -------
2181
- DataWithAxes
2182
- """
2183
- try:
2184
- old_data = self.data
2185
- self._data = None
2186
- new_data = self.deepcopy()
2187
- new_data._data = data
2188
- new_data.get_dim_from_data(data)
2189
-
2190
- if source is not None:
2191
- source = enum_checker(DataSource, source)
2192
- new_data._source = source
2193
-
2194
- if remove_axes_index is not None:
2195
- if not isinstance(remove_axes_index, Iterable):
2196
- remove_axes_index = [remove_axes_index]
2197
-
2198
- lower_indexes = dict(zip(new_data.get_axis_indexes(),
2199
- [0 for _ in range(len(new_data.get_axis_indexes()))]))
2200
- # lower_indexes will store for each *axis index* how much the index should be reduced because one axis has
2201
- # been removed
2202
-
2203
- nav_indexes = list(new_data.nav_indexes)
2204
- sig_indexes = list(new_data.sig_indexes)
2205
- for index in remove_axes_index:
2206
- for axis in new_data.get_axis_from_index(index):
2207
- if axis is not None:
2208
- new_data.axes.remove(axis)
2209
-
2210
- if index in new_data.nav_indexes:
2211
- nav_indexes.pop(nav_indexes.index(index))
2212
- if index in new_data.sig_indexes:
2213
- sig_indexes.pop(sig_indexes.index(index))
2214
-
2215
- # for ind, nav_ind in enumerate(nav_indexes):
2216
- # if nav_ind > index and nav_ind not in remove_axes_index:
2217
- # nav_indexes[ind] -= 1
2218
-
2219
- # for ind, sig_ind in enumerate(sig_indexes):
2220
- # if sig_ind > index:
2221
- # sig_indexes[ind] -= 1
2222
- for axis in new_data.axes:
2223
- if axis.index > index and axis.index not in remove_axes_index:
2224
- lower_indexes[axis.index] += 1
2225
-
2226
- for axis in new_data.axes:
2227
- axis.index -= lower_indexes[axis.index]
2228
- for ind in range(len(nav_indexes)):
2229
- nav_indexes[ind] -= lower_indexes[nav_indexes[ind]]
2230
-
2231
- new_data.nav_indexes = tuple(nav_indexes)
2232
- # new_data._am.sig_indexes = tuple(sig_indexes)
2233
-
2234
- new_data._shape = data[0].shape
2235
- if not keep_dim:
2236
- new_data._dim = self._get_dim_from_data(data)
2237
- return new_data
2238
-
2239
- except Exception as e:
2240
- pass
2241
- finally:
2242
- self._data = old_data
2243
-
2244
-
2245
-
2246
- @property
2247
- def _am(self) -> AxesManagerBase:
2248
- return self.axes_manager
2249
-
2250
- def get_data_dimension(self) -> str:
2251
- return str(self._am)
1
+ import numpy as np
2
+ import numbers
3
+ import warnings
2252
4
 
2253
- def get_data_as_dwa(self, index: int = 0) -> DataWithAxes:
2254
- """ Get the underlying data selected from the list at index, returned as a DataWithAxes"""
2255
- return self.deepcopy_with_new_data([self[index]])
5
+ from typing import List
2256
6
 
7
+ from pymodaq_utils.warnings import deprecation_msg, user_warning
2257
8
 
2258
- class DataRaw(DataWithAxes):
2259
- """Specialized DataWithAxes set with source as 'raw'. To be used for raw data"""
2260
- def __init__(self, *args, **kwargs):
2261
- if 'source' in kwargs:
2262
- kwargs.pop('source')
2263
- super().__init__(*args, source=DataSource['raw'], **kwargs)
9
+ from pymodaq_data.data import (DataRaw, DataWithAxes, DataToExport, DataCalculated, DataDim,
10
+ DataSource, DataBase, Axis, NavAxis)
2264
11
 
2265
12
 
2266
13
  class DataActuator(DataRaw):
@@ -2341,489 +88,6 @@ class DataFromPlugins(DataRaw):
2341
88
  super().__init__(*args, **kwargs)
2342
89
 
2343
90
 
2344
- class DataCalculated(DataWithAxes):
2345
- """Specialized DataWithAxes set with source as 'calculated'. To be used for processed/calculated data"""
2346
- def __init__(self, *args, axes=[], **kwargs):
2347
- if 'source' in kwargs:
2348
- kwargs.pop('source')
2349
- super().__init__(*args, source=DataSource['calculated'], axes=axes, **kwargs)
2350
-
2351
-
2352
- class DataFromRoi(DataCalculated):
2353
- """Specialized DataWithAxes set with source as 'calculated'.To be used for processed data from region of interest"""
2354
- def __init__(self, *args, axes=[], **kwargs):
2355
- super().__init__(*args, axes=axes, **kwargs)
2356
-
2357
-
2358
- class DataToExport(DataLowLevel):
2359
- """Object to store all raw and calculated DataWithAxes data for later exporting, saving, sending signal...
2360
-
2361
- Includes methods to retrieve data from dim, source...
2362
- Stored data have a unique identifier their name. If some data is appended with an existing name, it will replace
2363
- the existing data. So if you want to append data that has the same name
2364
-
2365
- Parameters
2366
- ----------
2367
- name: str
2368
- The identifier of the exporting object
2369
- data: list of DataWithAxes
2370
- All the raw and calculated data to be exported
2371
-
2372
- Attributes
2373
- ----------
2374
- name
2375
- timestamp
2376
- data
2377
- """
2378
-
2379
- def __init__(self, name: str, data: List[DataWithAxes] = [], **kwargs):
2380
- """
2381
-
2382
- Parameters
2383
- ----------
2384
- name
2385
- data
2386
- """
2387
- super().__init__(name)
2388
- if not isinstance(data, list):
2389
- raise TypeError('Data stored in a DataToExport object should be as a list of objects'
2390
- ' inherited from DataWithAxis')
2391
- self._data = []
2392
-
2393
- self.data = data
2394
- for key in kwargs:
2395
- setattr(self, key, kwargs[key])
2396
-
2397
- def plot(self, plotter_backend: str = config('plotting', 'backend'), *args, **kwargs):
2398
- """ Call a plotter factory and its plot method over the actual data"""
2399
- return plotter_factory.get(plotter_backend).plot(self, *args, **kwargs)
2400
-
2401
- def affect_name_to_origin_if_none(self):
2402
- """Affect self.name to all DataWithAxes children's attribute origin if this origin is not defined"""
2403
- for dat in self.data:
2404
- if dat.origin is None or dat.origin == '':
2405
- dat.origin = self.name
2406
-
2407
- def __sub__(self, other: object):
2408
- if isinstance(other, DataToExport) and len(other) == len(self):
2409
- new_data = copy.deepcopy(self)
2410
- for ind_dfp in range(len(self)):
2411
- new_data[ind_dfp] = self[ind_dfp] - other[ind_dfp]
2412
- return new_data
2413
- else:
2414
- raise TypeError(f'Could not substract a {other.__class__.__name__} or a {self.__class__.__name__} '
2415
- f'of a different length')
2416
-
2417
- def __add__(self, other: object):
2418
- if isinstance(other, DataToExport) and len(other) == len(self):
2419
- new_data = copy.deepcopy(self)
2420
- for ind_dfp in range(len(self)):
2421
- new_data[ind_dfp] = self[ind_dfp] + other[ind_dfp]
2422
- return new_data
2423
- else:
2424
- raise TypeError(f'Could not add a {other.__class__.__name__} or a {self.__class__.__name__} '
2425
- f'of a different length')
2426
-
2427
- def __mul__(self, other: object):
2428
- if isinstance(other, numbers.Number):
2429
- new_data = copy.deepcopy(self)
2430
- for ind_dfp in range(len(self)):
2431
- new_data[ind_dfp] = self[ind_dfp] * other
2432
- return new_data
2433
- else:
2434
- raise TypeError(f'Could not multiply a {other.__class__.__name__} with a {self.__class__.__name__} '
2435
- f'of a different length')
2436
-
2437
- def __truediv__(self, other: object):
2438
- if isinstance(other, numbers.Number):
2439
- return self * (1 / other)
2440
- else:
2441
- raise TypeError(f'Could not divide a {other.__class__.__name__} with a {self.__class__.__name__} '
2442
- f'of a different length')
2443
-
2444
- def average(self, other: DataToExport, weight: int) -> DataToExport:
2445
- """ Compute the weighted average between self and other DataToExport and attributes it to self
2446
-
2447
- Parameters
2448
- ----------
2449
- other: DataToExport
2450
- weight: int
2451
- The weight the 'other_data' holds with respect to self
2452
-
2453
- """
2454
- if isinstance(other, DataToExport) and len(other) == len(self):
2455
- new_data = copy.copy(self)
2456
- for ind_dfp in range(len(self)):
2457
- new_data[ind_dfp] = self[ind_dfp].average(other[ind_dfp], weight)
2458
- return new_data
2459
- else:
2460
- raise TypeError(f'Could not average a {other.__class__.__name__} with a {self.__class__.__name__} '
2461
- f'of a different length')
2462
-
2463
- def merge_as_dwa(self, dim: Union[str, DataDim], name: str = None) -> DataRaw:
2464
- """ attempt to merge filtered dwa into one
2465
-
2466
- Only possible if all filtered dwa and underlying data have same shape
2467
-
2468
- Parameters
2469
- ----------
2470
- dim: DataDim or str
2471
- will only try to merge dwa having this dimensionality
2472
- name: str
2473
- The new name of the returned dwa
2474
- """
2475
- dim = enum_checker(DataDim, dim)
2476
-
2477
- filtered_data = self.get_data_from_dim(dim)
2478
- if len(filtered_data) != 0:
2479
- dwa = filtered_data[0].deepcopy()
2480
- for dwa_tmp in filtered_data[1:]:
2481
- if dwa_tmp.shape == dwa.shape and dwa_tmp.distribution == dwa.distribution:
2482
- dwa.append(dwa_tmp)
2483
- if name is None:
2484
- name = self.name
2485
- dwa.name = name
2486
- return dwa
2487
-
2488
- def __repr__(self):
2489
- repr = f'{self.__class__.__name__}: {self.name} <len:{len(self)}>\n'
2490
- for dwa in self:
2491
- repr += f' * {str(dwa)}\n'
2492
- return repr
2493
-
2494
- def __len__(self):
2495
- return len(self.data)
2496
-
2497
- def __iter__(self):
2498
- self._iter_index = 0
2499
- return self
2500
-
2501
- def __next__(self) -> DataWithAxes:
2502
- if self._iter_index < len(self):
2503
- self._iter_index += 1
2504
- return self.data[self._iter_index-1]
2505
- else:
2506
- raise StopIteration
2507
-
2508
- def __getitem__(self, item) -> Union[DataWithAxes, DataToExport]:
2509
- if isinstance(item, int) and 0 <= item < len(self):
2510
- return self.data[item]
2511
- elif isinstance(item, slice):
2512
- return DataToExport(self.name, data=[self[ind] for ind in list(range(len(self))[item])])
2513
- else:
2514
- raise IndexError(f'The index should be a positive integer lower than the data length')
2515
-
2516
- def __setitem__(self, key, value: DataWithAxes):
2517
- if isinstance(key, int) and 0 <= key < len(self) and isinstance(value, DataWithAxes):
2518
- self.data[key] = value
2519
- else:
2520
- raise IndexError(f'The index should be a positive integer lower than the data length')
2521
-
2522
- def get_names(self, dim: DataDim = None) -> List[str]:
2523
- """Get the names of the stored DataWithAxes, eventually filtered by dim
2524
-
2525
- Parameters
2526
- ----------
2527
- dim: DataDim or str
2528
-
2529
- Returns
2530
- -------
2531
- list of str: the names of the (filtered) DataWithAxes data
2532
- """
2533
- if dim is None:
2534
- return [data.name for data in self.data]
2535
- else:
2536
- return [data.name for data in self.get_data_from_dim(dim).data]
2537
-
2538
- def get_full_names(self, dim: DataDim = None):
2539
- """Get the ful names including the origin attribute into the returned value, eventually filtered by dim
2540
-
2541
- Parameters
2542
- ----------
2543
- dim: DataDim or str
2544
-
2545
- Returns
2546
- -------
2547
- list of str: the names of the (filtered) DataWithAxes data constructed as : origin/name
2548
-
2549
- Examples
2550
- --------
2551
- d0 = DataWithAxes(name='datafromdet0', origin='det0')
2552
- """
2553
- if dim is None:
2554
- return [data.get_full_name() for data in self.data]
2555
- else:
2556
- return [data.get_full_name() for data in self.get_data_from_dim(dim).data]
2557
-
2558
- def get_origins(self, dim: DataDim = None):
2559
- """Get the origins of the underlying data into the returned value, eventually filtered by dim
2560
-
2561
- Parameters
2562
- ----------
2563
- dim: DataDim or str
2564
-
2565
- Returns
2566
- -------
2567
- list of str: the origins of the (filtered) DataWithAxes data
2568
-
2569
- Examples
2570
- --------
2571
- d0 = DataWithAxes(name='datafromdet0', origin='det0')
2572
- """
2573
- if dim is None:
2574
- return list({dwa.origin for dwa in self.data})
2575
- else:
2576
- return list({dwa.origin for dwa in self.get_data_from_dim(dim).data})
2577
-
2578
-
2579
- def get_data_from_full_name(self, full_name: str, deepcopy=False) -> DataWithAxes:
2580
- """Get the DataWithAxes with matching full name"""
2581
- if deepcopy:
2582
- data = self.get_data_from_name_origin(full_name.split('/')[1], full_name.split('/')[0]).deepcopy()
2583
- else:
2584
- data = self.get_data_from_name_origin(full_name.split('/')[1], full_name.split('/')[0])
2585
- return data
2586
-
2587
- def get_data_from_full_names(self, full_names: List[str], deepcopy=False) -> DataToExport:
2588
- data = [self.get_data_from_full_name(full_name, deepcopy) for full_name in full_names]
2589
- return DataToExport(name=self.name, data=data)
2590
-
2591
- def get_dim_presents(self) -> List[str]:
2592
- dims = []
2593
- for dim in DataDim.names():
2594
- if len(self.get_data_from_dim(dim)) != 0:
2595
- dims.append(dim)
2596
-
2597
- return dims
2598
-
2599
- def get_data_from_source(self, source: DataSource, deepcopy=False) -> DataToExport:
2600
- """Get the data matching the given DataSource
2601
-
2602
- Returns
2603
- -------
2604
- DataToExport: filtered with data matching the dimensionality
2605
- """
2606
- source = enum_checker(DataSource, source)
2607
- return self.get_data_from_attribute('source', source, deepcopy=deepcopy)
2608
-
2609
- def get_data_from_missing_attribute(self, attribute: str, deepcopy=False) -> DataToExport:
2610
- """ Get the data matching a given attribute value
2611
-
2612
- Parameters
2613
- ----------
2614
- attribute: str
2615
- a string of a possible attribute
2616
- deepcopy: bool
2617
- if True the returned DataToExport will contain deepcopies of the DataWithAxes
2618
- Returns
2619
- -------
2620
- DataToExport: filtered with data missing the given attribute
2621
- """
2622
- if deepcopy:
2623
- return DataToExport(self.name, data=[dwa.deepcopy() for dwa in self if not hasattr(dwa, attribute)])
2624
- else:
2625
- return DataToExport(self.name, data=[dwa for dwa in self if not hasattr(dwa, attribute)])
2626
-
2627
- def get_data_from_attribute(self, attribute: str, attribute_value: Any, deepcopy=False) -> DataToExport:
2628
- """Get the data matching a given attribute value
2629
-
2630
- Returns
2631
- -------
2632
- DataToExport: filtered with data matching the attribute presence and value
2633
- """
2634
- selection = find_objects_in_list_from_attr_name_val(self.data, attribute, attribute_value,
2635
- return_first=False)
2636
- selection.sort(key=lambda elt: elt[0].name)
2637
- if deepcopy:
2638
- data = [sel[0].deepcopy() for sel in selection]
2639
- else:
2640
- data = [sel[0] for sel in selection]
2641
- return DataToExport(name=self.name, data=data)
2642
-
2643
- def get_data_from_dim(self, dim: DataDim, deepcopy=False) -> DataToExport:
2644
- """Get the data matching the given DataDim
2645
-
2646
- Returns
2647
- -------
2648
- DataToExport: filtered with data matching the dimensionality
2649
- """
2650
- dim = enum_checker(DataDim, dim)
2651
- return self.get_data_from_attribute('dim', dim, deepcopy=deepcopy)
2652
-
2653
- def get_data_from_dims(self, dims: List[DataDim], deepcopy=False) -> DataToExport:
2654
- """Get the data matching the given DataDim
2655
-
2656
- Returns
2657
- -------
2658
- DataToExport: filtered with data matching the dimensionality
2659
- """
2660
- data = DataToExport(name=self.name)
2661
- for dim in dims:
2662
- data.append(self.get_data_from_dim(dim, deepcopy=deepcopy))
2663
- return data
2664
-
2665
- def get_data_from_sig_axes(self, Naxes: int, deepcopy: bool = False) -> DataToExport:
2666
- """Get the data matching the given number of signal axes
2667
-
2668
- Parameters
2669
- ----------
2670
- Naxes: int
2671
- Number of signal axes in the DataWithAxes objects
2672
-
2673
- Returns
2674
- -------
2675
- DataToExport: filtered with data matching the number of signal axes
2676
- """
2677
- data = DataToExport(name=self.name)
2678
- for _data in self:
2679
- if len(_data.sig_indexes) == Naxes:
2680
- if deepcopy:
2681
- data.append(_data.deepcopy())
2682
- else:
2683
- data.append(_data)
2684
- return data
2685
-
2686
- def get_data_from_Naxes(self, Naxes: int, deepcopy: bool = False) -> DataToExport:
2687
- """Get the data matching the given number of axes
2688
-
2689
- Parameters
2690
- ----------
2691
- Naxes: int
2692
- Number of axes in the DataWithAxes objects
2693
-
2694
- Returns
2695
- -------
2696
- DataToExport: filtered with data matching the number of axes
2697
- """
2698
- data = DataToExport(name=self.name)
2699
- for _data in self:
2700
- if len(_data.shape) == Naxes:
2701
- if deepcopy:
2702
- data.append(_data.deepcopy())
2703
- else:
2704
- data.append(_data)
2705
- return data
2706
-
2707
- def get_data_with_naxes_lower_than(self, n_axes=2, deepcopy: bool = False) -> DataToExport:
2708
- """Get the data with n axes lower than the given number
2709
-
2710
- Parameters
2711
- ----------
2712
- Naxes: int
2713
- Number of axes in the DataWithAxes objects
2714
-
2715
- Returns
2716
- -------
2717
- DataToExport: filtered with data matching the number of axes
2718
- """
2719
- data = DataToExport(name=self.name)
2720
- for _data in self:
2721
- if _data.n_axes <= n_axes:
2722
- if deepcopy:
2723
- data.append(_data.deepcopy())
2724
- else:
2725
- data.append(_data)
2726
- return data
2727
-
2728
- def get_data_from_name(self, name: str) -> DataWithAxes:
2729
- """Get the data matching the given name"""
2730
- data, _ = find_objects_in_list_from_attr_name_val(self.data, 'name', name, return_first=True)
2731
- return data
2732
-
2733
- def get_data_from_names(self, names: List[str]) -> DataToExport:
2734
- return DataToExport(self.name, data=[dwa for dwa in self if dwa.name in names])
2735
-
2736
- def get_data_from_name_origin(self, name: str, origin: str = '') -> DataWithAxes:
2737
- """Get the data matching the given name and the given origin"""
2738
- if origin == '':
2739
- data, _ = find_objects_in_list_from_attr_name_val(self.data, 'name', name, return_first=True)
2740
- else:
2741
- selection = find_objects_in_list_from_attr_name_val(self.data, 'name', name, return_first=False)
2742
- selection = [sel[0] for sel in selection]
2743
- data, _ = find_objects_in_list_from_attr_name_val(selection, 'origin', origin)
2744
- return data
2745
-
2746
- def index(self, data: DataWithAxes):
2747
- return self.data.index(data)
2748
-
2749
- def index_from_name_origin(self, name: str, origin: str = '') -> List[DataWithAxes]:
2750
- """Get the index of a given DataWithAxes within the list of data"""
2751
- """Get the data matching the given name and the given origin"""
2752
- if origin == '':
2753
- _, index = find_objects_in_list_from_attr_name_val(self.data, 'name', name, return_first=True)
2754
- else:
2755
- selection = find_objects_in_list_from_attr_name_val(self.data, 'name', name, return_first=False)
2756
- data_selection = [sel[0] for sel in selection]
2757
- index_selection = [sel[1] for sel in selection]
2758
- _, index = find_objects_in_list_from_attr_name_val(data_selection, 'origin', origin)
2759
- index = index_selection[index]
2760
- return index
2761
-
2762
- def pop(self, index: int) -> DataWithAxes:
2763
- """return and remove the DataWithAxes referred by its index
2764
-
2765
- Parameters
2766
- ----------
2767
- index: int
2768
- index as returned by self.index_from_name_origin
2769
-
2770
- See Also
2771
- --------
2772
- index_from_name_origin
2773
- """
2774
- return self.data.pop(index)
2775
-
2776
- def remove(self, dwa: DataWithAxes):
2777
- return self.pop(self.data.index(dwa))
2778
-
2779
- @property
2780
- def data(self) -> List[DataWithAxes]:
2781
- """List[DataWithAxes]: get the data contained in the object"""
2782
- return self._data
2783
-
2784
- @data.setter
2785
- def data(self, new_data: List[DataWithAxes]):
2786
- for dat in new_data:
2787
- self._check_data_type(dat)
2788
- self._data[:] = [dat for dat in new_data] # shallow copyto make sure that if the original
2789
- # list is changed, the change will not be applied in here
2790
-
2791
- self.affect_name_to_origin_if_none()
2792
-
2793
- @staticmethod
2794
- def _check_data_type(data: DataWithAxes):
2795
- """Make sure data is a DataWithAxes object or inherited"""
2796
- if not isinstance(data, DataWithAxes):
2797
- raise TypeError('Data stored in a DataToExport object should be objects inherited from DataWithAxis')
2798
-
2799
- def deepcopy(self):
2800
- return DataToExport('Copy', data=[data.deepcopy() for data in self])
2801
-
2802
- @dispatch(list)
2803
- def append(self, data_list: List[DataWithAxes]):
2804
- for dwa in data_list:
2805
- self.append(dwa)
2806
-
2807
- @dispatch(DataWithAxes)
2808
- def append(self, dwa: DataWithAxes):
2809
- """Append/replace DataWithAxes object to the data attribute
2810
-
2811
- Make sure only one DataWithAxes object with a given name is in the list except if they don't have the same
2812
- origin identifier
2813
- """
2814
- dwa = dwa.deepcopy()
2815
- self._check_data_type(dwa)
2816
- obj = self.get_data_from_name_origin(dwa.name, dwa.origin)
2817
- if obj is not None:
2818
- self._data.pop(self.data.index(obj))
2819
- self._data.append(dwa)
2820
-
2821
- @dispatch(object)
2822
- def append(self, dte: DataToExport):
2823
- if isinstance(dte, DataToExport):
2824
- self.append(dte.data)
2825
-
2826
-
2827
91
  class DataScan(DataToExport):
2828
92
  """Specialized DataToExport.To be used for data to be saved """
2829
93
  def __init__(self, name: str, data: List[DataWithAxes] = [], **kwargs):
@@ -2848,7 +112,8 @@ class DataToActuators(DataToExport):
2848
112
 
2849
113
  def __init__(self, *args, mode='rel', **kwargs):
2850
114
  if mode not in ['rel', 'abs']:
2851
- warnings.warn('Incorrect mode for the actuators, switching to default relative mode: rel')
115
+ user_warning('Incorrect mode for the actuators, '
116
+ 'switching to default relative mode: rel')
2852
117
  mode = 'rel'
2853
118
  kwargs.update({'mode': mode})
2854
119
  super().__init__(*args, **kwargs)
@@ -2856,33 +121,3 @@ class DataToActuators(DataToExport):
2856
121
  def __repr__(self):
2857
122
  return f'{super().__repr__()}: {self.mode}'
2858
123
 
2859
-
2860
-
2861
- if __name__ == '__main__':
2862
-
2863
-
2864
- d1 = DataFromRoi(name=f'Hlineout_', data=[np.zeros((24,))],
2865
- x_axis=Axis(data=np.zeros((24,)), units='myunits', label='mylabel1'))
2866
- d2 = DataFromRoi(name=f'Hlineout_', data=[np.zeros((12,))],
2867
- x_axis=Axis(data=np.zeros((12,)),
2868
- units='myunits2',
2869
- label='mylabel2'))
2870
-
2871
- Nsig = 200
2872
- Nnav = 10
2873
- x = np.linspace(-Nsig/2, Nsig/2-1, Nsig)
2874
-
2875
- dat = np.zeros((Nnav, Nsig))
2876
- for ind in range(Nnav):
2877
- dat[ind] = mutils.gauss1D(x, 50 * (ind -Nnav / 2), 25 / np.sqrt(2))
2878
-
2879
- data = DataRaw('mydata', data=[dat], nav_indexes=(0,),
2880
- axes=[Axis('nav', data=np.linspace(0, Nnav-1, Nnav), index=0),
2881
- Axis('sig', data=x, index=1)])
2882
-
2883
- data2 = copy.copy(data)
2884
-
2885
- data3 = data.deepcopy_with_new_data([np.sum(dat, 1)], remove_axes_index=(1,))
2886
-
2887
- print('done')
2888
-