cgse 2023.38.0__py3-none-any.whl → 2024.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (653)
  1. README.md +27 -0
  2. bump.py +85 -0
  3. cgse-2024.1.4.dist-info/METADATA +38 -0
  4. cgse-2024.1.4.dist-info/RECORD +5 -0
  5. {cgse-2023.38.0.dist-info → cgse-2024.1.4.dist-info}/WHEEL +1 -2
  6. cgse-2023.38.0.dist-info/COPYING +0 -674
  7. cgse-2023.38.0.dist-info/COPYING.LESSER +0 -165
  8. cgse-2023.38.0.dist-info/METADATA +0 -144
  9. cgse-2023.38.0.dist-info/RECORD +0 -649
  10. cgse-2023.38.0.dist-info/entry_points.txt +0 -75
  11. cgse-2023.38.0.dist-info/top_level.txt +0 -2
  12. egse/__init__.py +0 -12
  13. egse/__main__.py +0 -32
  14. egse/aeu/aeu.py +0 -5235
  15. egse/aeu/aeu_awg.yaml +0 -265
  16. egse/aeu/aeu_crio.yaml +0 -273
  17. egse/aeu/aeu_cs.py +0 -626
  18. egse/aeu/aeu_devif.py +0 -321
  19. egse/aeu/aeu_main_ui.py +0 -912
  20. egse/aeu/aeu_metrics.py +0 -131
  21. egse/aeu/aeu_protocol.py +0 -463
  22. egse/aeu/aeu_psu.yaml +0 -204
  23. egse/aeu/aeu_ui.py +0 -873
  24. egse/aeu/arbdata/FccdRead.arb +0 -2
  25. egse/aeu/arbdata/FccdRead_min_points.arb +0 -2
  26. egse/aeu/arbdata/HeaterSync_FccdRead.arb +0 -2
  27. egse/aeu/arbdata/HeaterSync_ccdRead25.arb +0 -2
  28. egse/aeu/arbdata/HeaterSync_ccdRead31_25.arb +0 -2
  29. egse/aeu/arbdata/HeaterSync_ccdRead37_50.arb +0 -2
  30. egse/aeu/arbdata/HeaterSync_ccdRead43_75.arb +0 -2
  31. egse/aeu/arbdata/HeaterSync_ccdRead50.arb +0 -2
  32. egse/aeu/arbdata/Heater_FccdRead_min_points.arb +0 -2
  33. egse/aeu/arbdata/ccdRead25.arb +0 -2
  34. egse/aeu/arbdata/ccdRead25_150ms.arb +0 -2
  35. egse/aeu/arbdata/ccdRead31_25.arb +0 -2
  36. egse/aeu/arbdata/ccdRead31_25_150ms.arb +0 -2
  37. egse/aeu/arbdata/ccdRead37_50.arb +0 -2
  38. egse/aeu/arbdata/ccdRead37_50_150ms.arb +0 -2
  39. egse/aeu/arbdata/ccdRead43_75.arb +0 -2
  40. egse/aeu/arbdata/ccdRead43_75_150ms.arb +0 -2
  41. egse/aeu/arbdata/ccdRead50.arb +0 -2
  42. egse/aeu/arbdata/ccdRead50_150ms.arb +0 -2
  43. egse/alert/__init__.py +0 -1049
  44. egse/alert/alertman.yaml +0 -37
  45. egse/alert/alertman_cs.py +0 -234
  46. egse/alert/alertman_ui.py +0 -603
  47. egse/alert/gsm/beaglebone.py +0 -138
  48. egse/alert/gsm/beaglebone.yaml +0 -51
  49. egse/alert/gsm/beaglebone_cs.py +0 -108
  50. egse/alert/gsm/beaglebone_devif.py +0 -130
  51. egse/alert/gsm/beaglebone_protocol.py +0 -48
  52. egse/bits.py +0 -318
  53. egse/camera.py +0 -44
  54. egse/collimator/__init__.py +0 -0
  55. egse/collimator/fcul/__init__.py +0 -0
  56. egse/collimator/fcul/ogse.py +0 -1077
  57. egse/collimator/fcul/ogse.yaml +0 -14
  58. egse/collimator/fcul/ogse_cs.py +0 -154
  59. egse/collimator/fcul/ogse_devif.py +0 -358
  60. egse/collimator/fcul/ogse_protocol.py +0 -129
  61. egse/collimator/fcul/ogse_sim.py +0 -431
  62. egse/collimator/fcul/ogse_ui.py +0 -1108
  63. egse/command.py +0 -699
  64. egse/config.py +0 -410
  65. egse/confman/__init__.py +0 -1015
  66. egse/confman/confman.yaml +0 -67
  67. egse/confman/confman_cs.py +0 -239
  68. egse/confman/confman_ui.py +0 -381
  69. egse/confman/setup_ui.py +0 -565
  70. egse/control.py +0 -442
  71. egse/coordinates/__init__.py +0 -531
  72. egse/coordinates/avoidance.py +0 -103
  73. egse/coordinates/cslmodel.py +0 -127
  74. egse/coordinates/laser_tracker_to_dict.py +0 -120
  75. egse/coordinates/point.py +0 -707
  76. egse/coordinates/pyplot.py +0 -195
  77. egse/coordinates/referenceFrame.py +0 -1279
  78. egse/coordinates/refmodel.py +0 -737
  79. egse/coordinates/rotationMatrix.py +0 -85
  80. egse/coordinates/transform3d_addon.py +0 -419
  81. egse/csl/__init__.py +0 -50
  82. egse/csl/commanding.py +0 -78
  83. egse/csl/icons/hexapod-connected-selected.svg +0 -30
  84. egse/csl/icons/hexapod-connected.svg +0 -30
  85. egse/csl/icons/hexapod-homing-selected.svg +0 -68
  86. egse/csl/icons/hexapod-homing.svg +0 -68
  87. egse/csl/icons/hexapod-retract-selected.svg +0 -56
  88. egse/csl/icons/hexapod-retract.svg +0 -51
  89. egse/csl/icons/hexapod-zero-selected.svg +0 -56
  90. egse/csl/icons/hexapod-zero.svg +0 -56
  91. egse/csl/icons/logo-puna.svg +0 -92
  92. egse/csl/icons/stop.svg +0 -1
  93. egse/csl/initialisation.py +0 -102
  94. egse/csl/mech_pos_settings.yaml +0 -18
  95. egse/das.py +0 -1247
  96. egse/das.yaml +0 -7
  97. egse/data/conf/SETUP_CSL_00000_170620_150000.yaml +0 -5
  98. egse/data/conf/SETUP_CSL_00001_170620_151010.yaml +0 -69
  99. egse/data/conf/SETUP_CSL_00002_170620_151020.yaml +0 -69
  100. egse/data/conf/SETUP_CSL_00003_170620_151030.yaml +0 -69
  101. egse/data/conf/SETUP_CSL_00004_170620_151040.yaml +0 -69
  102. egse/data/conf/SETUP_CSL_00005_170620_151050.yaml +0 -69
  103. egse/data/conf/SETUP_CSL_00006_170620_151060.yaml +0 -69
  104. egse/data/conf/SETUP_CSL_00007_170620_151070.yaml +0 -69
  105. egse/data/conf/SETUP_CSL_00008_170620_151080.yaml +0 -75
  106. egse/data/conf/SETUP_CSL_00010_210308_083016.yaml +0 -138
  107. egse/data/conf/SETUP_INTA_00000_170620_150000.yaml +0 -4
  108. egse/data/conf/SETUP_SRON_00000_170620_150000.yaml +0 -4
  109. egse/decorators.py +0 -415
  110. egse/device.py +0 -269
  111. egse/dpu/__init__.py +0 -2681
  112. egse/dpu/ccd_ui.py +0 -508
  113. egse/dpu/dpu.py +0 -786
  114. egse/dpu/dpu.yaml +0 -153
  115. egse/dpu/dpu_cs.py +0 -272
  116. egse/dpu/dpu_ui.py +0 -668
  117. egse/dpu/fitsgen.py +0 -2077
  118. egse/dpu/fitsgen_test.py +0 -752
  119. egse/dpu/fitsgen_ui.py +0 -399
  120. egse/dpu/hdf5_model.py +0 -332
  121. egse/dpu/hdf5_ui.py +0 -277
  122. egse/dpu/hdf5_viewer.py +0 -506
  123. egse/dpu/hk_ui.py +0 -468
  124. egse/dpu_commands.py +0 -81
  125. egse/dsi/constants.py +0 -220
  126. egse/dsi/esl.py +0 -870
  127. egse/dsi/rmap.py +0 -1042
  128. egse/dsi/rmapci.py +0 -37
  129. egse/dsi/spw.py +0 -154
  130. egse/dsi/spw_state.py +0 -29
  131. egse/dummy.py +0 -258
  132. egse/dyndummy.py +0 -179
  133. egse/env.py +0 -278
  134. egse/exceptions.py +0 -88
  135. egse/fdir/__init__.py +0 -28
  136. egse/fdir/fdir_manager.py +0 -85
  137. egse/fdir/fdir_manager.yaml +0 -51
  138. egse/fdir/fdir_manager_controller.py +0 -228
  139. egse/fdir/fdir_manager_cs.py +0 -164
  140. egse/fdir/fdir_manager_interface.py +0 -25
  141. egse/fdir/fdir_remote.py +0 -73
  142. egse/fdir/fdir_remote.yaml +0 -37
  143. egse/fdir/fdir_remote_controller.py +0 -50
  144. egse/fdir/fdir_remote_cs.py +0 -97
  145. egse/fdir/fdir_remote_interface.py +0 -14
  146. egse/fdir/fdir_remote_popup.py +0 -31
  147. egse/fee/__init__.py +0 -114
  148. egse/fee/f_fee_register.yaml +0 -43
  149. egse/fee/fee.py +0 -631
  150. egse/fee/feesim.py +0 -750
  151. egse/fee/n_fee_hk.py +0 -761
  152. egse/fee/nfee.py +0 -187
  153. egse/filterwheel/__init__.py +0 -4
  154. egse/filterwheel/eksma/__init__.py +0 -24
  155. egse/filterwheel/eksma/fw8smc4.py +0 -661
  156. egse/filterwheel/eksma/fw8smc4.yaml +0 -121
  157. egse/filterwheel/eksma/fw8smc4_cs.py +0 -144
  158. egse/filterwheel/eksma/fw8smc4_devif.py +0 -473
  159. egse/filterwheel/eksma/fw8smc4_protocol.py +0 -81
  160. egse/filterwheel/eksma/fw8smc4_ui.py +0 -940
  161. egse/filterwheel/eksma/fw8smc5.py +0 -111
  162. egse/filterwheel/eksma/fw8smc5.yaml +0 -105
  163. egse/filterwheel/eksma/fw8smc5_controller.py +0 -307
  164. egse/filterwheel/eksma/fw8smc5_cs.py +0 -141
  165. egse/filterwheel/eksma/fw8smc5_interface.py +0 -65
  166. egse/filterwheel/eksma/fw8smc5_simulator.py +0 -29
  167. egse/filterwheel/eksma/fw8smc5_ui.py +0 -1068
  168. egse/filterwheel/eksma/testpythonfw.py +0 -215
  169. egse/fov/__init__.py +0 -65
  170. egse/fov/fov_hk.py +0 -712
  171. egse/fov/fov_ui.py +0 -861
  172. egse/fov/fov_ui_controller.py +0 -140
  173. egse/fov/fov_ui_model.py +0 -200
  174. egse/fov/fov_ui_view.py +0 -345
  175. egse/gimbal/__init__.py +0 -32
  176. egse/gimbal/symetrie/__init__.py +0 -26
  177. egse/gimbal/symetrie/alpha.py +0 -586
  178. egse/gimbal/symetrie/generic_gimbal_ui.py +0 -1521
  179. egse/gimbal/symetrie/gimbal.py +0 -877
  180. egse/gimbal/symetrie/gimbal.yaml +0 -168
  181. egse/gimbal/symetrie/gimbal_cs.py +0 -183
  182. egse/gimbal/symetrie/gimbal_protocol.py +0 -135
  183. egse/gimbal/symetrie/gimbal_ui.py +0 -361
  184. egse/gimbal/symetrie/pmac.py +0 -1006
  185. egse/gimbal/symetrie/pmac_regex.py +0 -83
  186. egse/graph.py +0 -132
  187. egse/gui/__init__.py +0 -47
  188. egse/gui/buttons.py +0 -378
  189. egse/gui/focalplane.py +0 -1281
  190. egse/gui/formatter.py +0 -10
  191. egse/gui/led.py +0 -162
  192. egse/gui/limitswitch.py +0 -143
  193. egse/gui/mechanisms.py +0 -588
  194. egse/gui/states.py +0 -148
  195. egse/gui/stripchart.py +0 -729
  196. egse/gui/switch.py +0 -112
  197. egse/h5.py +0 -274
  198. egse/help/__init__.py +0 -0
  199. egse/help/help_ui.py +0 -126
  200. egse/hexapod/__init__.py +0 -32
  201. egse/hexapod/symetrie/__init__.py +0 -138
  202. egse/hexapod/symetrie/alpha.py +0 -874
  203. egse/hexapod/symetrie/dynalpha.py +0 -1387
  204. egse/hexapod/symetrie/hexapod_ui.py +0 -1516
  205. egse/hexapod/symetrie/pmac.py +0 -1010
  206. egse/hexapod/symetrie/pmac_regex.py +0 -83
  207. egse/hexapod/symetrie/puna.py +0 -1167
  208. egse/hexapod/symetrie/puna.yaml +0 -193
  209. egse/hexapod/symetrie/puna_cs.py +0 -196
  210. egse/hexapod/symetrie/puna_protocol.py +0 -131
  211. egse/hexapod/symetrie/puna_ui.py +0 -434
  212. egse/hexapod/symetrie/punaplus.py +0 -107
  213. egse/hexapod/symetrie/zonda.py +0 -872
  214. egse/hexapod/symetrie/zonda.yaml +0 -337
  215. egse/hexapod/symetrie/zonda_cs.py +0 -172
  216. egse/hexapod/symetrie/zonda_devif.py +0 -415
  217. egse/hexapod/symetrie/zonda_protocol.py +0 -119
  218. egse/hexapod/symetrie/zonda_ui.py +0 -449
  219. egse/hk.py +0 -765
  220. egse/icons/aeu-cs-start.svg +0 -117
  221. egse/icons/aeu-cs-stop.svg +0 -118
  222. egse/icons/aeu-cs.svg +0 -107
  223. egse/icons/aeu_cs-started.svg +0 -112
  224. egse/icons/aeu_cs-stopped.svg +0 -112
  225. egse/icons/aeu_cs.svg +0 -55
  226. egse/icons/alert.svg +0 -1
  227. egse/icons/arrow-double-left.png +0 -0
  228. egse/icons/arrow-double-right.png +0 -0
  229. egse/icons/arrow-up.svg +0 -11
  230. egse/icons/backward.svg +0 -1
  231. egse/icons/busy.svg +0 -1
  232. egse/icons/cleaning.svg +0 -115
  233. egse/icons/color-scheme.svg +0 -1
  234. egse/icons/cs-connected-alert.svg +0 -91
  235. egse/icons/cs-connected-disabled.svg +0 -43
  236. egse/icons/cs-connected.svg +0 -89
  237. egse/icons/cs-not-connected.svg +0 -44
  238. egse/icons/double-left-arrow.svg +0 -1
  239. egse/icons/double-right-arrow.svg +0 -1
  240. egse/icons/erase-disabled.svg +0 -19
  241. egse/icons/erase.svg +0 -59
  242. egse/icons/fitsgen-start.svg +0 -47
  243. egse/icons/fitsgen-stop.svg +0 -48
  244. egse/icons/fitsgen.svg +0 -1
  245. egse/icons/forward.svg +0 -1
  246. egse/icons/fov-hk-start.svg +0 -33
  247. egse/icons/fov-hk-stop.svg +0 -37
  248. egse/icons/fov-hk.svg +0 -1
  249. egse/icons/front-desk.svg +0 -1
  250. egse/icons/home-actioned.svg +0 -15
  251. egse/icons/home-disabled.svg +0 -15
  252. egse/icons/home.svg +0 -13
  253. egse/icons/info.svg +0 -1
  254. egse/icons/invalid.png +0 -0
  255. egse/icons/led-green.svg +0 -20
  256. egse/icons/led-grey.svg +0 -20
  257. egse/icons/led-orange.svg +0 -20
  258. egse/icons/led-red.svg +0 -20
  259. egse/icons/led-square-green.svg +0 -134
  260. egse/icons/led-square-grey.svg +0 -134
  261. egse/icons/led-square-orange.svg +0 -134
  262. egse/icons/led-square-red.svg +0 -134
  263. egse/icons/limit-switch-all-green.svg +0 -115
  264. egse/icons/limit-switch-all-red.svg +0 -117
  265. egse/icons/limit-switch-el+.svg +0 -116
  266. egse/icons/limit-switch-el-.svg +0 -117
  267. egse/icons/location-marker.svg +0 -1
  268. egse/icons/logo-dpu.svg +0 -48
  269. egse/icons/logo-gimbal.svg +0 -112
  270. egse/icons/logo-huber.svg +0 -23
  271. egse/icons/logo-ogse.svg +0 -31
  272. egse/icons/logo-puna.svg +0 -92
  273. egse/icons/logo-tcs.svg +0 -29
  274. egse/icons/logo-zonda.svg +0 -66
  275. egse/icons/maximize.svg +0 -1
  276. egse/icons/meter.svg +0 -1
  277. egse/icons/more.svg +0 -45
  278. egse/icons/n-fee-hk-start.svg +0 -24
  279. egse/icons/n-fee-hk-stop.svg +0 -25
  280. egse/icons/n-fee-hk.svg +0 -83
  281. egse/icons/observing-off.svg +0 -46
  282. egse/icons/observing-on.svg +0 -46
  283. egse/icons/open-document-hdf5.png +0 -0
  284. egse/icons/open-document-hdf5.svg +0 -21
  285. egse/icons/ops-mode.svg +0 -1
  286. egse/icons/play-green.svg +0 -17
  287. egse/icons/plugged-disabled.svg +0 -27
  288. egse/icons/plugged.svg +0 -21
  289. egse/icons/pm_ui.svg +0 -1
  290. egse/icons/power-button-green.svg +0 -27
  291. egse/icons/power-button-red.svg +0 -27
  292. egse/icons/power-button.svg +0 -27
  293. egse/icons/radar.svg +0 -1
  294. egse/icons/radioactive.svg +0 -2
  295. egse/icons/reload.svg +0 -1
  296. egse/icons/remote-control-off.svg +0 -28
  297. egse/icons/remote-control-on.svg +0 -28
  298. egse/icons/repeat-blue.svg +0 -15
  299. egse/icons/repeat.svg +0 -1
  300. egse/icons/settings.svg +0 -1
  301. egse/icons/shrink.svg +0 -1
  302. egse/icons/shutter.svg +0 -1
  303. egse/icons/sign-off.svg +0 -1
  304. egse/icons/sign-on.svg +0 -1
  305. egse/icons/sim-mode.svg +0 -1
  306. egse/icons/small-buttons-go.svg +0 -20
  307. egse/icons/small-buttons-minus.svg +0 -51
  308. egse/icons/small-buttons-plus.svg +0 -51
  309. egse/icons/sponge.svg +0 -220
  310. egse/icons/start-button-disabled.svg +0 -84
  311. egse/icons/start-button.svg +0 -50
  312. egse/icons/stop-button-disabled.svg +0 -84
  313. egse/icons/stop-button.svg +0 -50
  314. egse/icons/stop-red.svg +0 -17
  315. egse/icons/stop.svg +0 -1
  316. egse/icons/switch-disabled-square.svg +0 -87
  317. egse/icons/switch-disabled.svg +0 -15
  318. egse/icons/switch-off-square.svg +0 -87
  319. egse/icons/switch-off.svg +0 -72
  320. egse/icons/switch-on-square.svg +0 -87
  321. egse/icons/switch-on.svg +0 -61
  322. egse/icons/temperature-control.svg +0 -44
  323. egse/icons/th_ui_logo.svg +0 -1
  324. egse/icons/unplugged.svg +0 -23
  325. egse/icons/unvalid.png +0 -0
  326. egse/icons/user-interface.svg +0 -1
  327. egse/icons/vacuum.svg +0 -1
  328. egse/icons/valid.png +0 -0
  329. egse/icons/zoom-to-pixel-dark.svg +0 -64
  330. egse/icons/zoom-to-pixel-white.svg +0 -36
  331. egse/images/big-rotation-stage.png +0 -0
  332. egse/images/connected-100.png +0 -0
  333. egse/images/cross.svg +0 -6
  334. egse/images/disconnected-100.png +0 -0
  335. egse/images/gui-icon.png +0 -0
  336. egse/images/home.svg +0 -6
  337. egse/images/info-icon.png +0 -0
  338. egse/images/led-black.svg +0 -89
  339. egse/images/led-green.svg +0 -85
  340. egse/images/led-orange.svg +0 -85
  341. egse/images/led-red.svg +0 -85
  342. egse/images/load-icon.png +0 -0
  343. egse/images/load-setup.png +0 -0
  344. egse/images/load.png +0 -0
  345. egse/images/pause.png +0 -0
  346. egse/images/play-button.svg +0 -8
  347. egse/images/play.png +0 -0
  348. egse/images/process-status.png +0 -0
  349. egse/images/restart.png +0 -0
  350. egse/images/search.png +0 -0
  351. egse/images/sma.png +0 -0
  352. egse/images/start.png +0 -0
  353. egse/images/stop-button.svg +0 -8
  354. egse/images/stop.png +0 -0
  355. egse/images/switch-off.svg +0 -48
  356. egse/images/switch-on.svg +0 -48
  357. egse/images/undo.png +0 -0
  358. egse/images/update-button.svg +0 -11
  359. egse/imageviewer/exposureselection.py +0 -475
  360. egse/imageviewer/imageviewer.py +0 -198
  361. egse/imageviewer/matchfocalplane.py +0 -179
  362. egse/imageviewer/subfieldposition.py +0 -133
  363. egse/lampcontrol/__init__.py +0 -4
  364. egse/lampcontrol/beaglebone/beaglebone.py +0 -178
  365. egse/lampcontrol/beaglebone/beaglebone.yaml +0 -62
  366. egse/lampcontrol/beaglebone/beaglebone_cs.py +0 -106
  367. egse/lampcontrol/beaglebone/beaglebone_devif.py +0 -150
  368. egse/lampcontrol/beaglebone/beaglebone_protocol.py +0 -73
  369. egse/lampcontrol/energetiq/__init__.py +0 -22
  370. egse/lampcontrol/energetiq/eq99.yaml +0 -98
  371. egse/lampcontrol/energetiq/lampEQ99.py +0 -283
  372. egse/lampcontrol/energetiq/lampEQ99_cs.py +0 -128
  373. egse/lampcontrol/energetiq/lampEQ99_devif.py +0 -158
  374. egse/lampcontrol/energetiq/lampEQ99_encode_decode_errors.py +0 -73
  375. egse/lampcontrol/energetiq/lampEQ99_protocol.py +0 -69
  376. egse/lampcontrol/energetiq/lampEQ99_ui.py +0 -465
  377. egse/lib/CentOS-7/EtherSpaceLink_v34_86.dylib +0 -0
  378. egse/lib/CentOS-8/ESL-RMAP_v34_86.dylib +0 -0
  379. egse/lib/CentOS-8/EtherSpaceLink_v34_86.dylib +0 -0
  380. egse/lib/Debian/ESL-RMAP_v34_86.dylib +0 -0
  381. egse/lib/Debian/EtherSpaceLink_v34_86.dylib +0 -0
  382. egse/lib/Debian/libetherspacelink_v35_21.dylib +0 -0
  383. egse/lib/Linux/ESL-RMAP_v34_86.dylib +0 -0
  384. egse/lib/Linux/EtherSpaceLink_v34_86.dylib +0 -0
  385. egse/lib/Ubuntu-20/ESL-RMAP_v34_86.dylib +0 -0
  386. egse/lib/Ubuntu-20/EtherSpaceLink_v34_86.dylib +0 -0
  387. egse/lib/gssw/python3-gssw_2.2.3+31f63c9f-1_all.deb +0 -0
  388. egse/lib/macOS/ESL-RMAP_v34_86.dylib +0 -0
  389. egse/lib/macOS/EtherSpaceLink_v34_86.dylib +0 -0
  390. egse/lib/ximc/__pycache__/pyximc.cpython-38 2.pyc +0 -0
  391. egse/lib/ximc/__pycache__/pyximc.cpython-38.pyc +0 -0
  392. egse/lib/ximc/libximc.framework/Frameworks/libbindy.dylib +0 -0
  393. egse/lib/ximc/libximc.framework/Frameworks/libxiwrapper.dylib +0 -0
  394. egse/lib/ximc/libximc.framework/Headers/ximc.h +0 -5510
  395. egse/lib/ximc/libximc.framework/Resources/Info.plist +0 -42
  396. egse/lib/ximc/libximc.framework/Resources/keyfile.sqlite +0 -0
  397. egse/lib/ximc/libximc.framework/libbindy.so +0 -0
  398. egse/lib/ximc/libximc.framework/libximc +0 -0
  399. egse/lib/ximc/libximc.framework/libximc.so +0 -0
  400. egse/lib/ximc/libximc.framework/libximc.so.7.0.0 +0 -0
  401. egse/lib/ximc/libximc.framework/libxiwrapper.so +0 -0
  402. egse/lib/ximc/pyximc.py +0 -922
  403. egse/listener.py +0 -73
  404. egse/logger/__init__.py +0 -243
  405. egse/logger/log_cs.py +0 -321
  406. egse/metrics.py +0 -98
  407. egse/mixin.py +0 -464
  408. egse/monitoring.py +0 -95
  409. egse/ni/alarms/__init__.py +0 -26
  410. egse/ni/alarms/cdaq9375.py +0 -300
  411. egse/ni/alarms/cdaq9375.yaml +0 -89
  412. egse/ni/alarms/cdaq9375_cs.py +0 -130
  413. egse/ni/alarms/cdaq9375_devif.py +0 -183
  414. egse/ni/alarms/cdaq9375_protocol.py +0 -48
  415. egse/obs_inspection.py +0 -163
  416. egse/observer.py +0 -41
  417. egse/obsid.py +0 -163
  418. egse/powermeter/__init__.py +0 -0
  419. egse/powermeter/ni/__init__.py +0 -38
  420. egse/powermeter/ni/cdaq9184.py +0 -224
  421. egse/powermeter/ni/cdaq9184.yaml +0 -73
  422. egse/powermeter/ni/cdaq9184_cs.py +0 -130
  423. egse/powermeter/ni/cdaq9184_devif.py +0 -201
  424. egse/powermeter/ni/cdaq9184_protocol.py +0 -48
  425. egse/powermeter/ni/cdaq9184_ui.py +0 -544
  426. egse/powermeter/thorlabs/__init__.py +0 -25
  427. egse/powermeter/thorlabs/pm100a.py +0 -380
  428. egse/powermeter/thorlabs/pm100a.yaml +0 -132
  429. egse/powermeter/thorlabs/pm100a_cs.py +0 -136
  430. egse/powermeter/thorlabs/pm100a_devif.py +0 -127
  431. egse/powermeter/thorlabs/pm100a_protocol.py +0 -80
  432. egse/powermeter/thorlabs/pm100a_ui.py +0 -725
  433. egse/process.py +0 -451
  434. egse/procman/__init__.py +0 -811
  435. egse/procman/cannot_start_process_popup.py +0 -43
  436. egse/procman/procman.yaml +0 -49
  437. egse/procman/procman_cs.py +0 -201
  438. egse/procman/procman_ui.py +0 -2081
  439. egse/protocol.py +0 -603
  440. egse/proxy.py +0 -522
  441. egse/randomwalk.py +0 -140
  442. egse/reg.py +0 -585
  443. egse/reload.py +0 -122
  444. egse/reprocess.py +0 -675
  445. egse/resource.py +0 -333
  446. egse/rst.py +0 -135
  447. egse/search.py +0 -182
  448. egse/serialdevice.py +0 -190
  449. egse/services.py +0 -212
  450. egse/services.yaml +0 -51
  451. egse/settings.py +0 -379
  452. egse/settings.yaml +0 -980
  453. egse/setup.py +0 -1180
  454. egse/shutter/__init__.py +0 -0
  455. egse/shutter/thorlabs/__init__.py +0 -19
  456. egse/shutter/thorlabs/ksc101.py +0 -205
  457. egse/shutter/thorlabs/ksc101.yaml +0 -105
  458. egse/shutter/thorlabs/ksc101_cs.py +0 -136
  459. egse/shutter/thorlabs/ksc101_devif.py +0 -201
  460. egse/shutter/thorlabs/ksc101_protocol.py +0 -69
  461. egse/shutter/thorlabs/ksc101_ui.py +0 -548
  462. egse/shutter/thorlabs/sc10.py +0 -82
  463. egse/shutter/thorlabs/sc10.yaml +0 -52
  464. egse/shutter/thorlabs/sc10_controller.py +0 -81
  465. egse/shutter/thorlabs/sc10_cs.py +0 -108
  466. egse/shutter/thorlabs/sc10_interface.py +0 -25
  467. egse/shutter/thorlabs/sc10_simulator.py +0 -30
  468. egse/simulator.py +0 -41
  469. egse/slack.py +0 -61
  470. egse/socketdevice.py +0 -218
  471. egse/sockets.py +0 -218
  472. egse/spw.py +0 -1479
  473. egse/stages/__init__.py +0 -12
  474. egse/stages/aerotech/ensemble.py +0 -247
  475. egse/stages/aerotech/ensemble.yaml +0 -205
  476. egse/stages/aerotech/ensemble_controller.py +0 -275
  477. egse/stages/aerotech/ensemble_cs.py +0 -110
  478. egse/stages/aerotech/ensemble_interface.py +0 -132
  479. egse/stages/aerotech/ensemble_parameters.py +0 -433
  480. egse/stages/aerotech/ensemble_simulator.py +0 -27
  481. egse/stages/aerotech/mgse_sim.py +0 -193
  482. egse/stages/arun/smd3.py +0 -111
  483. egse/stages/arun/smd3.yaml +0 -68
  484. egse/stages/arun/smd3_controller.py +0 -472
  485. egse/stages/arun/smd3_cs.py +0 -112
  486. egse/stages/arun/smd3_interface.py +0 -53
  487. egse/stages/arun/smd3_simulator.py +0 -27
  488. egse/stages/arun/smd3_stop.py +0 -16
  489. egse/stages/huber/__init__.py +0 -49
  490. egse/stages/huber/smc9300.py +0 -904
  491. egse/stages/huber/smc9300.yaml +0 -63
  492. egse/stages/huber/smc9300_cs.py +0 -178
  493. egse/stages/huber/smc9300_devif.py +0 -345
  494. egse/stages/huber/smc9300_protocol.py +0 -111
  495. egse/stages/huber/smc9300_sim.py +0 -547
  496. egse/stages/huber/smc9300_ui.py +0 -973
  497. egse/state.py +0 -173
  498. egse/statemachine.py +0 -274
  499. egse/storage/__init__.py +0 -1004
  500. egse/storage/persistence.py +0 -2295
  501. egse/storage/storage.yaml +0 -72
  502. egse/storage/storage_cs.py +0 -214
  503. egse/styles/dark.qss +0 -343
  504. egse/styles/default.qss +0 -48
  505. egse/synoptics/__init__.py +0 -412
  506. egse/synoptics/syn.yaml +0 -9
  507. egse/synoptics/syn_cs.py +0 -195
  508. egse/system.py +0 -1408
  509. egse/tcs/__init__.py +0 -14
  510. egse/tcs/tcs.py +0 -874
  511. egse/tcs/tcs.yaml +0 -14
  512. egse/tcs/tcs_cs.py +0 -202
  513. egse/tcs/tcs_devif.py +0 -292
  514. egse/tcs/tcs_protocol.py +0 -177
  515. egse/tcs/tcs_sim.py +0 -177
  516. egse/tcs/tcs_ui.py +0 -543
  517. egse/tdms.py +0 -171
  518. egse/tempcontrol/__init__.py +0 -23
  519. egse/tempcontrol/agilent/agilent34970.py +0 -109
  520. egse/tempcontrol/agilent/agilent34970.yaml +0 -44
  521. egse/tempcontrol/agilent/agilent34970_cs.py +0 -116
  522. egse/tempcontrol/agilent/agilent34970_devif.py +0 -182
  523. egse/tempcontrol/agilent/agilent34970_protocol.py +0 -99
  524. egse/tempcontrol/agilent/agilent34972.py +0 -111
  525. egse/tempcontrol/agilent/agilent34972.yaml +0 -44
  526. egse/tempcontrol/agilent/agilent34972_cs.py +0 -117
  527. egse/tempcontrol/agilent/agilent34972_devif.py +0 -189
  528. egse/tempcontrol/agilent/agilent34972_protocol.py +0 -101
  529. egse/tempcontrol/beaglebone/beaglebone.py +0 -342
  530. egse/tempcontrol/beaglebone/beaglebone.yaml +0 -110
  531. egse/tempcontrol/beaglebone/beaglebone_cs.py +0 -117
  532. egse/tempcontrol/beaglebone/beaglebone_protocol.py +0 -135
  533. egse/tempcontrol/beaglebone/beaglebone_ui.py +0 -681
  534. egse/tempcontrol/digalox/digalox.py +0 -107
  535. egse/tempcontrol/digalox/digalox.yaml +0 -36
  536. egse/tempcontrol/digalox/digalox_cs.py +0 -112
  537. egse/tempcontrol/digalox/digalox_protocol.py +0 -55
  538. egse/tempcontrol/keithley/__init__.py +0 -33
  539. egse/tempcontrol/keithley/daq6510.py +0 -662
  540. egse/tempcontrol/keithley/daq6510.yaml +0 -105
  541. egse/tempcontrol/keithley/daq6510_cs.py +0 -163
  542. egse/tempcontrol/keithley/daq6510_devif.py +0 -343
  543. egse/tempcontrol/keithley/daq6510_protocol.py +0 -78
  544. egse/tempcontrol/keithley/daq6510_sim.py +0 -186
  545. egse/tempcontrol/lakeshore/__init__.py +0 -33
  546. egse/tempcontrol/lakeshore/lsci.py +0 -361
  547. egse/tempcontrol/lakeshore/lsci.yaml +0 -162
  548. egse/tempcontrol/lakeshore/lsci_cs.py +0 -174
  549. egse/tempcontrol/lakeshore/lsci_devif.py +0 -292
  550. egse/tempcontrol/lakeshore/lsci_protocol.py +0 -73
  551. egse/tempcontrol/lakeshore/lsci_ui.py +0 -389
  552. egse/tempcontrol/ni/__init__.py +0 -0
  553. egse/tempcontrol/spid/spid.py +0 -109
  554. egse/tempcontrol/spid/spid.yaml +0 -81
  555. egse/tempcontrol/spid/spid_controller.py +0 -279
  556. egse/tempcontrol/spid/spid_cs.py +0 -136
  557. egse/tempcontrol/spid/spid_protocol.py +0 -107
  558. egse/tempcontrol/spid/spid_ui.py +0 -727
  559. egse/tempcontrol/srs/__init__.py +0 -22
  560. egse/tempcontrol/srs/ptc10.py +0 -875
  561. egse/tempcontrol/srs/ptc10.yaml +0 -227
  562. egse/tempcontrol/srs/ptc10_cs.py +0 -128
  563. egse/tempcontrol/srs/ptc10_devif.py +0 -118
  564. egse/tempcontrol/srs/ptc10_protocol.py +0 -42
  565. egse/tempcontrol/srs/ptc10_ui.py +0 -906
  566. egse/ups/apc/apc.py +0 -236
  567. egse/ups/apc/apc.yaml +0 -45
  568. egse/ups/apc/apc_cs.py +0 -101
  569. egse/ups/apc/apc_protocol.py +0 -125
  570. egse/user.yaml +0 -7
  571. egse/vacuum/beaglebone/beaglebone.py +0 -149
  572. egse/vacuum/beaglebone/beaglebone.yaml +0 -44
  573. egse/vacuum/beaglebone/beaglebone_cs.py +0 -108
  574. egse/vacuum/beaglebone/beaglebone_devif.py +0 -164
  575. egse/vacuum/beaglebone/beaglebone_protocol.py +0 -193
  576. egse/vacuum/beaglebone/beaglebone_ui.py +0 -638
  577. egse/vacuum/instrutech/igm402.py +0 -92
  578. egse/vacuum/instrutech/igm402.yaml +0 -90
  579. egse/vacuum/instrutech/igm402_controller.py +0 -128
  580. egse/vacuum/instrutech/igm402_cs.py +0 -108
  581. egse/vacuum/instrutech/igm402_interface.py +0 -49
  582. egse/vacuum/instrutech/igm402_simulator.py +0 -36
  583. egse/vacuum/keller/kellerBus.py +0 -256
  584. egse/vacuum/keller/leo3.py +0 -102
  585. egse/vacuum/keller/leo3.yaml +0 -38
  586. egse/vacuum/keller/leo3_controller.py +0 -83
  587. egse/vacuum/keller/leo3_cs.py +0 -101
  588. egse/vacuum/keller/leo3_interface.py +0 -33
  589. egse/vacuum/mks/evision.py +0 -86
  590. egse/vacuum/mks/evision.yaml +0 -75
  591. egse/vacuum/mks/evision_cs.py +0 -101
  592. egse/vacuum/mks/evision_devif.py +0 -316
  593. egse/vacuum/mks/evision_interface.py +0 -60
  594. egse/vacuum/mks/evision_simulator.py +0 -24
  595. egse/vacuum/mks/evision_ui.py +0 -704
  596. egse/vacuum/pfeiffer/acp40.py +0 -87
  597. egse/vacuum/pfeiffer/acp40.yaml +0 -60
  598. egse/vacuum/pfeiffer/acp40_controller.py +0 -117
  599. egse/vacuum/pfeiffer/acp40_cs.py +0 -109
  600. egse/vacuum/pfeiffer/acp40_interface.py +0 -40
  601. egse/vacuum/pfeiffer/acp40_simulator.py +0 -39
  602. egse/vacuum/pfeiffer/tc400.py +0 -113
  603. egse/vacuum/pfeiffer/tc400.yaml +0 -83
  604. egse/vacuum/pfeiffer/tc400_controller.py +0 -140
  605. egse/vacuum/pfeiffer/tc400_cs.py +0 -109
  606. egse/vacuum/pfeiffer/tc400_interface.py +0 -70
  607. egse/vacuum/pfeiffer/tc400_simulator.py +0 -24
  608. egse/vacuum/pfeiffer/tpg261.py +0 -81
  609. egse/vacuum/pfeiffer/tpg261.yaml +0 -66
  610. egse/vacuum/pfeiffer/tpg261_controller.py +0 -150
  611. egse/vacuum/pfeiffer/tpg261_cs.py +0 -109
  612. egse/vacuum/pfeiffer/tpg261_interface.py +0 -60
  613. egse/vacuum/pfeiffer/tpg261_simulator.py +0 -24
  614. egse/version.py +0 -174
  615. egse/visitedpositions.py +0 -398
  616. egse/windowing.py +0 -213
  617. egse/zmq/__init__.py +0 -28
  618. egse/zmq/spw.py +0 -160
  619. egse/zmq_ser.py +0 -41
  620. scripts/alerts/cold.yaml +0 -278
  621. scripts/alerts/example_alerts.yaml +0 -54
  622. scripts/alerts/transition.yaml +0 -14
  623. scripts/alerts/warm.yaml +0 -49
  624. scripts/analyse_n_fee_hk_data.py +0 -44
  625. scripts/check_hdf5_files.py +0 -192
  626. scripts/check_register_sync.py +0 -47
  627. scripts/create_hdf5_report.py +0 -295
  628. scripts/csl_model.py +0 -436
  629. scripts/csl_restore_setup.py +0 -230
  630. scripts/export-grafana-dashboards.py +0 -50
  631. scripts/fdir/cs_recovery/fdir_cs_recovery.py +0 -59
  632. scripts/fdir/fdir_table.yaml +0 -70
  633. scripts/fdir/fdir_test_recovery.py +0 -11
  634. scripts/fdir/hw_recovery/fdir_agilent_hw_recovery.py +0 -73
  635. scripts/fdir/limit_recovery/fdir_agilent_limit.py +0 -64
  636. scripts/fdir/limit_recovery/fdir_bb_heater_limit.py +0 -61
  637. scripts/fdir/limit_recovery/fdir_ensemble_limit.py +0 -33
  638. scripts/fdir/limit_recovery/fdir_pressure_limit_recovery.py +0 -71
  639. scripts/fix_csv.py +0 -80
  640. scripts/n_fee_supply_voltage_calculation.py +0 -92
  641. scripts/playground.py +0 -30
  642. scripts/print_hdf5_hk_data.py +0 -68
  643. scripts/print_register_map.py +0 -43
  644. scripts/sron/commanding/control_heaters.py +0 -44
  645. scripts/sron/commanding/pumpdown.py +0 -46
  646. scripts/sron/commanding/set_pid_setpoint.py +0 -19
  647. scripts/sron/commanding/shutdown_bbb_heaters.py +0 -10
  648. scripts/sron/commanding/shutdown_pumps.py +0 -33
  649. scripts/sron/tm_gen/tm_gen_agilent.py +0 -38
  650. scripts/sron/tm_gen/tm_gen_heaters.py +0 -4
  651. scripts/sron/tm_gen/tm_gen_spid.py +0 -13
  652. scripts/update_operational_cgse.py +0 -268
  653. scripts/update_operational_cgse_old.py +0 -273
egse/dpu/fitsgen.py DELETED
@@ -1,2077 +0,0 @@
1
- """
2
- This module defines the FITS generation process.
3
-
4
- The FITS generation process connects to the monitoring channel of the DPU control server and starts processing
5
- HDF5 files as soon as they are available. The FITS generation can also be started off-line to process a list
6
- of HDF5 files or to process a given Observation (OBSID).
7
-
8
- """
9
- import glob
10
- import logging
11
- import multiprocessing
12
- import os
13
- import pickle
14
- import threading
15
- from datetime import timedelta, datetime
16
- from enum import Enum
17
- from pathlib import Path, PosixPath
18
- from typing import List, Mapping
19
-
20
- import click
21
- import invoke
22
- import natsort
23
- import numpy as np
24
- import persistqueue
25
- import rich
26
- import sys
27
- import time
28
- import zmq
29
- from astropy.io import fits
30
- from h5py import File
31
- from h5py._hl.attrs import AttributeManager
32
- from itertools import chain
33
- from scipy.interpolate import interp1d
34
-
35
- import egse
36
- from egse import h5
37
- from egse.config import find_file, find_files
38
- from egse.control import time_in_ms
39
- from egse.dpu import DPUMonitoring, get_expected_last_packet_flags
40
- from egse.dpu.dpu_cs import is_dpu_cs_active
41
- from egse.env import get_data_storage_location
42
- from egse.exceptions import Abort
43
- from egse.fee import convert_ccd_order_value
44
- from egse.fee import n_fee_mode
45
- from egse.fee.nfee import HousekeepingData
46
- from egse.hk import get_housekeeping, HKError
47
- from egse.obsid import ObservationIdentifier, LAB_SETUP_TEST, TEST_LAB, obsid_from_storage
48
- from egse.reg import RegisterMap
49
- from egse.settings import Settings
50
- from egse.setup import load_setup, Setup
51
- from egse.spw import SpaceWirePacket
52
- from egse.state import GlobalState
53
- from egse.storage import is_storage_manager_active
54
- from egse.storage.persistence import FITS, HDF5
55
- from egse.synoptics import ORIGIN as SYN_ORIGIN
56
- from egse.synoptics import get_synoptics_table
57
- from egse.system import time_since_epoch_1958, format_datetime, read_last_line
58
- from egse.zmq_ser import bind_address, connect_address
59
-
60
- LOGGER = logging.getLogger(__name__)
61
-
62
- N_FEE_SETTINGS = Settings.load("N-FEE")
63
- CCD_SETTINGS = Settings.load("CCD")
64
- SITE = Settings.load("SITE")
65
- CTRL_SETTINGS = Settings.load("FITS Generator Control Server")
66
- STORAGE_SETTINGS = Settings.load("Storage Control Server")
67
- DPU_SETTINGS = Settings.load("DPU")
68
-
69
- TIMEOUT_RECV = 1.0 # seconds
70
-
71
-
72
- def get_cycle_time(n_fee_state: Mapping, obsid=None, data_dir=None):
73
- """ Return the image cycle time.
74
-
75
- In the given N-FEE state parameters or register map, we check whether we are in internal or external sync:
76
-
77
- - Internal sync: Read the image cycle time from the given N-FEE state parameters or register map;
78
- - External sync: Get the image cycle time from the AEU (AWG2). In case of off-line FITS generation (i.e. from
79
- the HDF5 files), the image cycle time (for the specified obsid) is taken from the AEU housekeeping (AWG2).
80
- In case of on-line FITS generation, the image cycle time is queried from the AEU AWG2.
81
-
82
- Args:
83
- - n_fee_state: N-FEE state parameters or register map.
84
- - obsid: Observation identifier for which the image cycle time is read from the AEU housekeeping.
85
-
86
- Returns: Image cycle time [s].
87
- """
88
-
89
- # Internal sync -> use sync period from the N-FEE state
90
-
91
- if n_fee_state["sync_sel"] == 1:
92
- return n_fee_state["int_sync_period"] / 1000. # [ms] -> [s]
93
-
94
- # External sync -> use AEU sync pulses
95
-
96
- else:
97
- if obsid:
98
- try:
99
- return float(get_housekeeping("GAEU_EXT_CYCLE_TIME", obsid=obsid, data_dir=data_dir)[1])
100
- except HKError as exc: # See GitHub issue #2025
101
- LOGGER.warning("No HK available for AWG2 (using default cycle time of 25s)", exc)
102
- return 25.0
103
- else:
104
- return None
105
-
106
-
107
- def get_cgse_version(obsid=None, data_dir=None):
108
- """ Returns the version of the Common EGSE with which the FITS file was created.
109
-
110
- Args:
111
- - obsid: Observation identifier for which the version of the Common EGSE is read from the Configuration
112
- Manager housekeeping.
113
- """
114
-
115
- try:
116
- return None if obsid is None else get_housekeeping("CM_CGSE_VERSION", obsid=obsid, data_dir=data_dir)[1]
117
- except HKError:
118
- return None
119
-
120
-
121
- class FITSGenerator:
122
-
123
- def __init__(self):
124
- """ Generation of FITS files from HDF5 files with SpW packets.
125
-
126
- In a separate thread, the DPU monitoring puts the names of new HDF5 files with SpW packets in the queue. The
127
- FITS generator accesses this queue (FIFO) and stores the information in a FITS file.
128
-
129
- When there is a change in crucial parameters, the current FITS file (if any) will be closed and a new one will
130
- be created as soon as data packets start coming in (when the N-FEE is in full-image or full-image pattern mode).
131
- """
132
-
133
- # Queue with the full path of the HDF5 files that still need to be processed.
134
-
135
- self.hdf5_filename_queue = persistqueue.Queue(f"{get_data_storage_location()}/{DPU_SETTINGS['HDF5_QUEUE']}")
136
-
137
- # Name of the FITS file currently being written
138
- # (None if the N-FEE is not in full-image mode or in full-image pattern mode)
139
-
140
- self.fits_images_filename = None
141
- self.fits_cube_filename = None
142
-
143
- # Name of the HDF5 file currently being processed
144
-
145
- self.hdf5_filename = None
146
-
147
- # The last obsid that was/is being processed
148
-
149
- self.last_processed_obsid = None
150
-
151
- # Keep track of what was the N-FEE mode and what were the crucial parameters at the previous long pulse
152
- # (When we have checked whether a change has been detected, these values will be overwritten with the new ones)
153
-
154
- self.ccd_mode_config = None
155
- self.v_start = None
156
- self.v_end = None
157
- self.h_end = None
158
- self.ccd_readout_order = None
159
- # self.sensor_sel = None
160
- self.rows_final_dump = None
161
- self.setup = GlobalState.setup
162
- self.sensor_sel_enum = self.setup.camera.fee.sensor_sel.enum
163
- self.camera_name = self.setup.camera.ID
164
-
165
- self.config_slicing_num_cycles = 0 # Configured slicing parameter
166
- self.processed_num_cycles = 0 # HDF5 files with image data processed for current FITS file
167
-
168
- # self._quit_event = multiprocessing.Event()
169
-
170
- self.keep_processing_queue = True
171
-
172
- # The DPU monitoring should populate the queue in a separate thread
173
-
174
- self.dpu_monitoring_thread = threading.Thread(target=self.fill_queue)
175
- self.dpu_monitoring_thread.daemon = True
176
- self.dpu_monitoring_thread.start()
177
-
178
- # Processing the content of the queue should be done in a separate thread
179
-
180
- self.process_queue_thread = threading.Thread(target=self.process_queue)
181
- self.process_queue_thread.daemon = True
182
- self.process_queue_thread.start()
183
-
184
- def fill_queue(self):
185
- """
186
- The DPU monitoring fills the queue.
187
-
188
- Each time an HDF5 file with SpW packets is closed, the DPU monitoring puts the full path of this file on the
189
- queue.
190
- """
191
-
192
- dpu_monitoring_timeout = 30 # seconds
193
-
194
- with DPUMonitoring() as dpu_monitoring:
195
-
196
- start_time = time.time()
197
-
198
- while self.keep_processing_queue:
199
-
200
- try:
201
- hdf5_filename = dpu_monitoring.wait_for_hdf5_filename(retries=0, timeout=1.0)
202
- self.hdf5_filename_queue.put(hdf5_filename)
203
- start_time = time.time()
204
- except TimeoutError:
205
- if time.time() - start_time > dpu_monitoring_timeout:
206
- LOGGER.warning(f"DPU monitoring timeout, "
207
- f"no HDF5 filename received after {dpu_monitoring_timeout} seconds.")
208
- start_time = time.time()
209
-
210
- LOGGER.info(f"Broke out of monitoring loop {self.keep_processing_queue=}.")
211
-
212
- def run(self):
213
- """ Process the content of the queue.
214
-
215
- When there is a filename in the queue, take it from the queue:
216
-
217
- - If there is a change in crucial parameters, close the current FITS file (if any).
218
- - If there is a change in a crucial parameter and the N-FEE is in full-image mode or in full-image pattern
219
- mode, or the N-FEE goes to full-image mode or full-image pattern mode, a new FITS file will be created.
220
- - The content of the HDF5 files will be extracted and passed to the FITS persistence layer as SpW packets.
221
- """
222
- zcontext = zmq.Context.instance()
223
-
224
- monitoring_socket = zcontext.socket(zmq.PUB)
225
- monitoring_socket.bind(bind_address(CTRL_SETTINGS.PROTOCOL, CTRL_SETTINGS.MONITORING_PORT,))
226
-
227
- endpoint = bind_address(CTRL_SETTINGS.PROTOCOL, CTRL_SETTINGS.COMMANDING_PORT)
228
- commander = zcontext.socket(zmq.REP)
229
- commander.bind(endpoint)
230
-
231
- poller = zmq.Poller()
232
- poller.register(commander, zmq.POLLIN)
233
-
234
- last_time = time_in_ms()
235
-
236
- try:
237
- while True:
238
-
239
- if _check_commander_status(commander, poller):
240
-
241
- self.keep_processing_queue = False
242
- break
243
-
244
- if time_in_ms() - last_time >= 1000:
245
- last_time = time_in_ms()
246
-
247
- monitoring_info = {"hdf5": self.hdf5_filename,
248
- "fits": self.fits_cube_filename or self.fits_images_filename,
249
- "last obsid (being) processed": self.last_processed_obsid}
250
- pickle_string = pickle.dumps(monitoring_info)
251
- monitoring_socket.send(pickle_string)
252
-
253
- except KeyboardInterrupt:
254
- click.echo("KeyboardInterrupt caught!")
255
-
256
- self.keep_processing_queue = False
257
-
258
- # Clean up all open sockets and running threads
259
-
260
- poller.unregister(commander)
261
-
262
- LOGGER.info("Shutting down FITS generation")
263
-
264
- commander.close(linger=0)
265
- LOGGER.info("Commander closed.")
266
-
267
- monitoring_socket.close(linger=0)
268
- LOGGER.info("Monitoring socket closed.")
269
-
270
- # Check if the Monitoring and Processing Threads are finished
271
- # Since the default timeout on the DPU Monitoring is set to 30s for some reason, this may take some time
272
-
273
- LOGGER.info("Joining worker threads, this may take some time...")
274
- self.dpu_monitoring_thread.join()
275
- self.process_queue_thread.join()
276
- LOGGER.info("Worker threads terminated.")
277
-
278
- del self.hdf5_filename_queue
279
- LOGGER.info("HDF5 filename Queue deleted.")
280
-
281
- del zcontext
282
-
283
- def __del__(self):
284
- egse.logger.close_all_zmq_handlers()
285
-
286
- zmq.Context.instance().term()
287
- logging.getLogger().info("ZMQ Context terminated.")
288
-
289
- def process_queue(self):
290
-
291
- location = get_data_storage_location()
292
-
293
- syn_obsid = None
294
-
295
- while self.keep_processing_queue:
296
-
297
- # There is an HDF5 file ready for processing
298
-
299
- if not self.hdf5_filename_queue.empty():
300
-
301
- try:
302
-
303
- # Get the first item in the queue (FIFO) and open it
304
-
305
- item = self.hdf5_filename_queue.get()
306
- # LOGGER.info(f"HFD5 filename Queue {item = }")
307
- self.hdf5_filename = hdf5_filename = item[0]
308
- self.hdf5_filename_queue.task_done()
309
-
310
- LOGGER.info(f"Processing file {hdf5_filename}")
311
-
312
- with h5.get_file(hdf5_filename, mode="r", locking=False) as hdf5_file:
313
-
314
- LOGGER.info(f"Opened file {hdf5_filename}")
315
-
316
- # Check whether there is data in the HDF5
317
- # (if there is no data in the HDF5 file, nothing has to be done and you can go to the next file)
318
-
319
- try:
320
-
321
- # Slicing
322
-
323
- try:
324
- slicing_num_cycles = hdf5_file["dpu"].attrs["slicing_num_cycles"]
325
- if slicing_num_cycles != self.config_slicing_num_cycles:
326
- LOGGER.debug(f"Slicing parameter changed: {self.config_slicing_num_cycles} "
327
- f"-> {slicing_num_cycles}")
328
- self.close_fits()
329
- self.config_slicing_num_cycles = slicing_num_cycles
330
- except KeyError:
331
- self.config_slicing_num_cycles = 0
332
- LOGGER.debug("No slicing")
333
-
334
- # Obsid
335
-
336
- try:
337
- obsid = hdf5_file["obsid"][()].decode()
338
- # LOGGER.info(f"OBSID from HDF5 file: {obsid = }")
339
- obsid = ObservationIdentifier.create_from_string(obsid, order=LAB_SETUP_TEST)
340
- # LOGGER.info(f"OBSID from string: {obsid = !s}")
341
-
342
- self.last_processed_obsid = obsid_from_storage(
343
- obsid, data_dir=location, camera_name=self.camera_name)
344
- except (KeyError, ValueError) as exc:
345
- # KeyError: when no obsid is included in the HDF5 file
346
- # ValueError: when the format of the obsid does not match LAB_SETUP_TEST
347
- # Uncomment the following line when you need more debug info
348
- # LOGGER.warning(f"Exception caught: {exc.__class__.__name__} - {exc}", exc_info=False)
349
- obsid = None
350
- except AttributeError as exc:
351
- # AttributeError: when is this raised ??
352
- LOGGER.warning(f"Exception caught: {exc.__class__.__name__} - {exc}", exc_info=False)
353
- LOGGER.error(f"No data present for obsid {str(obsid)} in the obs folder, terminating ...")
354
- self.keep_processing_queue = False
355
- break
356
-
357
- register_map = RegisterMap("N-FEE", memory_map=h5.get_data(hdf5_file["register"]))
358
-
359
- # Loop over all groups in the current HDF5 file and check whether the "data" group is
360
- # present
361
-
362
- has_data = False
363
-
364
- for group in h5.groups(hdf5_file):
365
-
366
- if "data" in group.keys():
367
-
368
- has_data = True
369
-
370
- n_fee_state = group["data"].attrs
371
-
372
- # Check whether there is a change in crucial parameters or in the N-FEE mode
373
-
374
- if self.crucial_parameter_change(n_fee_state):
375
-
376
- self.close_fits()
377
-
378
- if in_data_acquisition_mode(n_fee_state):
379
-
380
- if self.fits_images_filename is None:
381
-
382
- # Start writing to a new FITS file
383
-
384
- self.fits_images_filename = construct_images_filename(
385
- hdf5_filename, obsid, location=location, camera_name=self.camera_name
386
- )
387
-
388
- ccd_readout_order = convert_ccd_order_value(self.ccd_readout_order)
389
-
390
- prep = {
391
- "v_start": self.v_start,
392
- "v_end": self.v_end,
393
- "h_end": self.h_end,
394
- "rows_final_dump": self.rows_final_dump,
395
- "ccd_mode_config": self.ccd_mode_config,
396
- "ccd_readout_order": ccd_readout_order, # CCD numbering [1-4]
397
- "expected_last_packet_flags": get_expected_last_packet_flags(
398
- n_fee_state, self.sensor_sel_enum),
399
- "obsid": str(obsid),
400
- "cycle_time": get_cycle_time(n_fee_state, obsid=obsid),
401
- "cgse_version": get_cgse_version(obsid=obsid),
402
- "setup": self.setup,
403
- "register_map": register_map,
404
- }
405
-
406
- persistence = FITS(self.fits_images_filename, prep)
407
- persistence.open()
408
-
409
- # See https://github.com/IvS-KULeuven/plato-common-egse/issues/901
410
- # timecode = group["timecode"]
411
- # spw_packet = SpaceWirePacket.create_packet(h5.get_data(timecode))
412
-
413
- timestamp = group["timecode"].attrs["timestamp"]
414
- persistence.create({"Timestamp": timestamp})
415
-
416
- data = group["data"]
417
- sorted_datasets = natsort.natsorted(data.items(), key=lambda x: x[0])
418
-
419
- persistence.expected_last_packet_flags = get_expected_last_packet_flags(
420
- n_fee_state, self.sensor_sel_enum)
421
-
422
- for identifier, dataset in sorted_datasets:
423
-
424
- spw_packet = SpaceWirePacket.create_packet(h5.get_data(dataset))
425
- persistence.create({f"SpW packet {identifier}": spw_packet})
426
-
427
- if has_data:
428
- self.processed_num_cycles += 1
429
- syn_obsid = obsid
430
-
431
- if self.config_slicing_num_cycles != 0 and \
432
- self.processed_num_cycles == self.config_slicing_num_cycles:
433
- self.close_fits()
434
-
435
- else:
436
-
437
- self.close_fits()
438
- self.clear_crucial_parameters()
439
-
440
- # When the previous HDF5 file still pertained to an observation and the current one doesn't,
441
- # it means that the observation has just finished and all FITS files have been generated. It
442
- # is only at this point that the synoptics can be included in the FITS headers.
443
-
444
- if syn_obsid is not None and obsid is None:
445
- LOGGER.info(f"Adding synoptics for {syn_obsid}")
446
- add_synoptics(syn_obsid, fits_dir=location, syn_dir=location)
447
- syn_obsid = None
448
- except KeyError:
449
- LOGGER.debug("KeyError occurred when accessing data in all groups of the HDF5 file.")
450
-
451
- except IndexError:
452
- LOGGER.debug("Queue contained an emtpy entry")
453
- except RuntimeError as exc:
454
- LOGGER.debug(f"Unable to open HDF5 file: {exc}")
455
-
456
- def clear_crucial_parameters(self):
457
- """ Clear the crucial parameters."""
458
-
459
- self.v_start = None
460
- self.v_end = None
461
- self.h_end = None
462
- self.rows_final_dump = None
463
- self.ccd_readout_order = None
464
- self.ccd_mode_config = None
465
-
466
- def close_fits(self):
467
-
468
- if self.fits_images_filename is not None:
469
-
470
- self.fits_cube_filename = construct_cube_filename(self.fits_images_filename)
471
- convert_to_cubes(self.fits_images_filename)
472
- self.fits_cube_filename = None
473
-
474
- # Stop writing to the current FITS file
475
-
476
- self.fits_images_filename = None
477
-
478
- # Reset the number of HDF5 files with image data processed for current FITS file
479
-
480
- self.processed_num_cycles = 0
481
-
482
- def crucial_parameter_change(self, n_fee_state: AttributeManager):
483
- """ Check for a change in crucial parameters.
484
-
485
- Crucial parameters are:
486
-
487
- - ccd_mode_config: readout mode;
488
- - v_start (int) and v_end(int): index of the first and the last row being transmitted;
489
- - h_end (int): index of the last serial readout of the readout register;
490
- - ccd_readout_order: CCDs that will be read out;
491
- # - sensor_sel: which side(s) of the CCD(s) that will be read out;
492
-
493
- Args:
494
- - n_fee_state: N-FEE state parameters.
495
-
496
- Returns: True if a change in crucial parameters has been detected; False otherwise.
497
- """
498
-
499
- ccd_mode_config = n_fee_state["ccd_mode_config"]
500
- v_start = n_fee_state["v_start"]
501
- v_end = n_fee_state["v_end"]
502
- h_end = n_fee_state["h_end"]
503
- ccd_readout_order = n_fee_state["ccd_readout_order"]
504
- rows_final_dump = n_fee_state["n_final_dump"]
505
-
506
- crucial_parameter_change = False
507
-
508
- if v_start != self.v_start:
509
-
510
- LOGGER.info(f"Change in v_start: {self.v_start} -> {v_start}")
511
-
512
- self.v_start = v_start
513
- crucial_parameter_change = True
514
-
515
- if v_end != self.v_end:
516
-
517
- LOGGER.info(f"Change in v_end: {self.v_end} -> {v_end}")
518
-
519
- self.v_end = v_end
520
- crucial_parameter_change = True
521
-
522
- if h_end != self.h_end:
523
-
524
- LOGGER.info(f"Change in h_end: {self.h_end} -> {h_end}")
525
-
526
- self.h_end = h_end
527
- crucial_parameter_change = True
528
-
529
- if rows_final_dump != self.rows_final_dump:
530
-
531
- LOGGER.info(f"Change in rows_final_dump: {self.rows_final_dump} -> {rows_final_dump}")
532
-
533
- self.rows_final_dump = rows_final_dump
534
- crucial_parameter_change = True
535
-
536
- if ccd_readout_order != self.ccd_readout_order:
537
-
538
- LOGGER.info(f"Change in ccd_readout_order: {self.ccd_readout_order} -> {ccd_readout_order}")
539
-
540
- self.ccd_readout_order = ccd_readout_order
541
- crucial_parameter_change = True
542
-
543
- if ccd_mode_config != self.ccd_mode_config:
544
-
545
- LOGGER.info(f"Change in ccd_mode_config: {self.ccd_mode_config} -> {ccd_mode_config}")
546
-
547
- self.ccd_mode_config = ccd_mode_config
548
- crucial_parameter_change = True
549
-
550
- return crucial_parameter_change
551
-
552
-
553
- def convert_to_cubes(filename):
554
- """ Conversion of level-1 FITS files to level-2 FITS files.
555
-
556
- After the conversion, the flat-structure FITS file is removed.
557
-
558
- Args:
559
- - filename: Full path of the level-1 FITS file.
560
- """
561
-
562
- cube_filename = construct_cube_filename(filename)
563
- LOGGER.info(f"Converting to {cube_filename}")
564
- fee_side = GlobalState.setup.camera.fee.ccd_sides.enum
565
-
566
- with fits.open(filename) as level1:
567
-
568
- primary_header = level1["PRIMARY"].header
569
-
570
- selected_ccds = np.unique(primary_header["CCD_READOUT_ORDER"][1:-1].split(", ")) # str
571
- side_is_present = {ccd: {fee_side.E: 0, fee_side.F: 0} for ccd in selected_ccds}
572
-
573
- has_serial_overscan = primary_header["H_END"] >= \
574
- CCD_SETTINGS.LENGTH_SERIAL_PRESCAN + CCD_SETTINGS.NUM_COLUMNS // 2
575
- has_parallel_overscan = primary_header["V_END"] >= CCD_SETTINGS.NUM_ROWS
576
-
577
- # We are going to calculate the relative time since the very first exposure in the FITS file. We don't know
578
- # here which CCD side of which CCD came in first, so we determine the start time here.
579
-
580
- start_time = time_since_epoch_1958(format_datetime(precision=6, width=9)) # Now (data will certainly be older)
581
- date_obs = None
582
-
583
- for ccd_number in selected_ccds:
584
-
585
- for ccd_side in fee_side:
586
-
587
- try:
588
-
589
- finetime = level1[f"IMAGE_{ccd_number}_{ccd_side.name[0]}", 0].header["FINETIME"]
590
-
591
- if finetime < start_time:
592
-
593
- start_time = finetime
594
- date_obs = level1[f"IMAGE_{ccd_number}_{ccd_side.name[0]}", 0].header["DATE-OBS"]
595
-
596
- side_is_present[ccd_number][ccd_side] = True
597
-
598
- except KeyError:
599
-
600
- side_is_present[ccd_number][ccd_side] = False
601
-
602
- primary_hdu = fits.PrimaryHDU()
603
- primary_header["DATE-OBS"] = (date_obs, "Timestamp for 1st frame",)
604
- primary_header["FINETIME"] = (start_time, "Finetime representation of DATE-OBS",)
605
- primary_header["LEVEL"] = 2 # Cube structure
606
- primary_hdu.header = primary_header
607
- primary_hdu.writeto(cube_filename)
608
-
609
- for ccd_number in selected_ccds:
610
-
611
- for ccd_side in fee_side:
612
-
613
- if side_is_present[ccd_number][ccd_side]:
614
-
615
- # Image
616
-
617
- images = []
618
- time_axis = np.array([])
619
-
620
- exposure = 0
621
-
622
- while True:
623
-
624
- try:
625
-
626
- slice = level1[f"IMAGE_{ccd_number}_{ccd_side.name[0]}", exposure]
627
-
628
- time = time_since_epoch_1958(slice.header["DATE-OBS"])
629
- time_axis = np.append(time_axis, time)
630
-
631
- images.append(slice.data)
632
-
633
- exposure += 1
634
-
635
- except KeyError:
636
-
637
- break
638
-
639
- image_cube = np.stack(images)
640
- del images
641
-
642
- time_axis -= start_time
643
- time_column = fits.Column("TIME", format="F", array=time_axis)
644
- time_table = fits.BinTableHDU.from_columns([time_column])
645
- time_table.header["EXTNAME"] = f"WCS-TAB_{ccd_number}_{ccd_side.name[0]}"
646
-
647
- fits.append(cube_filename, time_table.data, time_table.header)
648
- fits.append(filename, time_table.data, time_table.header)
649
-
650
- image_cube_header = level1[f"IMAGE_{ccd_number}_{ccd_side.name[0]}", 0].header
651
- image_cube_header["NAXIS"] = (3, f"Dimensionality of the image cube ({ccd_side.name[0]}-side)",)
652
- image_cube_header["NAXIS3"] = exposure
653
- image_cube_header["CRPIX3"] = 1
654
- image_cube_header["CRVAL3"] = start_time
655
- image_cube_header["CTYPE3"] = "TIMETAB"
656
- image_cube_header["CUNIT3"] = "s"
657
- image_cube_header["PS3_0"] = f"WCS-TAB_{ccd_number}_{ccd_side.name[0]}"
658
- image_cube_header["PS3_1"] = "TIME"
659
-
660
- fits.append(cube_filename, image_cube, image_cube_header)
661
-
662
- # Serial pre-scan
663
-
664
- serial_prescans = []
665
-
666
- exposure = 0
667
-
668
- while True:
669
-
670
- try:
671
-
672
- serial_prescans.append(level1[f"SPRE_{ccd_number}_{ccd_side.name[0]}", exposure].data)
673
- exposure += 1
674
-
675
- except KeyError:
676
-
677
- break
678
-
679
- serial_prescan_cube = np.stack(serial_prescans)
680
- del serial_prescans
681
-
682
- serial_prescan_cube_header = level1[f"SPRE_{ccd_number}_{ccd_side.name[0]}", 0].header
683
- serial_prescan_cube_header["NAXIS"] = (3, f"Dimensionality of the serial pre-scan cube ({ccd_side.name[0]}-side)",)
684
- serial_prescan_cube_header["NAXIS3"] = exposure
685
- serial_prescan_cube_header["CRPIX3"] = 1
686
- serial_prescan_cube_header["CRVAL3"] = start_time
687
- serial_prescan_cube_header["CTYPE3"] = "TIMETAB"
688
- serial_prescan_cube_header["CUNIT3"] = "s"
689
- serial_prescan_cube_header["PS3_0"] = f"WCS-TAB_{ccd_number}_{ccd_side.name[0]}"
690
- serial_prescan_cube_header["PS3_1"] = "TIME"
691
-
692
- fits.append(cube_filename, serial_prescan_cube, serial_prescan_cube_header)
693
-
694
- # Serial over-scan
695
-
696
- if has_serial_overscan:
697
-
698
- serial_overscans = []
699
- exposure = 0
700
-
701
- while True:
702
-
703
- try:
704
-
705
- serial_overscans.append(level1[f"SOVER_{ccd_number}_{ccd_side.name[0]}", exposure].data)
706
- exposure += 1
707
-
708
- except KeyError:
709
-
710
- break
711
-
712
- serial_overscan_cube = np.stack(serial_overscans)
713
- del serial_overscans
714
-
715
- serial_overscan_cube_header = level1[f"SOVER_{ccd_number}_{ccd_side.name[0]}", 0].header
716
- serial_overscan_cube_header["NAXIS"] = (3, f"Dimensionality of the serial over-scan cube ({ccd_side.name[0]}-side)",)
717
- serial_overscan_cube_header["NAXIS3"] = exposure
718
- serial_overscan_cube_header["CRPIX3"] = 1
719
- serial_overscan_cube_header["CRVAL3"] = start_time
720
- serial_overscan_cube_header["CTYPE3"] = "TIMETAB"
721
- serial_overscan_cube_header["CUNIT3"] = "s"
722
- serial_overscan_cube_header["PS3_0"] = f"WCS-TAB_{ccd_number}_{ccd_side.name[0]}"
723
- serial_overscan_cube_header["PS3_1"] = "TIME"
724
-
725
- fits.append(cube_filename, serial_overscan_cube, serial_overscan_cube_header)
726
-
727
- # Parallel over-scan
728
-
729
- if has_parallel_overscan:
730
-
731
- parallel_overscans = []
732
- exposure = 0
733
-
734
- while True:
735
-
736
- try:
737
-
738
- parallel_overscans.append(level1[f"POVER_{ccd_number}_{ccd_side.name[0]}", exposure].data)
739
- exposure += 1
740
-
741
- except KeyError:
742
- break
743
-
744
- parallel_overscan_cube = np.stack(parallel_overscans)
745
- del parallel_overscans
746
-
747
- parallel_overscan_cube_header = level1[f"POVER_{ccd_number}_{ccd_side.name[0]}", 0].header
748
- parallel_overscan_cube_header["NAXIS"] = (3, f"Dimensionality of the parallel over-scan cube ({ccd_side.name[0]}-side)",)
749
- parallel_overscan_cube_header["NAXIS3"] = exposure
750
- parallel_overscan_cube_header["CRPIX3"] = 1
751
- parallel_overscan_cube_header["CRVAL3"] = start_time
752
- parallel_overscan_cube_header["CTYPE3"] = "TIMETAB"
753
- parallel_overscan_cube_header["CUNIT3"] = "s"
754
- parallel_overscan_cube_header["PS3_0"] = f"WCS-TAB_{ccd_number}_{ccd_side.name[0]}"
755
- parallel_overscan_cube_header["PS3_1"] = "TIME"
756
-
757
- fits.append(
758
- cube_filename, parallel_overscan_cube, parallel_overscan_cube_header
759
- )
760
-
761
- # Remove the level-1 FITS file
762
-
763
- LOGGER.info(f"Removing flat-structure FITS file {filename}")
764
- os.remove(filename)
765
-
766
-
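The header keywords written above (CTYPE3 = "TIMETAB", PS3_0/PS3_1) describe the third cube axis as a look-up-table time axis whose values live in a separate WCS-TAB extension. A minimal, self-contained sketch of the same pattern, with made-up frames and a hypothetical filename (not the removed implementation):

import numpy as np
from astropy.io import fits

frames = [np.zeros((10, 10), dtype=np.uint16) for _ in range(3)]  # hypothetical exposures
cube = np.stack(frames)                        # shape: (n_exposures, rows, columns)

header = fits.Header()
header["NAXIS3"] = cube.shape[0]               # number of exposures in the cube
header["CRPIX3"] = 1                           # reference pixel on the time axis
header["CRVAL3"] = 0.0                         # time of the first frame [s]
header["CTYPE3"] = "TIMETAB"                   # tabulated time axis
header["CUNIT3"] = "s"
header["PS3_0"] = "WCS-TAB_1_E"                # extension that holds the time table
header["PS3_1"] = "TIME"                       # column in that extension

fits.append("example_cube.fits", cube, header)  # append the cube as a new HDU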
767
- def is_incomplete(hdf5_file: File):
768
- """ Check whether the given HDF5 file is incomplete.
769
-
770
- The HDF5 files are created at the start of a cycle. The register map and (if applicable) the format version are
771
-     stored at this point. If an observation starts halfway through a cycle, the register map will not be present.
772
-
773
- Args:
774
- - hdf5_file: HDF5 file.
775
-
776
- Returns: True if the given HDF5 file is incomplete (i.e. if the register map is not stored); False otherwise.
777
- """
778
-
779
- return "register" not in hdf5_file
780
-
781
-
782
- def is_corrupt(hdf5_file: File):
783
- """ Check whether the given HDF5 file is corrupt.
784
-
785
- Args:
786
- - hdf5_file: HDF5 file.
787
-
788
- Returns: True if an error flag is set in one of the groups; False otherwise.
789
- """
790
-
791
- for count in range(4):
792
-
793
- if f"/{count}/hk" in hdf5_file:
794
-
795
- hk_packet = SpaceWirePacket.create_packet(hdf5_file[f"/{count}/hk"][...])
796
- error_flags = HousekeepingData(hk_packet.data)['error_flags']
797
-
798
- if error_flags:
799
- return True
800
-
801
- return False
802
-
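A small sketch of how these two checks are typically combined before processing, using h5py directly rather than the module's own h5 wrapper (the filename is made up):

import h5py

def usable_files(filenames):
    """Yield only the HDF5 files that contain a register map (cf. is_incomplete above)."""
    for name in filenames:
        try:
            with h5py.File(name, "r") as hdf5_file:
                if "register" in hdf5_file:
                    yield name
        except OSError:
            pass  # unreadable or missing file -> skip it

# Example with a hypothetical filename:
# list(usable_files(["20230801_CSL_N-FEE_SPW_00001.hdf5"]))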
803
-
804
- def any_crucial_parameters_changed(prep: dict, n_fee_state: Mapping):
805
- """ Check whether there is a change in crucial parameters.
806
-
807
-     Return True if any of the following parameters changed with respect to the previous check: v_start, v_end, h_end,
808
- rows_final_dump, ccd_mode_config, and ccd_readout_order.
809
-
810
- Args:
811
- - prep (dict): Current values for the crucial parameters.
812
- - n_fee_state: N-FEE state parameters or register map.
813
-
814
- Returns: True if any of the values have changed, False otherwise.
815
- """
816
-
817
- v_start = n_fee_state['v_start']
818
- v_end = n_fee_state['v_end']
819
- h_end = n_fee_state['h_end']
820
- rows_final_dump = n_fee_state['n_final_dump']
821
- ccd_mode_config = n_fee_state['ccd_mode_config']
822
- ccd_readout_order = n_fee_state['ccd_readout_order']
823
- ccd_readout_order = convert_ccd_order_value(ccd_readout_order)
824
-
825
- for x, y in dict(
826
- v_start=v_start, v_end=v_end, h_end=h_end, rows_final_dump=rows_final_dump,
827
- ccd_mode_config=ccd_mode_config, ccd_readout_order=ccd_readout_order,
828
- ).items():
829
- if prep.get(x) != y:
830
- LOGGER.debug(f"{x=}, {prep.get(x)=}, {y=}")
831
- return True
832
-
833
- return False
834
-
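The comparison above boils down to a dictionary lookup per parameter. A self-contained illustration with made-up values:

prep = {"v_start": 0, "v_end": 4509}          # values cached when the FITS file was opened
current = {"v_start": 0, "v_end": 2254}       # freshly read N-FEE parameters

changed = any(prep.get(key) != value for key, value in current.items())
print(changed)  # True -> close the running FITS file and open a new one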
835
-
836
- def in_data_acquisition_mode(n_fee_state: Mapping):
837
- """ Check whether the N-FEE is in data acquisition mode.
838
-
839
- Args:
840
- - n_fee_state: N-FEE state parameters or register map.
841
-
842
- Returns: True if the N-FEE is in imaging mode (full-image (pattern) mode, windowing (pattern) mode, or
843
- parallel/serial trap pumping mode (1/2)) and the digitised data is transferred to the N-DPU.
844
- """
845
-
846
- ccd_mode_config = n_fee_state["ccd_mode_config"]
847
- digitise_en = n_fee_state["digitise_en"]
848
-
849
- return ccd_mode_config in [n_fee_mode.FULL_IMAGE_MODE, n_fee_mode.FULL_IMAGE_PATTERN_MODE,
850
- n_fee_mode.PARALLEL_TRAP_PUMPING_1_MODE, n_fee_mode.PARALLEL_TRAP_PUMPING_2_MODE,
851
- n_fee_mode.SERIAL_TRAP_PUMPING_1_MODE, n_fee_mode.SERIAL_TRAP_PUMPING_2_MODE,
852
- n_fee_mode.WINDOWING_PATTERN_MODE, n_fee_mode.WINDOWING_MODE] and digitise_en
853
-
854
-
855
- def construct_cube_filename(fits_filename: str) -> str:
856
- """ Construct the filename for the level-2 FITS file.
857
-
858
- The level-2 FITS file will have the data arranged in cubes, rather than in a flat structure.
859
-
860
- Args:
861
-         - fits_filename: Filename for the level-1 FITS file. The level-1 FITS file has the data arranged in a flat
862
- structure.
863
-
864
- Returns: Filename for the level-2 FITS file.
865
- """
866
-
867
- LOGGER.info(f"Construct cube filename from {fits_filename}")
868
-
869
- # LOGGER.info(f"Images: {'images' in fits_filename}")
870
-
871
- if "images" in fits_filename:
872
- return fits_filename.replace("images", "cube")
873
-
874
- else:
875
- prefix, suffix = str(fits_filename).rsplit('_', 1)
876
- return f"{prefix}_cube_{suffix}"
877
-
878
-
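Expected behaviour on two illustrative filenames (the first goes through the "images" branch, the second through the rsplit branch):

# "images" is part of the name -> simple substitution:
print("00123_CSL_N-FEE_CCD_00001_20230801_images.fits".replace("images", "cube"))
# 00123_CSL_N-FEE_CCD_00001_20230801_cube.fits

# otherwise the "cube" marker is inserted before the last underscore-separated field:
prefix, suffix = "20230801_CSL_N-FEE_CCD_00001.fits".rsplit("_", 1)
print(f"{prefix}_cube_{suffix}")
# 20230801_CSL_N-FEE_CCD_cube_00001.fits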
879
- def construct_images_filename(hdf5_filename: PosixPath, obsid: ObservationIdentifier = None,
880
- location=None, camera_name: str = None):
881
- """ Construct the filename for the level-1 FITS file.
882
-
883
-
884
-     The level-1 FITS file has the data arranged in a flat structure.
885
-
886
- Args:
887
-         - hdf5_filename (PosixPath): Filename of the HDF5 file; the timestamp, site ID and counter for the FITS
888
-           filename are derived from this name.
889
-         - obsid (ObservationIdentifier): Unique identifier for the observation (LAB_SETUP_TEST).
890
-         - camera_name (str): Name of the camera, used to resolve the storage format of the obsid.
891
-         - location: Folder (with /daily and /obs sub-folders) in which the FITS files should be written (in a
892
-           dedicated directory in the /obs folder). If not specified, the `PLATO_DATA_STORAGE_LOCATION`
893
-           environment variable will be used to construct the location.
895
-
896
-     Returns: Full path to the file, as a string.
897
- """
898
-
899
- location = location or get_data_storage_location()
900
-
901
- if obsid is None:
902
-
903
- timestamp, site_id, _, _, counter = str.split(str.split(str(hdf5_filename), ".")[0], "_")
904
- fits_filename = f"{timestamp}_{site_id}_{N_FEE_SETTINGS.ORIGIN_CCD_DATA}_{counter}_images.{FITS.extension}"
905
-
906
- location += "/daily/"
907
-
908
- return str(Path(location) / timestamp / fits_filename)
909
-
910
- else:
911
-
912
- # Make sure that the FITS file ends up in the correct sub-folder
913
- # - oldest data: TEST_LAB_SETUP
914
- # - more recent data: TEST_LAB
915
-
916
- obsid = obsid_from_storage(obsid, data_dir=location, camera_name=camera_name)
917
-
918
- timestamp = str.split(str(hdf5_filename).split("/")[-1], "_")[0]
919
- location += "/obs/"
920
-
921
- if not os.path.isdir(f"{location}/{obsid}"):
922
- os.makedirs(f"{location}/{obsid}")
923
- location += f"{obsid}/"
924
-
925
- # Determine the filename
926
-
927
- pattern = f"{obsid}_{N_FEE_SETTINGS.ORIGIN_CCD_DATA}_*_{timestamp}_cube.{FITS.extension}"
928
- counter = get_fits_counter(location, pattern)
929
-
930
- fits_filename = f"{obsid}_{N_FEE_SETTINGS.ORIGIN_CCD_DATA}_{counter:05d}_{timestamp}_images.{FITS.extension}"
931
-
932
- return str(Path(location) / fits_filename)
933
-
934
-
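A sketch of the filename handling in the branch without an obsid, with a made-up HDF5 filename (the real code uses N_FEE_SETTINGS.ORIGIN_CCD_DATA and FITS.extension instead of the literals below):

hdf5_filename = "20230801_CSL_N-FEE_SPW_00012.hdf5"          # hypothetical daily HDF5 file

timestamp, site_id, _, _, counter = hdf5_filename.split(".")[0].split("_")
fits_filename = f"{timestamp}_{site_id}_N-FEE_CCD_{counter}_images.fits"

print(fits_filename)  # 20230801_CSL_N-FEE_CCD_00012_images.fits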
935
- def get_fits_counter(location, pattern):
936
- """ Determine counter for a new FITS file at the given location and with the given pattern.
937
-
938
- Args:
939
- - location: Location where the FITS file should be stored.
940
- - pattern: Pattern for the filename.
941
-
942
- Returns: Value of the next counter; 1 if no previous files were found or if an error occurred.
943
- """
944
-
945
- LOGGER.debug(f"Pattern: {pattern=}")
946
- LOGGER.debug(f"Location: {location=}")
947
-
948
- files = sorted(find_files(pattern=pattern, root=location))
949
-
950
-     # No filenames found matching the given pattern -> start counting at 1
951
-
952
- LOGGER.debug(f"Number of matches: {len(files)=}")
953
-
954
- if len(files) == 0:
955
- return 1
956
-
957
- last_file = files[-1]
958
-
959
- counter = last_file.name.split("_")
960
-
961
- LOGGER.debug(f"{counter = }")
962
-
963
- try:
964
-
965
- # Observation files have the following pattern:
966
- # <test ID>_<lab ID>_N-FEE_CCD_<counter>_<day YYYYmmdd>_cube.fits
967
-
968
- counter = int(counter[-3]) + 1
969
- LOGGER.debug(f"{counter = }")
970
- return counter
971
-
972
- except ValueError:
973
-
974
- LOGGER.warning("ValueError", exc_info=True)
975
- return 1
976
-
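The counter extraction relies on the observation filename pattern documented in the function; a self-contained example with an illustrative filename:

# <test ID>_<lab ID>_N-FEE_CCD_<counter>_<day YYYYmmdd>_cube.fits
last_file = "00123_CSL_N-FEE_CCD_00007_20230801_cube.fits"

next_counter = int(last_file.split("_")[-3]) + 1  # counter is the third field from the end
print(next_counter)  # 8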
977
-
978
- def create_fits_from_hdf5(files: List, location: str = None, setup: Setup = None):
979
- """ Off-line generation of FITS files from HDF5 files with SpW packets.
980
-
981
- When there is a change in crucial parameters, the current FITS file (if any) will be closed and a new one will be
982
-     created as soon as an HDF5 file with data content is encountered (when the N-FEE is in full-image or full-image
983
- pattern mode):
984
-
985
- - If there is a change in crucial parameters, close the current FITS file (if any).
986
-         - If there is a change in a crucial parameter and the N-FEE is in full-image mode or in full-image pattern
987
- mode, or the N-FEE goes to full-image mode or full-image pattern mode, a new FITS file will be created.
988
- - The content of the HDF5 files will be extracted and passed to the FITS persistence layer as SpW packets.
989
-
990
- In the older HDF5 files, only the register map is stored, which does not always reflect the actual N-FEE state.
991
- This is solved in the later version of the HDF5 files (format version >= 2.0). In these files, the current N-FEE
992
- state is stored in each of the data groups.
993
-
994
- It's possible that the first file in the list is incomplete, because it was already created by the time the
995
-     current observation started. That file is then skipped.
996
-
997
- Args:
998
- - files: List of filenames of the HDF5 files to use to create the FITS file.
999
- - location: Folder (with /daily and /obs sub-folders) in which the FITS files should be written (in a
1000
- dedicated directory in the /obs folder). If not specified, the `PLATO_DATA_STORAGE_LOCATION`
1001
- environment variable will be used to construct the location.
1002
- - setup: Setup to retrieve information from.
1003
- """
1004
-
1005
- location = location or get_data_storage_location()
1006
-
1007
- # Loop over the filenames. When you encounter an HDF5 file, check its format version.
1008
-
1009
- for filename in files:
1010
-
1011
- filename = Path(filename)
1012
-
1013
- if filename.suffix == f".{HDF5.extension}":
1014
-
1015
- try:
1016
-
1017
- with h5.get_file(filename, mode="r", locking=False) as hdf5_file:
1018
-
1019
-                     # It happens that some of the HDF5 files are incomplete. These should not be used to decide
1020
-                     # whether the register map (original format version) or the N-FEE state (format version >= 2.0)
1021
-                     # determines the state of the crucial parameters.
1022
-
1023
- if is_incomplete(hdf5_file): # or is_corrupt(hdf5_file):
1024
-
1025
- files = files[1:]
1026
-
1027
- else:
1028
-
1029
- # The N-FEE state is stored in the data groups of the HDF5 files (format version >= 2.0)
1030
-
1031
- if "versions" in hdf5_file:
1032
-
1033
- version_attrs = hdf5_file["versions"]["format_version"].attrs
1034
-
1035
- if version_attrs["major_version"] == 2:
1036
- create_fits_from_hdf5_nfee_state(files, location=location, setup=setup)
1037
- break
1038
-
1039
- else:
1040
-
1041
- version = f"{version_attrs['major_version']}.{version_attrs['minor_version']}"
1042
-
1043
- raise AttributeError(f"HDF5 file format version {version} cannot be handled by the FITS generator")
1044
-
1045
- # The register map is stored (globally) in the HDF5 files
1046
-
1047
- else:
1048
- create_fits_from_hdf5_register_map(files, location=location, setup=setup)
1049
- break
1050
-
1051
- except RuntimeError as exc:
1052
- LOGGER.debug(f"Unable to open HDF5 file: {exc}")
1053
-
1054
-
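A sketch of the format-version check the dispatch above is based on, using h5py directly and a hypothetical filename:

import h5py

with h5py.File("20230801_CSL_N-FEE_SPW_00001.hdf5", "r") as hdf5_file:
    if "versions" in hdf5_file:
        attrs = hdf5_file["versions"]["format_version"].attrs
        version = f"{attrs['major_version']}.{attrs['minor_version']}"   # >= 2.0: N-FEE state stored per group
    else:
        version = "1.0"                                                  # only the register map is stored
    print(version)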
1055
- def create_fits_from_hdf5_register_map(files: List, location: str = None, setup: Setup = None):
1056
- """ Off-line generation of FITS files from HDF5 files with SpW packets.
1057
-
1058
- When there is a change in crucial parameters, the current FITS file (if any) will be closed and a new one will be
1059
-     created as soon as an HDF5 file with data content is encountered (when the N-FEE is in full-image or full-image
1060
- pattern mode):
1061
-
1062
- - If there is a change in crucial parameters, close the current FITS file (if any).
1063
-         - If there is a change in a crucial parameter and the N-FEE is in full-image mode or in full-image pattern
1064
- mode, or the N-FEE goes to full-image mode or full-image pattern mode, a new FITS file will be created.
1065
- - The content of the HDF5 files will be extracted and passed to the FITS persistence layer as SpW packets.
1066
-
1067
- In the given HDF5 files, only the register map is stored, which does not always reflect the actual N-FEE state. As
1068
- a result not all data may be present in the generated FITS files (e.g. because the register map says the N-FEE is
1069
- already in dump mode) or the data might be split over more FITS files than expected (e.g. because the v_start and
1070
- v_end parameters are already / not yet changed in the register map but not in the N-FEE state).
1071
-
1072
- Note that this problem is solved in the later version of the HDF5 files (format version >= 2.0).
1073
-
1074
- Args:
1075
- - files: List of filenames of the HDF5 files to use to create the FITS file.
1076
- - location: Folder (with /daily and /obs sub-folders) in which the FITS files should be written (in a
1077
- dedicated directory in the /obs folder). If not specified, the `PLATO_DATA_STORAGE_LOCATION`
1078
- environment variable will be used to construct the location.
1079
- - setup: Setup to retrieve information from.
1080
- """
1081
- setup = setup or GlobalState.setup
1082
-
1083
- location = location or get_data_storage_location()
1084
- hdf5_file_root = Path(files[0]).parent.parent.parent
1085
- sensor_sel_enum = setup.camera.fee.sensor_sel.enum
1086
- camera_name = setup.camera.ID
1087
-
1088
- prep = {}
1089
- fits_filename = None
1090
-
1091
- for filename in files:
1092
-
1093
- filename = Path(filename)
1094
-
1095
- if filename.suffix == '.hdf5':
1096
-
1097
- print(f"Processing {filename=!s}...")
1098
-
1099
- try:
1100
-
1101
- with h5.get_file(filename, mode="r", locking=False) as hdf5_file:
1102
-
1103
- # if is_corrupt(hdf5_file):
1104
- # LOGGER.warning(f"Skipping {filename} (corrupt)")
1105
- #
1106
- # else:
1107
-
1108
- if 'register' not in hdf5_file:
1109
- LOGGER.warning(f"No register map found for {filename=!s}, continue with next file..")
1110
- continue # next HDF5 file
1111
-
1112
- register_map = RegisterMap("N-FEE", memory_map=h5.get_data(hdf5_file["register"]))
1113
-
1114
- has_data = False
1115
-
1116
- for group in h5.groups(hdf5_file):
1117
-
1118
- if "data" in group.keys():
1119
-
1120
- has_data = True
1121
-
1122
- # Should a new FITS file be created?
1123
-
1124
- if any_crucial_parameters_changed(prep, register_map):
1125
-
1126
- if fits_filename:
1127
-
1128
- LOGGER.info(f"Creating a FITS CUBE file ...")
1129
- convert_to_cubes(fits_filename)
1130
- fits_filename = None
1131
-
1132
- if in_data_acquisition_mode(register_map):
1133
-
1134
- if fits_filename is None:
1135
-
1136
- LOGGER.info(f"A new FITS file will be created...")
1137
-
1138
- # Start writing to a new FITS file
1139
-                                 # Collect all information to send to the FITS layer
1140
-
1141
- if "obsid" in hdf5_file:
1142
- obsid = hdf5_file["obsid"][()].decode()
1143
- obsid = ObservationIdentifier.create_from_string(obsid, order=LAB_SETUP_TEST)
1144
- else:
1145
- obsid = None
1146
-
1147
- fits_filename = construct_images_filename(
1148
- filename, obsid, location=location, camera_name=camera_name
1149
- )
1150
- LOGGER.info(f"{fits_filename = !s}")
1151
-
1152
- ccd_readout_order = register_map['ccd_readout_order']
1153
- ccd_readout_order = convert_ccd_order_value(ccd_readout_order)
1154
-
1155
- prep = {
1156
- "v_start": register_map['v_start'],
1157
- "v_end": register_map['v_end'],
1158
- "h_end": register_map['h_end'],
1159
- "rows_final_dump": register_map['n_final_dump'],
1160
- "ccd_mode_config": register_map['ccd_mode_config'],
1161
- "ccd_readout_order": ccd_readout_order, # CCD numbering [1-4]
1162
- "expected_last_packet_flags": get_expected_last_packet_flags(register_map,
1163
- sensor_sel_enum),
1164
- "obsid": str(obsid) if obsid is not None else None,
1165
- "cycle_time": get_cycle_time(register_map, obsid=obsid, data_dir=hdf5_file_root),
1166
- "cgse_version": get_cgse_version(obsid=obsid, data_dir=hdf5_file_root),
1167
- "setup": setup,
1168
- "register_map": register_map
1169
- }
1170
-
1171
- persistence = FITS(str(fits_filename), prep)
1172
- persistence.open()
1173
-
1174
- # See https://github.com/IvS-KULeuven/plato-common-egse/issues/901
1175
- # timecode = group["timecode"]
1176
- # spw_packet = SpaceWirePacket.create_packet(h5.get_data(timecode))
1177
-
1178
- timestamp = group["timecode"].attrs["timestamp"]
1179
- persistence.create({"Timestamp": timestamp})
1180
-
1181
- data = group["data"]
1182
- sorted_datasets = natsort.natsorted(data.items(), key=lambda x: x[0])
1183
-
1184
- persistence.expected_last_packet_flags = get_expected_last_packet_flags(register_map,
1185
- sensor_sel_enum)
1186
-
1187
- for identifier, dataset in sorted_datasets:
1188
-
1189
- spw_packet = SpaceWirePacket.create_packet(h5.get_data(dataset))
1190
- # LOGGER.debug(f"{spw_packet.type = !s}")
1191
- persistence.create({f"SpW packet {identifier}": spw_packet})
1192
-
1193
- if not has_data:
1194
-
1195
- if fits_filename:
1196
-
1197
- LOGGER.info(f"Creating a FITS CUBE file ...")
1198
- convert_to_cubes(fits_filename)
1199
- fits_filename = None
1200
-
1201
- prep = clear_crucial_parameters(prep)
1202
-
1203
- except RuntimeError as exc:
1204
- LOGGER.debug(f"Unable to open HDF5 file: {exc}")
1205
- else:
1206
- print(f"Skipping {filename=}")
1207
-
1208
- try:
1209
- if fits_filename:
1210
- LOGGER.info(f"Creating a FITS CUBE file ...")
1211
- convert_to_cubes(fits_filename)
1212
- except OSError:
1213
- # The last file in the list still contained data, so we reached the end of the list without creating a cube
1214
- # FITS file yet
1215
- pass
1216
-
1217
-
1218
- def create_fits_from_hdf5_nfee_state(files: List, location: str = None, setup: Setup = None):
1219
- """ Off-line generation of FITS files from HDF5 files with SpW packets.
1220
-
1221
- When there is a change in crucial parameters, the current FITS file (if any) will be closed and a new one will be
1222
-     created as soon as an HDF5 file with data content is encountered (when the N-FEE is in full-image or full-image
1223
- pattern mode):
1224
-
1225
- - If there is a change in crucial parameters, close the current FITS file (if any).
1226
-         - If there is a change in a crucial parameter and the N-FEE is in full-image mode or in full-image pattern
1227
- mode, or the N-FEE goes to full-image mode or full-image pattern mode, a new FITS file will be created.
1228
- - The content of the HDF5 files will be extracted and passed to the FITS persistence layer as SpW packets.
1229
-
1230
- In the given HDF5 files, the N-FEE state is saved in all data groups, reflecting the actual N-FEE state (i.e.
1231
- solving the problem of the mismatch between the register map and the N-FEE state).
1232
-
1233
- Args:
1234
- - files: List of filenames of the HDF5 files to use to create the FITS file.
1235
- - location: Folder (with /daily and /obs sub-folders) in which the FITS files should be written (in a
1236
- dedicated directory in the /obs folder). If not specified, the `PLATO_DATA_STORAGE_LOCATION`
1237
- environment variable will be used to construct the location.
1238
- - setup: Setup to retrieve information from.
1239
- """
1240
- setup = setup or GlobalState.setup
1241
-
1242
- location = location or get_data_storage_location()
1243
- hdf5_file_root = Path(files[0]).parent.parent.parent
1244
- sensor_sel_enum = setup.camera.fee.sensor_sel.enum
1245
- camera_name = setup.camera.ID
1246
-
1247
- config_slicing_num_cycles = 0 # Configured slicing parameter
1248
- processed_num_cycles = 0 # HDF5 files with image data processed for current FITS file
1249
-
1250
- prep = {}
1251
- fits_filename = None
1252
-
1253
- for filename in files:
1254
-
1255
- filename = Path(filename)
1256
-
1257
- if filename.suffix == '.hdf5':
1258
-
1259
- print(f"Processing {filename=!s}...")
1260
-
1261
- try:
1262
-
1263
- with h5.get_file(filename, mode="r", locking=False) as hdf5_file:
1264
-
1265
- # Slicing
1266
-
1267
- try:
1268
- slicing_num_cycles = hdf5_file["dpu"].attrs["slicing_num_cycles"]
1269
- if slicing_num_cycles != config_slicing_num_cycles:
1270
- if fits_filename:
1271
- convert_to_cubes(fits_filename)
1272
- fits_filename = None
1273
- processed_num_cycles = 0
1274
- config_slicing_num_cycles = slicing_num_cycles
1275
- except KeyError:
1276
- config_slicing_num_cycles = 0
1277
-
1278
- register_map = RegisterMap("N-FEE", memory_map=h5.get_data(hdf5_file["register"]))
1279
-
1280
- # if is_corrupt(hdf5_file):
1281
- # LOGGER.warning(f"Skipping {filename} (corrupt)")
1282
- #
1283
- # else:
1284
-
1285
- has_data = False
1286
-
1287
- for group in h5.groups(hdf5_file):
1288
-
1289
- if "data" in group.keys():
1290
-
1291
- has_data = True
1292
-
1293
- n_fee_state = group["data"].attrs
1294
-
1295
- # Should a new FITS file be created?
1296
-
1297
- if any_crucial_parameters_changed(prep, n_fee_state):
1298
-
1299
- if fits_filename:
1300
-
1301
- LOGGER.info(f"Creating a FITS CUBE file ...")
1302
- convert_to_cubes(fits_filename)
1303
- fits_filename = None
1304
- processed_num_cycles = 0
1305
-
1306
- if in_data_acquisition_mode(n_fee_state):
1307
-
1308
- if fits_filename is None:
1309
-
1310
- LOGGER.info(f"A new FITS file will be created...")
1311
-
1312
- # Start writing to a new FITS file
1313
-                                 # Collect all information to send to the FITS layer
1314
-
1315
- if "obsid" in hdf5_file:
1316
- obsid = hdf5_file["obsid"][()].decode()
1317
- obsid = ObservationIdentifier.create_from_string(obsid, order=LAB_SETUP_TEST)
1318
- else:
1319
- obsid = None
1320
-
1321
- fits_filename = construct_images_filename(
1322
- filename, obsid, location=location, camera_name=camera_name
1323
- )
1324
- LOGGER.info(f"{fits_filename = !s}")
1325
-
1326
- ccd_readout_order = n_fee_state['ccd_readout_order']
1327
- ccd_readout_order = convert_ccd_order_value(ccd_readout_order)
1328
-
1329
- prep = {
1330
- "v_start": n_fee_state['v_start'],
1331
- "v_end": n_fee_state['v_end'],
1332
- "h_end": n_fee_state['h_end'],
1333
- "rows_final_dump": n_fee_state['n_final_dump'],
1334
- "ccd_mode_config": n_fee_state['ccd_mode_config'],
1335
- "ccd_readout_order": ccd_readout_order, # CCD numbering [1-4]
1336
- "expected_last_packet_flags": get_expected_last_packet_flags(n_fee_state,
1337
- sensor_sel_enum),
1338
- "obsid": str(obsid) if obsid is not None else None,
1339
- "cycle_time": get_cycle_time(n_fee_state, obsid=obsid, data_dir=hdf5_file_root),
1340
- "cgse_version": get_cgse_version(obsid=obsid, data_dir=hdf5_file_root),
1341
- "setup": setup,
1342
- "register_map": register_map
1343
- }
1344
-
1345
- persistence = FITS(str(fits_filename), prep)
1346
- persistence.open()
1347
-
1348
- # See https://github.com/IvS-KULeuven/plato-common-egse/issues/901
1349
- # timecode = group["timecode"]
1350
- # spw_packet = SpaceWirePacket.create_packet(h5.get_data(timecode))
1351
-
1352
- timestamp = group["timecode"].attrs["timestamp"]
1353
- persistence.create({"Timestamp": timestamp})
1354
-
1355
- data = group["data"]
1356
- sorted_datasets = natsort.natsorted(data.items(), key=lambda x: x[0])
1357
-
1358
- persistence.expected_last_packet_flags = get_expected_last_packet_flags(n_fee_state,
1359
- sensor_sel_enum)
1360
-
1361
- for identifier, dataset in sorted_datasets:
1362
-
1363
- spw_packet = SpaceWirePacket.create_packet(h5.get_data(dataset))
1364
- # LOGGER.debug(f"{spw_packet.type = !s}")
1365
- persistence.create({f"SpW packet {identifier}": spw_packet})
1366
-
1367
- if has_data:
1368
- processed_num_cycles += 1
1369
-
1370
- if fits_filename and config_slicing_num_cycles != 0 \
1371
- and processed_num_cycles == config_slicing_num_cycles:
1372
- convert_to_cubes(fits_filename)
1373
- fits_filename = None
1374
- processed_num_cycles = 0
1375
- else:
1376
-
1377
- if fits_filename:
1378
- LOGGER.info(f"Creating a FITS CUBE file ...")
1379
- convert_to_cubes(fits_filename)
1380
- fits_filename = None
1381
- processed_num_cycles = 0
1382
-
1383
- prep = clear_crucial_parameters(prep)
1384
-
1385
- except RuntimeError as exc:
1386
- LOGGER.debug(f"Unable to open HDF5 file: {exc}")
1387
- else:
1388
-             print(f"Skipping {filename=}")
1389
-
1390
- try:
1391
- if fits_filename:
1392
- LOGGER.info(f"Creating a FITS CUBE file ...")
1393
- convert_to_cubes(fits_filename)
1394
- except OSError:
1395
- # The last file in the list still contained data, so we reached the end of the list without creating a cube
1396
- # FITS file yet
1397
- pass
1398
-
1399
-
1400
- def clear_crucial_parameters(prep: dict):
1401
- """ Clear the crucial parameters from the given dictionary.
1402
-
1403
- Args:
1404
- - prep: Dictionary with crucial parameters.
1405
-
1406
- Returns: Dictionary with the cleared crucial parameters.
1407
- """
1408
-
1409
- prep["v_start"] = None
1410
- prep["v_end"] = None
1411
- prep["h_end"] = None
1412
- prep["rows_final_dump"] = None
1413
- prep["ccd_mode_config"] = None
1414
- prep["ccd_readout_order"] = None
1415
-
1416
- return prep
1417
-
1418
-
1419
- class SynopticsFwdFill(tuple, Enum):
1420
- """ Enumeration of the synoptics to forward fill.
1421
-
1422
- This is only applicable for the commanded source position.
1423
- """
1424
-
1425
- # Source position (commanded)
1426
-
1427
- THETA_CMD = ("GSYN_CMD_THETA", "Commanded source position theta [deg]")
1428
- PHI_CMD = ("GSYN_CMD_PHI", "Commanded source position phi [deg]")
1429
-
1430
-
1431
- class SynopticsInterp1d(tuple, Enum):
1432
- """ Enumeration of the synoptics to linearly interpolate.
1433
-
1434
- This is only applicable for:
1435
- - calibrated TCS temperatures;
1436
-         - calibrated N-FEE temperatures (TOU + CCDs + board sensors);
1437
- - selection of TH DAQ(s) temperatures;
1438
- - OGSE attenuation (relative intensity + FWC fraction for the OGSE);
1439
- - actual source position.
1440
- """
1441
-
1442
- # TCS temperatures
1443
-
1444
- T_TRP1 = ("GSYN_TRP1", "Mean T for TOU TRP1 (TCS) [deg C]")
1445
- T_TRP22 = ("GSYN_TRP22", "Mean T for FEE TRP22 (TCS) [deg C]")
1446
-
1447
- # TOU TRP PT1000 sensors (N-FEE)
1448
-
1449
- T_TRP5 = ("GSYN_TRP5", "Mean T for TRP5 (TOU baffle ring) [deg C]")
1450
- T_TRP6 = ("GSYN_TRP6", "Mean T for TRP6 (FPA I/F) [deg C]")
1451
- T_TRP8 = ("GSYN_TRP8", "Mean T for TRP8 (L3) [deg C]")
1452
- T_TRP21 = ("GSYN_TRP21", "Mean T for TRP21 (TOU bipod +X bottom) [deg C]")
1453
- T_TRP31 = ("GSYN_TRP31", "Mean T for TRP31 (TOU bipod -Y bottom) [deg C]")
1454
- T_TRP41 = ("GSYN_TRP41", "Mean T for TRP41 (TOU bipod +Y bottom) [deg C]")
1455
-
1456
- # CCD PT100/PT1000 sensors (N-FEE)
1457
-
1458
- T_CCD1 = ("GSYN_CCD1", "Mean T for CCD1 [deg C]")
1459
- T_CCD2 = ("GSYN_CCD2", "Mean T for CCD2 [deg C]")
1460
- T_CCD3 = ("GSYN_CCD3", "Mean T for CCD3 [deg C]")
1461
- T_CCD4 = ("GSYN_CCD4", "Mean T for CCD4 [deg C]")
1462
-
1463
- T_CCD1_AMB = ("GSYN_CCD1_AMB", "Mean T for CCD1 (ambient calibration) [deg C]")
1464
- T_CCD2_AMB = ("GSYN_CCD2_AMB", "Mean T for CCD2 (ambient calibration) [deg C]")
1465
- T_CCD3_AMB = ("GSYN_CCD3_AMB", "Mean T for CCD3 (ambient calibration) [deg C]")
1466
- T_CCD4_AMB = ("GSYN_CCD4_AMB", "Mean T for CCD4 (ambient calibration) [deg C]")
1467
-
1468
- # Board sensors: type PT1000 (N-FEE)
1469
-
1470
- T_PCB1 = ("GSYN_NFEE_T_PCB1", "Mean T for board sensor PCB1 [deg C]")
1471
- T_PCB2 = ("GSYN_NFEE_T_PCB2", "Mean T for board sensor PCB2 [deg C]")
1472
- T_PCB3 = ("GSYN_NFEE_T_PCB3", "Mean T for board sensor PCB3 [deg C]")
1473
- T_PCB4 = ("GSYN_NFEE_T_PCB4", "Mean T for board sensor PCB4 [deg C]")
1474
-
1475
- # Board sensors: type ISL71590
1476
-
1477
- T_ADC = ("GSYN_NFEE_T_ADC", "Mean ADC board T [deg C]")
1478
- T_CDS = ("GSYN_NFEE_T_CDS", "Mean CDS board T [deg C]")
1479
- T_ANALOG = ("GSYN_NFEE_T_ANALOG", "Mean analog board T [deg C]")
1480
- T_SKYSHROUD = ("GSYN_SKYSHROUD", "Mean front shroud T [deg C]")
1481
- T_TEB_TOU = ("GSYN_TEB_TOU", "Mean TEB TOU T [deg C]")
1482
- T_TEB_FEE = ("GSYN_TEB_FEE", "Mean TEB FEE T [deg C]")
1483
-
1484
- # Temperatures from the TH DAQ
1485
-
1486
- T_TRP2 = ("GSYN_TRP2", "Mean T for TRP2 (MaRi bipod +X I/F) [deg C]")
1487
- T_TRP3 = ("GSYN_TRP3", "Mean T for TRP3 (MaRi bipod -Y I/F) [deg C]")
1488
- T_TRP4 = ("GSYN_TRP4", "Mean T for TRP4 (MaRi bipod +Y I/F) [deg C]")
1489
-
1490
- T_TRP7 = ("GSYN_TRP7", "Mean T for TRP7 (thermal strap) [deg C]")
1491
- T_TRP10 = ("GSYN_TRP10", "Mean T for TRP10 (FPA) [deg C]")
1492
-
1493
- # OGSE attenuation
1494
-
1495
- OGATT = ("GSYN_OGSE_REL_INTENSITY", "Relative OGSE intensity")
1496
- OGFWC = ("GSYN_OGSE_FWC_FRACTION", "OGSE FWC fraction")
1497
-
1498
- # Source position (actual)
1499
-
1500
- THETA = ("GSYN_ACT_THETA", "Actual source position theta [deg]")
1501
- PHI = ("GSYN_ACT_PHI", "Actual source position phi [deg]")
1502
-
1503
-
1504
- class SynopticsLeaveGaps(tuple, Enum):
1505
- """ Enumeration of the synoptics not to fill the gaps for.
1506
-
1507
- This is only applicable for the status of the shutter (open/closed). Note that there is no shutter in CSL, so we
1508
- indicate that the shutter is always open there.
1509
- """
1510
-
1511
- OGSHTTR = ("GSYN_OGSE_SHUTTER_OPEN", "Is the shutter open?")
1512
-
1513
-
1514
- def get_fits_synoptics(obsid: str, data_dir=None) -> dict:
1515
- """ Retrieve the synoptics that need to be included in the FITS files for the given observation.
1516
-
1517
- The synoptics that need to be included in the FITS files are represented by the following enumerations:
1518
-
1519
-         - SynopticsFwdFill: Use forward filling for the gaps -> only at the beginning of the observation is it
1520
-           possible that gaps remain (but it is unlikely that the data acquisition has already started by then);
1521
- - SynopticsInterp1d: Use linear interpolation to fill the gaps. At the extremes, we use extrapolation;
1522
- - SynopticsLeaveGaps: Don't fill the gaps.
1523
-
1524
- Args:
1525
- - obsid: Observation identifier [TEST_LAB or TEST_LAB_SETUP].
1526
-
1527
- Returns: Dictionary with the synoptics that should go into the FITS files for the given observation.
1528
- """
1529
-
1530
- synoptics_table = get_synoptics_table(obsid, data_dir=data_dir)
1531
-
1532
- # We keep the original timestamps (when filling the gaps)
1533
-
1534
- timestamps = synoptics_table["timestamp"].values
1535
- for index in range(len(timestamps)):
1536
- timestamps[index] = time_since_epoch_1958(timestamps[index])
1537
- timestamps = timestamps.astype(float)
1538
-
1539
-     synoptics = {"timestamps": timestamps}  # Don't forget to include the timestamps in the returned dictionary
1540
-
1541
- # Linear interpolation
1542
-
1543
- for syn_enum in SynopticsInterp1d:
1544
-
1545
- syn_name = syn_enum.value[0]
1546
-
1547
- if syn_name in synoptics_table:
1548
-
1549
- # We need to filter out the NaNs or the interpolation will not work
1550
-
1551
- values = synoptics_table[syn_name].values
1552
-
1553
- if len(values) > 0:
1554
- selection = ~np.isnan(values)
1555
-
1556
- if np.any(selection):
1557
- selected_timestamps = timestamps[np.where(selection)]
1558
- selected_values = values[np.where(selection)]
1559
-
1560
- if len(selected_timestamps) > 1:
1561
- interpolation = interp1d(selected_timestamps, selected_values, kind='linear',
1562
- fill_value='extrapolate')
1563
- synoptics[syn_enum] = interpolation(timestamps)
1564
-
1565
- # Forward fill
1566
-
1567
- for syn_enum in SynopticsFwdFill:
1568
-
1569
- syn_name = syn_enum.value[0]
1570
-
1571
- if syn_name in synoptics_table:
1572
- synoptics[syn_enum] = synoptics_table[syn_name].ffill()
1573
-
1574
- # Leave the gaps in
1575
-
1576
- for syn_enum in SynopticsLeaveGaps:
1577
-
1578
- syn_name = syn_enum.value[0]
1579
-
1580
- if syn_name in synoptics_table:
1581
- synoptics[syn_enum] = synoptics_table[syn_name]
1582
-
1583
- return synoptics
1584
-
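A self-contained sketch of the gap filling used for the SynopticsInterp1d parameters (made-up samples): linear interpolation over the non-NaN values, extrapolating at the edges:

import numpy as np
from scipy.interpolate import interp1d

timestamps = np.array([0.0, 10.0, 20.0, 30.0])
values = np.array([1.0, np.nan, 3.0, np.nan])      # gaps to fill

selection = ~np.isnan(values)
interpolation = interp1d(timestamps[selection], values[selection],
                         kind="linear", fill_value="extrapolate")
print(interpolation(timestamps))                    # [1. 2. 3. 4.]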
1585
-
1586
- def add_synoptics(obsid: str, fits_dir: str, syn_dir: str):
1587
- """ Add synoptics to the FITS headers for the given observation.
1588
-
1589
- When all FITS files have been produced for the given obsid, synoptics is added to the headers. This is done in the
1590
- following steps:
1591
- - Determine which folder in the /obs directory comprises the HK and FITS files for the given obsid;
1592
- - Read the synoptics for the given obsid (from said folder) into a pandas DataFrame;
1593
- - Compose the list of FITS files for the given observation (from said folder);
1594
- - For all of these FITS files, loop over the cubes it contains and:
1595
- - Determine the time range covered by the cube;
1596
- - Select the synoptics (from the pandas DataFrame) over that time range;
1597
- - For the synoptical temperatures, source position (commanded + actual), and OGSE intensity: calculate
1598
- the average and add this to the header of the cube;
1599
-         - For the shutter: derive an open / closed / mixed / unknown status and add this to the header of the cube.
1600
-
1601
- Args:
1602
- obsid: Observation identifier [TEST_LAB or TEST_LAB_SETUP]
1603
- fits_dir: Directory (with /daily and /obs sub-folders) with the FITS files
1604
- syn_dir: Directory (with /daily and /obs sub-folders) with the original synoptics files
1605
- """
1606
-
1607
- fits_dir = fits_dir or get_data_storage_location()
1608
- syn_dir = syn_dir or get_data_storage_location()
1609
- fee_side = GlobalState.setup.camera.fee.ccd_sides.enum
1610
-
1611
- obsid = obsid_from_storage(obsid, data_dir=fits_dir)
1612
- obs_dir = f"{fits_dir}/obs/{obsid}" # Where the HK and FITS files are stored
1613
-
1614
- try:
1615
- synoptics = get_fits_synoptics(obsid, data_dir=fits_dir)
1616
- except FileNotFoundError:
1617
- synoptics = get_fits_synoptics(obsid, data_dir=syn_dir)
1618
- timestamps = synoptics["timestamps"] # Timestamps of the synoptics -> compare with absolute time in FITS file
1619
-
1620
- # Compose the list of FITS files for the given obsid
1621
-
1622
- pattern = f"{obsid}_{N_FEE_SETTINGS.ORIGIN_CCD_DATA}_*_*_cube.fits"
1623
- fits_filenames = sorted(find_files(pattern=pattern, root=obs_dir))
1624
-
1625
- # Loop over all FITS files (cubes) for the given obsid
1626
-
1627
- for fits_filename in fits_filenames:
1628
-
1629
- syn_info = {}
1630
-
1631
- # Loop over all image cubes
1632
-
1633
- with fits.open(fits_filename) as fits_file:
1634
-
1635
- start_time = fits_file["PRIMARY"].header["FINETIME"]
1636
-
1637
- # Loop over both sides of all CCDs (not all of them might be in -> hence the KeyError)
1638
-
1639
- for ccd_number in range(1, 5):
1640
-
1641
- for ccd_side in fee_side:
1642
-
1643
- try:
1644
- # Absolute time = time at the start of the readout
1645
- # = time at the end of the exposure
1646
- # -> Extract relative time from the WCS-TAB and add the DATE-OBS (which is the time of the
1647
- # 1st frame in the FITS file; all times in the WCS-TAB are relative to this)
1648
-
1649
- wcs_table_name = f"WCS-TAB_{ccd_number}_{ccd_side.name[0]}" # Holds relative time
1650
- absolute_time = np.array(fits_file[wcs_table_name].data["TIME"]) + start_time
1651
-
1652
- # We don't care about the 1st frame of any CCD side, as the image is saturated anyway, and it is
1653
-                         # very difficult to determine the start of that exposure
1654
- # -> Simplest solution: indicate that the synoptics is unknown for those frames
1655
- # For all other frames:
1656
- # - Determine when the readout for the previous frame started -> start_previous_readout;
1657
- # - Determine when the readout for the current frame started -> start_current_readout;
1658
- # - For each synoptics parameter, gather the values acquired in the timespan
1659
- # [start_previous_readout, start_current_readout]
1660
- # - For the numerical values: take the mean (skipping the NaNs)
1661
- # - For the boolean values (i.c. status of the shutter):
1662
- # - Only NaN selected -> "U" (unknown)
1663
- # - Both True & False selected (potentially also NaNs) -> "M" (mixed)
1664
- # - Only True (potentially also NaNs) selected -> "T" (True = shutter open)
1665
- # - Only False (potentially also NaNs) selected -> "F" (False = shutter closed)
1666
- #
1667
- # For each synoptical parameter, first determine all the values that need to be included in the
1668
- # current cube of the current FITS file (it is only when we have composed these arrays, that we
1669
-                         # can include them in a table in the FITS file)
1670
-
1671
- fits_synoptics = {syn_enum: np.array([np.nan]) for syn_enum in chain(SynopticsFwdFill,
1672
- SynopticsInterp1d)}
1673
- fits_synoptics.update({syn_enum: np.array(["U"]) for syn_enum in SynopticsLeaveGaps})
1674
-
1675
- for index in range(1, len(absolute_time)):
1676
-
1677
- # Selection of the synoptics for the current frame: based on timestamps:
1678
- # - Start (start_previous_readout): start of the readout of the previous exposure
1679
- # - End (start_current_readout): start of the readout of the current exposure
1680
-
1681
- start_previous_readout = absolute_time[index - 1]
1682
- start_current_readout = absolute_time[index]
1683
-
1684
-                             selection = np.where((timestamps >= start_previous_readout)
1685
-                                                  & (timestamps <= start_current_readout))[0]
1686
-
1687
- # Average (skipping NaNs)
1688
-
1689
- for syn_enum in chain(SynopticsFwdFill, SynopticsInterp1d):
1690
- try:
1691
- selected_values = synoptics[syn_enum][selection]
1692
- average_value = np.nanmean(selected_values)
1693
-
1694
- fits_synoptics[syn_enum] = np.append(fits_synoptics[syn_enum], average_value)
1695
- except (KeyError, AttributeError):
1696
- fits_synoptics[syn_enum] = np.append(fits_synoptics[syn_enum], np.nan)
1697
-
1698
- for syn_enum in SynopticsLeaveGaps:
1699
- try:
1700
- selected_values = synoptics[syn_enum][selection].astype(float)
1701
- selection = ~np.isnan(selected_values)
1702
-
1703
- if not np.any(selection): # No data -> "U" (unknown)
1704
- value = "U"
1705
- else: # Use "T" (True) / "F" (False) only when unique (otherwise: "M" (mixed))
1706
- unique_values = np.unique(selected_values[selection])
1707
- value = str(bool(unique_values[0]))[0] if len(unique_values) == 1 else "M"
1708
- fits_synoptics[syn_enum] = np.append(fits_synoptics[syn_enum], value)
1709
- except (KeyError, AttributeError): # "U" (unknown)
1710
- fits_synoptics[syn_enum] = np.append(fits_synoptics[syn_enum], "U")
1711
-
1712
- # At this point, we have for each synoptical parameter an array of the values that need to
1713
- # be included in the FITS file. We now put all this information in a dedicated table and
1714
- # add it to the FITS file.
1715
-
1716
- syn_columns = []
1717
-
1718
- for syn_enum in chain(SynopticsFwdFill, SynopticsInterp1d, SynopticsLeaveGaps):
1719
- column_format = "A" if syn_enum == SynopticsLeaveGaps.OGSHTTR else "F"
1720
-
1721
- syn_column = fits.Column(syn_enum.value[0], format=column_format,
1722
- array=fits_synoptics[syn_enum])
1723
- syn_columns.append(syn_column)
1724
-
1725
- syn_table = fits.BinTableHDU.from_columns(syn_columns)
1726
- syn_table.header["EXTNAME"] = f"SYN-TAB_{ccd_number}_{ccd_side.name[0]}"
1727
-
1728
- # merged_columns = wcs_table.columns + syn_table.columns
1729
- # merged_table = fits.BinTableHDU.from_columns(merged_columns)
1730
-
1731
- syn_info[syn_table.header["EXTNAME"]] = (syn_table.data, syn_table.header)
1732
- except KeyError:
1733
- pass
1734
-
1735
- for data in syn_info.values():
1736
- fits.append(str(fits_filename), data[0], data[1])
1737
-
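A self-contained sketch of the per-frame selection performed above, with made-up numbers: take all synoptics samples acquired between the start of the previous readout and the start of the current readout and average them, ignoring NaNs:

import numpy as np

timestamps = np.array([0.0, 5.0, 10.0, 15.0, 20.0])   # synoptics sample times [s]
values = np.array([20.1, np.nan, 20.3, 20.5, 20.6])   # e.g. a TRP temperature [deg C]

start_previous_readout, start_current_readout = 5.0, 20.0
selection = (timestamps >= start_previous_readout) & (timestamps <= start_current_readout)

print(np.nanmean(values[selection]))                   # 20.46...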
1738
- @click.group()
1739
- def cli():
1740
- pass
1741
-
1742
-
1743
- @cli.command()
1744
- def start():
1745
- multiprocessing.current_process().name = "fitsgen"
1746
-
1747
- # FIXME: Why is this line commented out?
1748
- # start_http_server(CTRL_SETTINGS.METRICS_PORT)
1749
-
1750
- # The Storage Manager must be active (otherwise the HK cannot be stored)
1751
-
1752
- if not is_storage_manager_active():
1753
- LOGGER.error("The Storage Manager is not running, start the core services before running the data acquisition.")
1754
- return
1755
-
1756
- if not is_dpu_cs_active():
1757
- LOGGER.critical("DPU Control Server must be running to be able to start the FITS generator.")
1758
- return
1759
-
1760
- FITSGenerator().run()
1761
-
1762
-
1763
- @cli.command()
1764
- def start_bg():
1765
-
1766
- invoke.run("fitsgen start", disown=True)
1767
-
1768
-
1769
- @cli.command()
1770
- def stop():
1771
-     """Stop the FITS Generation Control Server. """
1772
-
1773
-     # In the while True loop in the start command, _should_stop needs to force a break from the loop. When this happens
1774
-     # (and also when a keyboard interrupt has been caught), the monitoring socket needs to be closed (this needs to be
1775
-     # done in the TH-specific implementation of _start). Unregistering from the Storage Manager is done
1776
- # automatically.
1777
-
1778
- response = send_request("quit")
1779
-
1780
- if response == "ACK":
1781
- rich.print("FITS generation successfully terminated.")
1782
- else:
1783
- rich.print(f"[red] ERROR: {response}")
1784
-
1785
-
1786
- def _check_commander_status(commander, poller) -> bool:
1787
- """ Check the status of the commander.
1788
-
1789
- Checks whether a command has been received by the given commander.
1790
-
1791
- Returns: True if a quit command was received; False otherwise.
1792
-
1793
- Args:
1794
-         - commander: Commanding socket for the FITS generation.
1795
-         - poller: Poller for the FITS generation.
1796
- """
1797
-
1798
- socks = dict(poller.poll(timeout=5000)) # Timeout of 5s
1799
-
1800
- if commander in socks:
1801
- pickle_string = commander.recv()
1802
- command = pickle.loads(pickle_string)
1803
-
1804
- if command.lower() == "quit":
1805
-
1806
- commander.send(pickle.dumps("ACK"))
1807
- return True
1808
-
1809
- if command.lower() == "status":
1810
- response = dict(
1811
- status="ACK",
1812
- host=CTRL_SETTINGS.HOSTNAME,
1813
- command_port=CTRL_SETTINGS.COMMANDING_PORT
1814
- )
1815
- commander.send(pickle.dumps(response))
1816
-
1817
- return False
1818
-
1819
- return False
1820
-
1821
-
1822
- @cli.command()
1823
- def status():
1824
- """Print the status of the FITS Generation Control Server."""
1825
-
1826
- rich.print("FITS generation:")
1827
-
1828
- response = send_request("status")
1829
-
1830
- if response.get("status") == "ACK":
1831
- rich.print(" Status: [green]active")
1832
- rich.print(f" Hostname: {response.get('host')}")
1833
- rich.print(f" Commanding port: {response.get('command_port')}")
1834
- else:
1835
- rich.print(" Status: [red]not active")
1836
-
1837
- def send_request(command_request: str):
1838
-     """Sends a request to the FITS Generation Control Server and waits for a response.
1839
-
1840
- Args:
1841
- - command_request: Request.
1842
-
1843
- Returns: Response to the request.
1844
- """
1845
-
1846
- ctx = zmq.Context().instance()
1847
- endpoint = connect_address(CTRL_SETTINGS.PROTOCOL, CTRL_SETTINGS.HOSTNAME, CTRL_SETTINGS.COMMANDING_PORT)
1848
- socket = ctx.socket(zmq.REQ)
1849
- socket.connect(endpoint)
1850
-
1851
- socket.send(pickle.dumps(command_request))
1852
- rlist, _, _ = zmq.select([socket], [], [], timeout=TIMEOUT_RECV)
1853
-
1854
- if socket in rlist:
1855
- response = socket.recv()
1856
- response = pickle.loads(response)
1857
- else:
1858
- response = {"error": "Receive from ZeroMQ socket timed out for FITS generation Control Server."}
1859
- socket.close(linger=0)
1860
-
1861
- return response
1862
-
1863
-
1864
- @cli.command()
1865
- @click.argument('files', type=str, nargs=-1)
1866
- @click.option("--location", type=str, is_flag=False, default=None, help="Set the root folder for the output "
1867
- "(i.e. folder with /daily and /obs)")
1868
- @click.option("--setup_id", type=int, is_flag=False, default=None, help="Setup ID")
1869
- @click.option("--site_id", type=str, is_flag=False, default=None, help="Site ID")
1870
- def from_hdf5(files, location=None, setup_id=None, site_id=None):
1871
- """ Generate the FITS files for the given list of HDF5 files.
1872
-
1873
- Args:
1874
- - files: List of HDF5 filenames.
1875
-         - setup_id: Identifier of the setup that should be used. When not specified, the setup loaded in the
1876
- Configuration Manager will be used to retrieve information from.
1877
- - site_id: Identifier for the test site.
1878
- """
1879
-
1880
- setup = get_offline_setup(site_id=site_id, setup_id=setup_id)
1881
- location = location or get_data_storage_location()
1882
-
1883
- create_fits_from_hdf5(files, location=location, setup=setup)
1884
-
1885
-
1886
- @cli.command()
1887
- @click.argument('obsid', type=str)
1888
- @click.option("--input_dir", type=str, is_flag=False, default=None, help="Set the root folder for the input "
1889
- "(i.e. folder with /daily and /obs)")
1890
- @click.option("--output_dir", type=str, is_flag=False, default=None, help="Set the root folder for the output "
1891
- "(i.e. folder with /daily and /obs)")
1892
- @click.option("--setup_id", type=int, is_flag=False, default=None, help="Setup ID")
1893
- @click.option("--site_id", type=str, is_flag=False, default=None, help="Site ID")
1894
- def for_obsid(obsid, input_dir=None, output_dir=None, setup_id=None, site_id=None):
1895
- """ Generate the FITS files for the given obsid.
1896
-
1897
-     The setup that was loaded in the Configuration Manager during the given observation will be used to retrieve
1898
- information from.
1899
-
1900
- Args:
1901
- - obsid: Observation identifier [TEST_LAB or TEST_LAB_SETUP].
1902
-         - input_dir: Folder (with /daily and /obs sub-folders) from which the HDF5 files are read. If not specified,
1903
-           the `PLATO_DATA_STORAGE_LOCATION` environment variable will be used to construct the location.
1904
-         - output_dir: Folder (with /daily and /obs sub-folders) in which the FITS files will be written (in a
-           dedicated directory in the /obs folder). Defaults to the input directory.
1905
- """
1906
-
1907
- input_dir = input_dir or get_data_storage_location() # Location of HDF5 files (under /daily)
1908
- output_dir = output_dir or input_dir # Location of the FITS files that will be generated (under /obs)
1909
-
1910
- obsid = obsid_from_storage(obsid, data_dir=input_dir)
1911
- # Folder in the output /obs directory in which the FITS files will be stored (full path)
1912
- output_obs_folder = Path(f"{output_dir}/obs/{obsid}")
1913
- if not output_obs_folder.exists(): # If this directory doesn't exist yet, create it
1914
- os.makedirs(output_obs_folder)
1915
-
1916
- setup = get_offline_setup(site_id=site_id, setup_id=setup_id) # Setup (complete for the camera in question)
1917
-
1918
- hdf5_filenames = get_hdf5_filenames_for_obsid(obsid, data_dir=input_dir) # HDF5 files to process
1919
-
1920
- # Create FITS files (flat structure -> cubes)
1921
- create_fits_from_hdf5(hdf5_filenames, location=output_dir, setup=setup)
1922
-
1923
- # Add synoptics
1924
-
1925
- if find_file(f"{obsid_from_storage(obsid, data_dir=input_dir)}_{SYN_ORIGIN}_*.csv", root=output_obs_folder):
1926
- # Synoptics have already been re-processed (located in the directory to which the FITS files will be stored)
1927
- add_synoptics(obsid, fits_dir=output_dir, syn_dir=output_dir)
1928
- else:
1929
- # Use the original synoptics files
1930
- add_synoptics(obsid, fits_dir=output_dir, syn_dir=input_dir)
1931
-
1932
-
1933
- def get_offline_setup(site_id: str = None, setup_id: int = None):
1934
- """ Return setup to use for the off-line FITS generation.
1935
-
1936
- If the setup ID and site ID have been specified, the corresponding setup is used. Otherwise, the setup that is
1937
- currently loaded in the Configuration Manager is used.
1938
-
1939
- Args:
1940
- site_id: Identifier of the testhouse
1941
- setup_id: Identifier of the setup
1942
-
1943
- Returns:
1944
- - Setup to use for the off-line FITS generation.
1945
- """
1946
-
1947
- if setup_id is None:
1948
- return GlobalState.setup
1949
- else:
1950
- site_id = site_id or SITE.ID
1951
- return load_setup(setup_id=setup_id, site_id=site_id, from_disk=True)
1952
-
1953
-
1954
- def get_hdf5_filenames_for_obsid(obsid: str, data_dir: str = None) -> List:
1955
- """ Return list of HDF5 filenames that contribute to the given obsid.
1956
-
1957
- The given obsid can be specified in either of these two formats: TEST_LAB or TEST_LAB_SETUP. The obsid that is
1958
- stored in the HDF5 files is of format LAB_SETUP_TEST. In this method, we gather the list of HDF5 filenames for
1959
- which the combination (TEST, SITE) matches with the (TEST, SITE) combination from the given obsid. To do this, the
1960
- list of relevant ODs is composed, based on the first and last timestamp in the DPU HK file (this file will always
1961
- be present if data has been acquired). Then all HDF5 files for these ODs are looped over and the obsid stored in
1962
- there is compared with the given obsid. In case of a match, the HDF5 filename is added to the list.
1963
-
1964
- Args:
1965
- - obsid: Observation identifier [TEST_LAB or TEST_LAB_SETUP].
1966
- - data_dir: Full path to the directory in which the data resides. This is the folder with a sub-folder /daily,
1967
- in which the HDF5 files are stored.
1968
- """
1969
-
1970
- data_dir = data_dir or get_data_storage_location()
1971
-
1972
- # Determine in which location (i.e. in the folder of which OD in the /daily sub-folder of the data directory)
1973
- # the required HDF5 files are stored. This sub-folder carries the OD [yyyymmdd] as name.
1974
-
1975
- od_list = get_od(obsid, data_dir) # Obsid -> OD
1976
- LOGGER.info(f"OD for obsid {obsid}: {od_list}")
1977
-
1978
- obs_hdf5_files = []
1979
-
1980
- for od in od_list:
1981
-
1982
- day_dir = Path(f"{data_dir}/daily/{od}") # Sub-folder with the data for that OD
1983
-
1984
- daily_hdf5_filenames = glob.glob(str(day_dir / f"*.{HDF5.extension}"))
1985
-
1986
- for hdf5_filename in sorted(daily_hdf5_filenames):
1987
-
1988
- try:
1989
- with h5.get_file(hdf5_filename, mode="r", locking=False) as hdf5_file:
1990
-
1991
- if "/obsid" in hdf5_file:
1992
-
1993
- hdf5_obsid = h5.get_data(hdf5_file["/obsid"]).item().decode()
1994
-
1995
- if hdf5_obsid != "None":
1996
- hdf5_obsid = ObservationIdentifier.create_from_string(
1997
- hdf5_obsid, LAB_SETUP_TEST).create_id(order=TEST_LAB) # TEST_LAB
1998
- if hdf5_obsid in str(obsid):
1999
- obs_hdf5_files.append(hdf5_filename)
2000
-
2001
- except OSError as exc:
2002
- LOGGER.error(f"Couldn't open {hdf5_filename} ({exc=})")
2003
- except RuntimeError as exc:
2004
- LOGGER.debug(f"Unable to open HDF5 file: {exc}")
2005
-
2006
- return obs_hdf5_files
2007
-
2008
-
2009
- def get_od(obsid: str, data_dir: str = None):
2010
- """ Return list of OD(s) for the given obsid.
2011
-
2012
- The given obsid can be specified in either of these two formats: TEST_LAB or TEST_LAB_SETUP. In this method, we
2013
- determine during which OD(s) the given obsid was executed. To do this, the first and last timestamp from the DPU HK
2014
- file (this file will always be present if data has been acquired) are extracted. This file resides in the folder of
2015
- the given obsid in the /obs directory, with the name (i.e. obsid) in the format TEST_SITE_SETUP or TEST_SITE
2016
- (depending on how old the observation is). The obsid that is used in the filename follows the same pattern, so the
2017
- given obsid must be converted to that format.
2018
-
2019
- Args:
2020
- - obsid: Observation identifier [TEST_LAB or TEST_LAB_SETUP].
2021
- - data_dir: Full path to the directory in which the data resides. This is the folder with a sub-folder /daily,
2022
- in which the HDF5 files are stored.
2023
-
2024
- Returns: List of observation day [yyyymmdd].
2025
- """
2026
-
2027
- data_dir = data_dir or get_data_storage_location()
2028
- obsid = obsid_from_storage(obsid, data_dir=data_dir) # Convert the obsid to the correct format
2029
- obs_dir = f"{data_dir}/obs/{obsid}"
2030
-
2031
- try:
2032
- filename = str(find_file(f"{obsid}_DPU_*.csv", root=obs_dir))
2033
-
2034
- od_start = datetime.strptime(filename.split("_")[-2], "%Y%m%d") # First OD (from filename)
2035
- od_end = datetime.strptime(read_last_line(filename)[:10], "%Y-%m-%d") # Last OD (from last line)
2036
-
2037
- od = od_start
2038
- delta = timedelta(days=1)
2039
- od_list = []
2040
-
2041
- while od <= od_end:
2042
-
2043
- od_list.append(od.strftime("%Y%m%d"))
2044
-
2045
- od += delta
2046
-
2047
- return od_list
2048
- except IndexError:
2049
- raise Abort(f"DPU was not running during obsid {obsid}: no data could be acquired")
2050
-
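A self-contained sketch of the OD list construction above, for made-up start and end days:

from datetime import datetime, timedelta

od_start = datetime.strptime("20230801", "%Y%m%d")
od_end = datetime.strptime("20230803", "%Y%m%d")

od_list = []
od = od_start
while od <= od_end:
    od_list.append(od.strftime("%Y%m%d"))
    od += timedelta(days=1)

print(od_list)  # ['20230801', '20230802', '20230803']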
2051
-
2052
- def get_obsid(od: str, index: int, day_dir: str) -> int:
2053
- """ Return the obsid stored in the HDF5 file for the given OD and the given index.
2054
-
2055
- Args:
2056
- - od: Observation day.
2057
- - index: Index of the HDF5 file.
2058
- - day_dir: Full path to the directory with the HDF5 files for the given OD.
2059
-
2060
- Returns: Obsid as stored in the HDF5 file for the given OD and the given index (LAB_SETUP_TEST).
2061
- """
2062
-
2063
- if index == 0: # For the first file, no index is used
2064
- hdf5_filename = f"{day_dir}/{od}_{SITE.ID}_{N_FEE_SETTINGS.ORIGIN_SPW_DATA}.hdf5"
2065
- else:
2066
- hdf5_filename = f"{day_dir}/{od}_{SITE.ID}_{N_FEE_SETTINGS.ORIGIN_SPW_DATA}_{index:05d}.hdf5"
2067
-
2068
- with h5.get_file(hdf5_filename, mode="r", locking=False) as hdf5_file:
2069
- try:
2070
- return int(hdf5_file["obsid"][()].decode().split("_")[-1])
2071
-         except (KeyError, ValueError):
2072
- return None
2073
-
2074
-
2075
- if __name__ == "__main__":
2076
-
2077
- sys.exit(cli())