cgse 2024.7.0__py3-none-any.whl → 2025.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (664)
  1. README.md +27 -0
  2. bump.py +85 -0
  3. cgse-2025.0.1.dist-info/METADATA +38 -0
  4. cgse-2025.0.1.dist-info/RECORD +5 -0
  5. {cgse-2024.7.0.dist-info → cgse-2025.0.1.dist-info}/WHEEL +1 -2
  6. cgse-2024.7.0.dist-info/COPYING +0 -674
  7. cgse-2024.7.0.dist-info/COPYING.LESSER +0 -165
  8. cgse-2024.7.0.dist-info/METADATA +0 -144
  9. cgse-2024.7.0.dist-info/RECORD +0 -660
  10. cgse-2024.7.0.dist-info/entry_points.txt +0 -75
  11. cgse-2024.7.0.dist-info/top_level.txt +0 -2
  12. egse/__init__.py +0 -12
  13. egse/__main__.py +0 -32
  14. egse/aeu/aeu.py +0 -5238
  15. egse/aeu/aeu_awg.yaml +0 -265
  16. egse/aeu/aeu_crio.yaml +0 -273
  17. egse/aeu/aeu_cs.py +0 -627
  18. egse/aeu/aeu_devif.py +0 -321
  19. egse/aeu/aeu_main_ui.py +0 -903
  20. egse/aeu/aeu_metrics.py +0 -131
  21. egse/aeu/aeu_protocol.py +0 -463
  22. egse/aeu/aeu_psu.yaml +0 -204
  23. egse/aeu/aeu_ui.py +0 -873
  24. egse/aeu/arbdata/FccdRead.arb +0 -2
  25. egse/aeu/arbdata/FccdRead_min_points.arb +0 -2
  26. egse/aeu/arbdata/HeaterSync_FccdRead.arb +0 -2
  27. egse/aeu/arbdata/HeaterSync_ccdRead25.arb +0 -2
  28. egse/aeu/arbdata/HeaterSync_ccdRead31_25.arb +0 -2
  29. egse/aeu/arbdata/HeaterSync_ccdRead37_50.arb +0 -2
  30. egse/aeu/arbdata/HeaterSync_ccdRead43_75.arb +0 -2
  31. egse/aeu/arbdata/HeaterSync_ccdRead50.arb +0 -2
  32. egse/aeu/arbdata/Heater_FccdRead_min_points.arb +0 -2
  33. egse/aeu/arbdata/ccdRead25.arb +0 -2
  34. egse/aeu/arbdata/ccdRead25_150ms.arb +0 -2
  35. egse/aeu/arbdata/ccdRead31_25.arb +0 -2
  36. egse/aeu/arbdata/ccdRead31_25_150ms.arb +0 -2
  37. egse/aeu/arbdata/ccdRead37_50.arb +0 -2
  38. egse/aeu/arbdata/ccdRead37_50_150ms.arb +0 -2
  39. egse/aeu/arbdata/ccdRead43_75.arb +0 -2
  40. egse/aeu/arbdata/ccdRead43_75_150ms.arb +0 -2
  41. egse/aeu/arbdata/ccdRead50.arb +0 -2
  42. egse/aeu/arbdata/ccdRead50_150ms.arb +0 -2
  43. egse/alert/__init__.py +0 -1049
  44. egse/alert/alertman.yaml +0 -37
  45. egse/alert/alertman_cs.py +0 -233
  46. egse/alert/alertman_ui.py +0 -600
  47. egse/alert/gsm/beaglebone.py +0 -138
  48. egse/alert/gsm/beaglebone.yaml +0 -51
  49. egse/alert/gsm/beaglebone_cs.py +0 -108
  50. egse/alert/gsm/beaglebone_devif.py +0 -122
  51. egse/alert/gsm/beaglebone_protocol.py +0 -46
  52. egse/bits.py +0 -318
  53. egse/camera.py +0 -44
  54. egse/collimator/__init__.py +0 -0
  55. egse/collimator/fcul/__init__.py +0 -0
  56. egse/collimator/fcul/ogse.py +0 -1077
  57. egse/collimator/fcul/ogse.yaml +0 -14
  58. egse/collimator/fcul/ogse_cs.py +0 -154
  59. egse/collimator/fcul/ogse_devif.py +0 -358
  60. egse/collimator/fcul/ogse_protocol.py +0 -132
  61. egse/collimator/fcul/ogse_sim.py +0 -431
  62. egse/collimator/fcul/ogse_ui.py +0 -1108
  63. egse/command.py +0 -699
  64. egse/config.py +0 -410
  65. egse/confman/__init__.py +0 -1058
  66. egse/confman/confman.yaml +0 -70
  67. egse/confman/confman_cs.py +0 -240
  68. egse/confman/confman_ui.py +0 -381
  69. egse/confman/setup_ui.py +0 -565
  70. egse/control.py +0 -632
  71. egse/coordinates/__init__.py +0 -534
  72. egse/coordinates/avoidance.py +0 -100
  73. egse/coordinates/cslmodel.py +0 -127
  74. egse/coordinates/laser_tracker_to_dict.py +0 -122
  75. egse/coordinates/point.py +0 -707
  76. egse/coordinates/pyplot.py +0 -194
  77. egse/coordinates/referenceFrame.py +0 -1279
  78. egse/coordinates/refmodel.py +0 -737
  79. egse/coordinates/rotationMatrix.py +0 -85
  80. egse/coordinates/transform3d_addon.py +0 -419
  81. egse/csl/__init__.py +0 -50
  82. egse/csl/commanding.py +0 -78
  83. egse/csl/icons/hexapod-connected-selected.svg +0 -30
  84. egse/csl/icons/hexapod-connected.svg +0 -30
  85. egse/csl/icons/hexapod-homing-selected.svg +0 -68
  86. egse/csl/icons/hexapod-homing.svg +0 -68
  87. egse/csl/icons/hexapod-retract-selected.svg +0 -56
  88. egse/csl/icons/hexapod-retract.svg +0 -51
  89. egse/csl/icons/hexapod-zero-selected.svg +0 -56
  90. egse/csl/icons/hexapod-zero.svg +0 -56
  91. egse/csl/icons/logo-puna.svg +0 -92
  92. egse/csl/icons/stop.svg +0 -1
  93. egse/csl/initialisation.py +0 -102
  94. egse/csl/mech_pos_settings.yaml +0 -18
  95. egse/das.py +0 -1240
  96. egse/das.yaml +0 -7
  97. egse/data/conf/SETUP_CSL_00000_170620_150000.yaml +0 -5
  98. egse/data/conf/SETUP_CSL_00001_170620_151010.yaml +0 -69
  99. egse/data/conf/SETUP_CSL_00002_170620_151020.yaml +0 -69
  100. egse/data/conf/SETUP_CSL_00003_170620_151030.yaml +0 -69
  101. egse/data/conf/SETUP_CSL_00004_170620_151040.yaml +0 -69
  102. egse/data/conf/SETUP_CSL_00005_170620_151050.yaml +0 -69
  103. egse/data/conf/SETUP_CSL_00006_170620_151060.yaml +0 -69
  104. egse/data/conf/SETUP_CSL_00007_170620_151070.yaml +0 -69
  105. egse/data/conf/SETUP_CSL_00008_170620_151080.yaml +0 -75
  106. egse/data/conf/SETUP_CSL_00010_210308_083016.yaml +0 -138
  107. egse/data/conf/SETUP_INTA_00000_170620_150000.yaml +0 -4
  108. egse/data/conf/SETUP_SRON_00000_170620_150000.yaml +0 -4
  109. egse/decorators.py +0 -514
  110. egse/device.py +0 -269
  111. egse/dpu/__init__.py +0 -2698
  112. egse/dpu/ccd_ui.py +0 -514
  113. egse/dpu/dpu.py +0 -783
  114. egse/dpu/dpu.yaml +0 -153
  115. egse/dpu/dpu_cs.py +0 -272
  116. egse/dpu/dpu_ui.py +0 -671
  117. egse/dpu/fitsgen.py +0 -2096
  118. egse/dpu/fitsgen_ui.py +0 -399
  119. egse/dpu/hdf5_model.py +0 -332
  120. egse/dpu/hdf5_ui.py +0 -277
  121. egse/dpu/hdf5_viewer.py +0 -506
  122. egse/dpu/hk_ui.py +0 -468
  123. egse/dpu_commands.py +0 -81
  124. egse/dsi/__init__.py +0 -33
  125. egse/dsi/_libesl.py +0 -232
  126. egse/dsi/constants.py +0 -296
  127. egse/dsi/esl.py +0 -630
  128. egse/dsi/rmap.py +0 -444
  129. egse/dsi/rmapci.py +0 -39
  130. egse/dsi/spw.py +0 -335
  131. egse/dsi/spw_state.py +0 -29
  132. egse/dummy.py +0 -318
  133. egse/dyndummy.py +0 -179
  134. egse/env.py +0 -278
  135. egse/exceptions.py +0 -88
  136. egse/fdir/__init__.py +0 -26
  137. egse/fdir/fdir_manager.py +0 -85
  138. egse/fdir/fdir_manager.yaml +0 -37
  139. egse/fdir/fdir_manager_controller.py +0 -136
  140. egse/fdir/fdir_manager_cs.py +0 -164
  141. egse/fdir/fdir_manager_interface.py +0 -15
  142. egse/fdir/fdir_remote.py +0 -73
  143. egse/fdir/fdir_remote.yaml +0 -30
  144. egse/fdir/fdir_remote_controller.py +0 -30
  145. egse/fdir/fdir_remote_cs.py +0 -94
  146. egse/fdir/fdir_remote_interface.py +0 -9
  147. egse/fdir/fdir_remote_popup.py +0 -26
  148. egse/fee/__init__.py +0 -106
  149. egse/fee/f_fee_register.yaml +0 -43
  150. egse/fee/feesim.py +0 -914
  151. egse/fee/n_fee_hk.py +0 -768
  152. egse/fee/nfee.py +0 -188
  153. egse/filterwheel/__init__.py +0 -4
  154. egse/filterwheel/eksma/__init__.py +0 -49
  155. egse/filterwheel/eksma/fw8smc4.py +0 -657
  156. egse/filterwheel/eksma/fw8smc4.yaml +0 -121
  157. egse/filterwheel/eksma/fw8smc4_cs.py +0 -144
  158. egse/filterwheel/eksma/fw8smc4_devif.py +0 -473
  159. egse/filterwheel/eksma/fw8smc4_protocol.py +0 -82
  160. egse/filterwheel/eksma/fw8smc4_ui.py +0 -940
  161. egse/filterwheel/eksma/fw8smc5.py +0 -115
  162. egse/filterwheel/eksma/fw8smc5.yaml +0 -105
  163. egse/filterwheel/eksma/fw8smc5_controller.py +0 -307
  164. egse/filterwheel/eksma/fw8smc5_cs.py +0 -141
  165. egse/filterwheel/eksma/fw8smc5_interface.py +0 -65
  166. egse/filterwheel/eksma/fw8smc5_simulator.py +0 -29
  167. egse/filterwheel/eksma/fw8smc5_ui.py +0 -1065
  168. egse/filterwheel/eksma/testpythonfw.py +0 -215
  169. egse/fov/__init__.py +0 -65
  170. egse/fov/fov_hk.py +0 -710
  171. egse/fov/fov_ui.py +0 -859
  172. egse/fov/fov_ui_controller.py +0 -140
  173. egse/fov/fov_ui_model.py +0 -200
  174. egse/fov/fov_ui_view.py +0 -345
  175. egse/gimbal/__init__.py +0 -32
  176. egse/gimbal/symetrie/__init__.py +0 -26
  177. egse/gimbal/symetrie/alpha.py +0 -586
  178. egse/gimbal/symetrie/generic_gimbal_ui.py +0 -1521
  179. egse/gimbal/symetrie/gimbal.py +0 -877
  180. egse/gimbal/symetrie/gimbal.yaml +0 -168
  181. egse/gimbal/symetrie/gimbal_cs.py +0 -183
  182. egse/gimbal/symetrie/gimbal_protocol.py +0 -138
  183. egse/gimbal/symetrie/gimbal_ui.py +0 -361
  184. egse/gimbal/symetrie/pmac.py +0 -1006
  185. egse/gimbal/symetrie/pmac_regex.py +0 -83
  186. egse/graph.py +0 -132
  187. egse/gui/__init__.py +0 -47
  188. egse/gui/buttons.py +0 -378
  189. egse/gui/focalplane.py +0 -1285
  190. egse/gui/formatter.py +0 -10
  191. egse/gui/led.py +0 -162
  192. egse/gui/limitswitch.py +0 -143
  193. egse/gui/mechanisms.py +0 -587
  194. egse/gui/states.py +0 -148
  195. egse/gui/stripchart.py +0 -729
  196. egse/gui/styles.qss +0 -48
  197. egse/gui/switch.py +0 -112
  198. egse/h5.py +0 -274
  199. egse/help/__init__.py +0 -0
  200. egse/help/help_ui.py +0 -126
  201. egse/hexapod/__init__.py +0 -32
  202. egse/hexapod/symetrie/__init__.py +0 -137
  203. egse/hexapod/symetrie/alpha.py +0 -874
  204. egse/hexapod/symetrie/dynalpha.py +0 -1387
  205. egse/hexapod/symetrie/hexapod_ui.py +0 -1516
  206. egse/hexapod/symetrie/pmac.py +0 -1010
  207. egse/hexapod/symetrie/pmac_regex.py +0 -83
  208. egse/hexapod/symetrie/puna.py +0 -1167
  209. egse/hexapod/symetrie/puna.yaml +0 -193
  210. egse/hexapod/symetrie/puna_cs.py +0 -195
  211. egse/hexapod/symetrie/puna_protocol.py +0 -134
  212. egse/hexapod/symetrie/puna_ui.py +0 -433
  213. egse/hexapod/symetrie/punaplus.py +0 -107
  214. egse/hexapod/symetrie/zonda.py +0 -872
  215. egse/hexapod/symetrie/zonda.yaml +0 -337
  216. egse/hexapod/symetrie/zonda_cs.py +0 -172
  217. egse/hexapod/symetrie/zonda_devif.py +0 -414
  218. egse/hexapod/symetrie/zonda_protocol.py +0 -123
  219. egse/hexapod/symetrie/zonda_ui.py +0 -449
  220. egse/hk.py +0 -791
  221. egse/icons/aeu-cs-start.svg +0 -117
  222. egse/icons/aeu-cs-stop.svg +0 -118
  223. egse/icons/aeu-cs.svg +0 -107
  224. egse/icons/aeu_cs-started.svg +0 -112
  225. egse/icons/aeu_cs-stopped.svg +0 -112
  226. egse/icons/aeu_cs.svg +0 -55
  227. egse/icons/alert.svg +0 -1
  228. egse/icons/arrow-double-left.png +0 -0
  229. egse/icons/arrow-double-right.png +0 -0
  230. egse/icons/arrow-up.svg +0 -11
  231. egse/icons/backward.svg +0 -1
  232. egse/icons/busy.svg +0 -1
  233. egse/icons/cleaning.svg +0 -115
  234. egse/icons/color-scheme.svg +0 -1
  235. egse/icons/cs-connected-alert.svg +0 -91
  236. egse/icons/cs-connected-disabled.svg +0 -43
  237. egse/icons/cs-connected.svg +0 -89
  238. egse/icons/cs-not-connected.svg +0 -44
  239. egse/icons/double-left-arrow.svg +0 -1
  240. egse/icons/double-right-arrow.svg +0 -1
  241. egse/icons/erase-disabled.svg +0 -19
  242. egse/icons/erase.svg +0 -59
  243. egse/icons/fitsgen-start.svg +0 -47
  244. egse/icons/fitsgen-stop.svg +0 -48
  245. egse/icons/fitsgen.svg +0 -1
  246. egse/icons/forward.svg +0 -1
  247. egse/icons/fov-hk-start.svg +0 -33
  248. egse/icons/fov-hk-stop.svg +0 -37
  249. egse/icons/fov-hk.svg +0 -1
  250. egse/icons/front-desk.svg +0 -1
  251. egse/icons/home-actioned.svg +0 -15
  252. egse/icons/home-disabled.svg +0 -15
  253. egse/icons/home.svg +0 -13
  254. egse/icons/info.svg +0 -1
  255. egse/icons/invalid.png +0 -0
  256. egse/icons/led-green.svg +0 -20
  257. egse/icons/led-grey.svg +0 -20
  258. egse/icons/led-orange.svg +0 -20
  259. egse/icons/led-red.svg +0 -20
  260. egse/icons/led-square-green.svg +0 -134
  261. egse/icons/led-square-grey.svg +0 -134
  262. egse/icons/led-square-orange.svg +0 -134
  263. egse/icons/led-square-red.svg +0 -134
  264. egse/icons/limit-switch-all-green.svg +0 -115
  265. egse/icons/limit-switch-all-red.svg +0 -117
  266. egse/icons/limit-switch-el+.svg +0 -116
  267. egse/icons/limit-switch-el-.svg +0 -117
  268. egse/icons/location-marker.svg +0 -1
  269. egse/icons/logo-dpu.svg +0 -48
  270. egse/icons/logo-gimbal.svg +0 -112
  271. egse/icons/logo-huber.svg +0 -23
  272. egse/icons/logo-ogse.svg +0 -31
  273. egse/icons/logo-puna.svg +0 -92
  274. egse/icons/logo-tcs.svg +0 -29
  275. egse/icons/logo-zonda.svg +0 -66
  276. egse/icons/maximize.svg +0 -1
  277. egse/icons/meter.svg +0 -1
  278. egse/icons/more.svg +0 -45
  279. egse/icons/n-fee-hk-start.svg +0 -24
  280. egse/icons/n-fee-hk-stop.svg +0 -25
  281. egse/icons/n-fee-hk.svg +0 -83
  282. egse/icons/observing-off.svg +0 -46
  283. egse/icons/observing-on.svg +0 -46
  284. egse/icons/open-document-hdf5.png +0 -0
  285. egse/icons/open-document-hdf5.svg +0 -21
  286. egse/icons/ops-mode.svg +0 -1
  287. egse/icons/play-green.svg +0 -17
  288. egse/icons/plugged-disabled.svg +0 -27
  289. egse/icons/plugged.svg +0 -21
  290. egse/icons/pm_ui.svg +0 -1
  291. egse/icons/power-button-green.svg +0 -27
  292. egse/icons/power-button-red.svg +0 -27
  293. egse/icons/power-button.svg +0 -27
  294. egse/icons/radar.svg +0 -1
  295. egse/icons/radioactive.svg +0 -2
  296. egse/icons/reload.svg +0 -1
  297. egse/icons/remote-control-off.svg +0 -28
  298. egse/icons/remote-control-on.svg +0 -28
  299. egse/icons/repeat-blue.svg +0 -15
  300. egse/icons/repeat.svg +0 -1
  301. egse/icons/settings.svg +0 -1
  302. egse/icons/shrink.svg +0 -1
  303. egse/icons/shutter.svg +0 -1
  304. egse/icons/sign-off.svg +0 -1
  305. egse/icons/sign-on.svg +0 -1
  306. egse/icons/sim-mode.svg +0 -1
  307. egse/icons/small-buttons-go.svg +0 -20
  308. egse/icons/small-buttons-minus.svg +0 -51
  309. egse/icons/small-buttons-plus.svg +0 -51
  310. egse/icons/sponge.svg +0 -220
  311. egse/icons/start-button-disabled.svg +0 -84
  312. egse/icons/start-button.svg +0 -50
  313. egse/icons/stop-button-disabled.svg +0 -84
  314. egse/icons/stop-button.svg +0 -50
  315. egse/icons/stop-red.svg +0 -17
  316. egse/icons/stop.svg +0 -1
  317. egse/icons/switch-disabled-square.svg +0 -87
  318. egse/icons/switch-disabled.svg +0 -15
  319. egse/icons/switch-off-square.svg +0 -87
  320. egse/icons/switch-off.svg +0 -72
  321. egse/icons/switch-on-square.svg +0 -87
  322. egse/icons/switch-on.svg +0 -61
  323. egse/icons/temperature-control.svg +0 -44
  324. egse/icons/th_ui_logo.svg +0 -1
  325. egse/icons/unplugged.svg +0 -23
  326. egse/icons/unvalid.png +0 -0
  327. egse/icons/user-interface.svg +0 -1
  328. egse/icons/vacuum.svg +0 -1
  329. egse/icons/valid.png +0 -0
  330. egse/icons/zoom-to-pixel-dark.svg +0 -64
  331. egse/icons/zoom-to-pixel-white.svg +0 -36
  332. egse/images/big-rotation-stage.png +0 -0
  333. egse/images/connected-100.png +0 -0
  334. egse/images/cross.svg +0 -6
  335. egse/images/disconnected-100.png +0 -0
  336. egse/images/gui-icon.png +0 -0
  337. egse/images/home.svg +0 -6
  338. egse/images/info-icon.png +0 -0
  339. egse/images/led-black.svg +0 -89
  340. egse/images/led-green.svg +0 -85
  341. egse/images/led-orange.svg +0 -85
  342. egse/images/led-red.svg +0 -85
  343. egse/images/load-icon.png +0 -0
  344. egse/images/load-setup.png +0 -0
  345. egse/images/load.png +0 -0
  346. egse/images/pause.png +0 -0
  347. egse/images/play-button.svg +0 -8
  348. egse/images/play.png +0 -0
  349. egse/images/process-status.png +0 -0
  350. egse/images/restart.png +0 -0
  351. egse/images/search.png +0 -0
  352. egse/images/sma.png +0 -0
  353. egse/images/start.png +0 -0
  354. egse/images/stop-button.svg +0 -8
  355. egse/images/stop.png +0 -0
  356. egse/images/switch-off.svg +0 -48
  357. egse/images/switch-on.svg +0 -48
  358. egse/images/undo.png +0 -0
  359. egse/images/update-button.svg +0 -11
  360. egse/imageviewer/exposureselection.py +0 -475
  361. egse/imageviewer/imageviewer.py +0 -198
  362. egse/imageviewer/matchfocalplane.py +0 -179
  363. egse/imageviewer/subfieldposition.py +0 -133
  364. egse/lampcontrol/__init__.py +0 -4
  365. egse/lampcontrol/beaglebone/beaglebone.py +0 -178
  366. egse/lampcontrol/beaglebone/beaglebone.yaml +0 -62
  367. egse/lampcontrol/beaglebone/beaglebone_cs.py +0 -106
  368. egse/lampcontrol/beaglebone/beaglebone_devif.py +0 -150
  369. egse/lampcontrol/beaglebone/beaglebone_protocol.py +0 -73
  370. egse/lampcontrol/energetiq/__init__.py +0 -22
  371. egse/lampcontrol/energetiq/eq99.yaml +0 -98
  372. egse/lampcontrol/energetiq/lampEQ99.py +0 -283
  373. egse/lampcontrol/energetiq/lampEQ99_cs.py +0 -128
  374. egse/lampcontrol/energetiq/lampEQ99_devif.py +0 -158
  375. egse/lampcontrol/energetiq/lampEQ99_encode_decode_errors.py +0 -73
  376. egse/lampcontrol/energetiq/lampEQ99_protocol.py +0 -71
  377. egse/lampcontrol/energetiq/lampEQ99_ui.py +0 -465
  378. egse/lib/CentOS-7/EtherSpaceLink_v34_86.dylib +0 -0
  379. egse/lib/CentOS-8/ESL-RMAP_v34_86.dylib +0 -0
  380. egse/lib/CentOS-8/EtherSpaceLink_v34_86.dylib +0 -0
  381. egse/lib/Debian/ESL-RMAP_v34_86.dylib +0 -0
  382. egse/lib/Debian/EtherSpaceLink_v34_86.dylib +0 -0
  383. egse/lib/Debian/libetherspacelink_v35_21.dylib +0 -0
  384. egse/lib/Linux/ESL-RMAP_v34_86.dylib +0 -0
  385. egse/lib/Linux/EtherSpaceLink_v34_86.dylib +0 -0
  386. egse/lib/Ubuntu-20/ESL-RMAP_v34_86.dylib +0 -0
  387. egse/lib/Ubuntu-20/EtherSpaceLink_v34_86.dylib +0 -0
  388. egse/lib/gssw/python3-gssw_2.2.3+31f63c9f-1_all.deb +0 -0
  389. egse/lib/ximc/__pycache__/pyximc.cpython-38 2.pyc +0 -0
  390. egse/lib/ximc/__pycache__/pyximc.cpython-38.pyc +0 -0
  391. egse/lib/ximc/libximc.framework/Frameworks/libbindy.dylib +0 -0
  392. egse/lib/ximc/libximc.framework/Frameworks/libxiwrapper.dylib +0 -0
  393. egse/lib/ximc/libximc.framework/Headers/ximc.h +0 -5510
  394. egse/lib/ximc/libximc.framework/Resources/Info.plist +0 -42
  395. egse/lib/ximc/libximc.framework/Resources/keyfile.sqlite +0 -0
  396. egse/lib/ximc/libximc.framework/libbindy.so +0 -0
  397. egse/lib/ximc/libximc.framework/libximc +0 -0
  398. egse/lib/ximc/libximc.framework/libximc.so +0 -0
  399. egse/lib/ximc/libximc.framework/libximc.so.7.0.0 +0 -0
  400. egse/lib/ximc/libximc.framework/libxiwrapper.so +0 -0
  401. egse/lib/ximc/pyximc.py +0 -922
  402. egse/listener.py +0 -179
  403. egse/logger/__init__.py +0 -243
  404. egse/logger/log_cs.py +0 -321
  405. egse/metrics.py +0 -102
  406. egse/mixin.py +0 -464
  407. egse/monitoring.py +0 -95
  408. egse/ni/alarms/__init__.py +0 -26
  409. egse/ni/alarms/cdaq9375.py +0 -300
  410. egse/ni/alarms/cdaq9375.yaml +0 -89
  411. egse/ni/alarms/cdaq9375_cs.py +0 -130
  412. egse/ni/alarms/cdaq9375_devif.py +0 -183
  413. egse/ni/alarms/cdaq9375_protocol.py +0 -48
  414. egse/obs_inspection.py +0 -165
  415. egse/observer.py +0 -41
  416. egse/obsid.py +0 -163
  417. egse/powermeter/__init__.py +0 -0
  418. egse/powermeter/ni/__init__.py +0 -38
  419. egse/powermeter/ni/cdaq9184.py +0 -224
  420. egse/powermeter/ni/cdaq9184.yaml +0 -73
  421. egse/powermeter/ni/cdaq9184_cs.py +0 -130
  422. egse/powermeter/ni/cdaq9184_devif.py +0 -201
  423. egse/powermeter/ni/cdaq9184_protocol.py +0 -48
  424. egse/powermeter/ni/cdaq9184_ui.py +0 -544
  425. egse/powermeter/thorlabs/__init__.py +0 -25
  426. egse/powermeter/thorlabs/pm100a.py +0 -380
  427. egse/powermeter/thorlabs/pm100a.yaml +0 -132
  428. egse/powermeter/thorlabs/pm100a_cs.py +0 -136
  429. egse/powermeter/thorlabs/pm100a_devif.py +0 -127
  430. egse/powermeter/thorlabs/pm100a_protocol.py +0 -80
  431. egse/powermeter/thorlabs/pm100a_ui.py +0 -725
  432. egse/process.py +0 -451
  433. egse/procman/__init__.py +0 -834
  434. egse/procman/cannot_start_process_popup.py +0 -43
  435. egse/procman/procman.yaml +0 -49
  436. egse/procman/procman_cs.py +0 -201
  437. egse/procman/procman_ui.py +0 -2081
  438. egse/protocol.py +0 -605
  439. egse/proxy.py +0 -531
  440. egse/randomwalk.py +0 -140
  441. egse/reg.py +0 -585
  442. egse/reload.py +0 -122
  443. egse/reprocess.py +0 -693
  444. egse/resource.py +0 -333
  445. egse/rmap.py +0 -406
  446. egse/rst.py +0 -135
  447. egse/search.py +0 -182
  448. egse/serialdevice.py +0 -190
  449. egse/services.py +0 -247
  450. egse/services.yaml +0 -68
  451. egse/settings.py +0 -379
  452. egse/settings.yaml +0 -980
  453. egse/setup.py +0 -1181
  454. egse/shutter/__init__.py +0 -0
  455. egse/shutter/thorlabs/__init__.py +0 -19
  456. egse/shutter/thorlabs/ksc101.py +0 -205
  457. egse/shutter/thorlabs/ksc101.yaml +0 -105
  458. egse/shutter/thorlabs/ksc101_cs.py +0 -136
  459. egse/shutter/thorlabs/ksc101_devif.py +0 -201
  460. egse/shutter/thorlabs/ksc101_protocol.py +0 -71
  461. egse/shutter/thorlabs/ksc101_ui.py +0 -548
  462. egse/shutter/thorlabs/sc10.py +0 -82
  463. egse/shutter/thorlabs/sc10.yaml +0 -52
  464. egse/shutter/thorlabs/sc10_controller.py +0 -81
  465. egse/shutter/thorlabs/sc10_cs.py +0 -108
  466. egse/shutter/thorlabs/sc10_interface.py +0 -25
  467. egse/shutter/thorlabs/sc10_simulator.py +0 -30
  468. egse/simulator.py +0 -41
  469. egse/slack.py +0 -61
  470. egse/socketdevice.py +0 -218
  471. egse/sockets.py +0 -218
  472. egse/spw.py +0 -1401
  473. egse/stages/__init__.py +0 -12
  474. egse/stages/aerotech/ensemble.py +0 -245
  475. egse/stages/aerotech/ensemble.yaml +0 -205
  476. egse/stages/aerotech/ensemble_controller.py +0 -275
  477. egse/stages/aerotech/ensemble_cs.py +0 -110
  478. egse/stages/aerotech/ensemble_interface.py +0 -132
  479. egse/stages/aerotech/ensemble_parameters.py +0 -433
  480. egse/stages/aerotech/ensemble_simulator.py +0 -27
  481. egse/stages/aerotech/mgse_sim.py +0 -188
  482. egse/stages/arun/smd3.py +0 -110
  483. egse/stages/arun/smd3.yaml +0 -68
  484. egse/stages/arun/smd3_controller.py +0 -470
  485. egse/stages/arun/smd3_cs.py +0 -112
  486. egse/stages/arun/smd3_interface.py +0 -53
  487. egse/stages/arun/smd3_simulator.py +0 -27
  488. egse/stages/arun/smd3_stop.py +0 -16
  489. egse/stages/huber/__init__.py +0 -49
  490. egse/stages/huber/smc9300.py +0 -920
  491. egse/stages/huber/smc9300.yaml +0 -63
  492. egse/stages/huber/smc9300_cs.py +0 -178
  493. egse/stages/huber/smc9300_devif.py +0 -345
  494. egse/stages/huber/smc9300_protocol.py +0 -113
  495. egse/stages/huber/smc9300_sim.py +0 -547
  496. egse/stages/huber/smc9300_ui.py +0 -973
  497. egse/state.py +0 -173
  498. egse/statemachine.py +0 -274
  499. egse/storage/__init__.py +0 -1067
  500. egse/storage/persistence.py +0 -2295
  501. egse/storage/storage.yaml +0 -79
  502. egse/storage/storage_cs.py +0 -231
  503. egse/styles/dark.qss +0 -343
  504. egse/styles/default.qss +0 -48
  505. egse/synoptics/__init__.py +0 -417
  506. egse/synoptics/syn.yaml +0 -9
  507. egse/synoptics/syn_cs.py +0 -195
  508. egse/system.py +0 -1611
  509. egse/tcs/__init__.py +0 -14
  510. egse/tcs/tcs.py +0 -879
  511. egse/tcs/tcs.yaml +0 -14
  512. egse/tcs/tcs_cs.py +0 -202
  513. egse/tcs/tcs_devif.py +0 -292
  514. egse/tcs/tcs_protocol.py +0 -180
  515. egse/tcs/tcs_sim.py +0 -177
  516. egse/tcs/tcs_ui.py +0 -543
  517. egse/tdms.py +0 -171
  518. egse/tempcontrol/__init__.py +0 -23
  519. egse/tempcontrol/agilent/agilent34970.py +0 -109
  520. egse/tempcontrol/agilent/agilent34970.yaml +0 -44
  521. egse/tempcontrol/agilent/agilent34970_cs.py +0 -114
  522. egse/tempcontrol/agilent/agilent34970_devif.py +0 -182
  523. egse/tempcontrol/agilent/agilent34970_protocol.py +0 -96
  524. egse/tempcontrol/agilent/agilent34972.py +0 -111
  525. egse/tempcontrol/agilent/agilent34972.yaml +0 -44
  526. egse/tempcontrol/agilent/agilent34972_cs.py +0 -115
  527. egse/tempcontrol/agilent/agilent34972_devif.py +0 -189
  528. egse/tempcontrol/agilent/agilent34972_protocol.py +0 -98
  529. egse/tempcontrol/beaglebone/beaglebone.py +0 -341
  530. egse/tempcontrol/beaglebone/beaglebone.yaml +0 -110
  531. egse/tempcontrol/beaglebone/beaglebone_cs.py +0 -117
  532. egse/tempcontrol/beaglebone/beaglebone_protocol.py +0 -134
  533. egse/tempcontrol/beaglebone/beaglebone_ui.py +0 -674
  534. egse/tempcontrol/digalox/digalox.py +0 -115
  535. egse/tempcontrol/digalox/digalox.yaml +0 -36
  536. egse/tempcontrol/digalox/digalox_cs.py +0 -108
  537. egse/tempcontrol/digalox/digalox_protocol.py +0 -56
  538. egse/tempcontrol/keithley/__init__.py +0 -33
  539. egse/tempcontrol/keithley/daq6510.py +0 -662
  540. egse/tempcontrol/keithley/daq6510.yaml +0 -105
  541. egse/tempcontrol/keithley/daq6510_cs.py +0 -163
  542. egse/tempcontrol/keithley/daq6510_devif.py +0 -343
  543. egse/tempcontrol/keithley/daq6510_protocol.py +0 -79
  544. egse/tempcontrol/keithley/daq6510_sim.py +0 -186
  545. egse/tempcontrol/lakeshore/__init__.py +0 -33
  546. egse/tempcontrol/lakeshore/lsci.py +0 -361
  547. egse/tempcontrol/lakeshore/lsci.yaml +0 -162
  548. egse/tempcontrol/lakeshore/lsci_cs.py +0 -174
  549. egse/tempcontrol/lakeshore/lsci_devif.py +0 -292
  550. egse/tempcontrol/lakeshore/lsci_protocol.py +0 -76
  551. egse/tempcontrol/lakeshore/lsci_ui.py +0 -387
  552. egse/tempcontrol/ni/__init__.py +0 -0
  553. egse/tempcontrol/spid/spid.py +0 -109
  554. egse/tempcontrol/spid/spid.yaml +0 -81
  555. egse/tempcontrol/spid/spid_controller.py +0 -279
  556. egse/tempcontrol/spid/spid_cs.py +0 -136
  557. egse/tempcontrol/spid/spid_protocol.py +0 -107
  558. egse/tempcontrol/spid/spid_ui.py +0 -723
  559. egse/tempcontrol/srs/__init__.py +0 -22
  560. egse/tempcontrol/srs/ptc10.py +0 -867
  561. egse/tempcontrol/srs/ptc10.yaml +0 -227
  562. egse/tempcontrol/srs/ptc10_cs.py +0 -128
  563. egse/tempcontrol/srs/ptc10_devif.py +0 -116
  564. egse/tempcontrol/srs/ptc10_protocol.py +0 -39
  565. egse/tempcontrol/srs/ptc10_ui.py +0 -906
  566. egse/ups/apc/apc.py +0 -236
  567. egse/ups/apc/apc.yaml +0 -45
  568. egse/ups/apc/apc_cs.py +0 -101
  569. egse/ups/apc/apc_protocol.py +0 -125
  570. egse/user.yaml +0 -7
  571. egse/vacuum/beaglebone/beaglebone.py +0 -149
  572. egse/vacuum/beaglebone/beaglebone.yaml +0 -44
  573. egse/vacuum/beaglebone/beaglebone_cs.py +0 -108
  574. egse/vacuum/beaglebone/beaglebone_devif.py +0 -159
  575. egse/vacuum/beaglebone/beaglebone_protocol.py +0 -192
  576. egse/vacuum/beaglebone/beaglebone_ui.py +0 -638
  577. egse/vacuum/instrutech/igm402.py +0 -91
  578. egse/vacuum/instrutech/igm402.yaml +0 -90
  579. egse/vacuum/instrutech/igm402_controller.py +0 -124
  580. egse/vacuum/instrutech/igm402_cs.py +0 -108
  581. egse/vacuum/instrutech/igm402_interface.py +0 -49
  582. egse/vacuum/instrutech/igm402_simulator.py +0 -36
  583. egse/vacuum/keller/kellerBus.py +0 -256
  584. egse/vacuum/keller/leo3.py +0 -100
  585. egse/vacuum/keller/leo3.yaml +0 -38
  586. egse/vacuum/keller/leo3_controller.py +0 -81
  587. egse/vacuum/keller/leo3_cs.py +0 -101
  588. egse/vacuum/keller/leo3_interface.py +0 -33
  589. egse/vacuum/mks/evision.py +0 -86
  590. egse/vacuum/mks/evision.yaml +0 -75
  591. egse/vacuum/mks/evision_cs.py +0 -101
  592. egse/vacuum/mks/evision_devif.py +0 -313
  593. egse/vacuum/mks/evision_interface.py +0 -60
  594. egse/vacuum/mks/evision_simulator.py +0 -24
  595. egse/vacuum/mks/evision_ui.py +0 -701
  596. egse/vacuum/pfeiffer/acp40.py +0 -87
  597. egse/vacuum/pfeiffer/acp40.yaml +0 -60
  598. egse/vacuum/pfeiffer/acp40_controller.py +0 -117
  599. egse/vacuum/pfeiffer/acp40_cs.py +0 -109
  600. egse/vacuum/pfeiffer/acp40_interface.py +0 -40
  601. egse/vacuum/pfeiffer/acp40_simulator.py +0 -37
  602. egse/vacuum/pfeiffer/tc400.py +0 -87
  603. egse/vacuum/pfeiffer/tc400.yaml +0 -83
  604. egse/vacuum/pfeiffer/tc400_controller.py +0 -136
  605. egse/vacuum/pfeiffer/tc400_cs.py +0 -109
  606. egse/vacuum/pfeiffer/tc400_interface.py +0 -70
  607. egse/vacuum/pfeiffer/tc400_simulator.py +0 -35
  608. egse/vacuum/pfeiffer/tpg261.py +0 -80
  609. egse/vacuum/pfeiffer/tpg261.yaml +0 -66
  610. egse/vacuum/pfeiffer/tpg261_controller.py +0 -150
  611. egse/vacuum/pfeiffer/tpg261_cs.py +0 -109
  612. egse/vacuum/pfeiffer/tpg261_interface.py +0 -59
  613. egse/vacuum/pfeiffer/tpg261_simulator.py +0 -23
  614. egse/version.py +0 -174
  615. egse/visitedpositions.py +0 -398
  616. egse/windowing.py +0 -213
  617. egse/zmq/__init__.py +0 -28
  618. egse/zmq/spw.py +0 -160
  619. egse/zmq_ser.py +0 -41
  620. scripts/alerts/cold.yaml +0 -278
  621. scripts/alerts/example_alerts.yaml +0 -54
  622. scripts/alerts/transition.yaml +0 -14
  623. scripts/alerts/warm.yaml +0 -49
  624. scripts/analyse_n_fee_hk_data.py +0 -52
  625. scripts/check_hdf5_files.py +0 -192
  626. scripts/check_register_sync.py +0 -47
  627. scripts/check_tcs_calib_coef.py +0 -90
  628. scripts/correct_ccd_cold_temperature_cal.py +0 -157
  629. scripts/create_hdf5_report.py +0 -293
  630. scripts/csl_model.py +0 -420
  631. scripts/csl_restore_setup.py +0 -229
  632. scripts/export-grafana-dashboards.py +0 -49
  633. scripts/fdir/cs_recovery/fdir_cs_recovery.py +0 -54
  634. scripts/fdir/fdir_table.yaml +0 -70
  635. scripts/fdir/fdir_test_recovery.py +0 -10
  636. scripts/fdir/hw_recovery/fdir_agilent_hw_recovery.py +0 -73
  637. scripts/fdir/limit_recovery/fdir_agilent_limit.py +0 -61
  638. scripts/fdir/limit_recovery/fdir_bb_heater_limit.py +0 -59
  639. scripts/fdir/limit_recovery/fdir_ensemble_limit.py +0 -33
  640. scripts/fdir/limit_recovery/fdir_pressure_limit_recovery.py +0 -71
  641. scripts/fix_csv.py +0 -80
  642. scripts/ias/correct_ccd_temp_cal_elfique.py +0 -43
  643. scripts/ias/correct_ccd_temp_cal_floreffe.py +0 -43
  644. scripts/ias/correct_trp_swap_achel.py +0 -199
  645. scripts/inta/correct_ccd_temp_cal_duvel.py +0 -43
  646. scripts/inta/correct_ccd_temp_cal_gueuze.py +0 -43
  647. scripts/n_fee_supply_voltage_calculation.py +0 -92
  648. scripts/playground.py +0 -30
  649. scripts/print_hdf5_hk_data.py +0 -68
  650. scripts/print_register_map.py +0 -43
  651. scripts/remove_lines_between_matches.py +0 -188
  652. scripts/sron/commanding/control_heaters.py +0 -44
  653. scripts/sron/commanding/pumpdown.py +0 -46
  654. scripts/sron/commanding/set_pid_setpoint.py +0 -19
  655. scripts/sron/commanding/shutdown_bbb_heaters.py +0 -10
  656. scripts/sron/commanding/shutdown_pumps.py +0 -33
  657. scripts/sron/correct_mgse_coordinates_brigand_chimay.py +0 -272
  658. scripts/sron/correct_trp_swap_brigand.py +0 -204
  659. scripts/sron/gimbal_conversions.py +0 -75
  660. scripts/sron/tm_gen/tm_gen_agilent.py +0 -37
  661. scripts/sron/tm_gen/tm_gen_heaters.py +0 -4
  662. scripts/sron/tm_gen/tm_gen_spid.py +0 -13
  663. scripts/update_operational_cgse.py +0 -268
  664. scripts/update_operational_cgse_old.py +0 -273
egse/storage/persistence.py
@@ -1,2295 +0,0 @@
1
- """
2
- This module handles the persistence storage for the Common-EGSE.
3
- """
4
- import csv
5
- import logging
6
- import os
7
- import re
8
- import sqlite3
9
- import warnings
10
- from abc import ABC
11
- from abc import abstractmethod
12
- from math import cos
13
- from math import radians
14
- from math import sin
15
- from pathlib import Path
16
- from sqlite3 import Connection
17
- from typing import Optional
18
- from typing import Union
19
-
20
- import h5py
21
- import natsort
22
- import numpy as np
23
- from astropy.io import fits
24
- from astropy.io.ascii.cparser import AstropyWarning
25
-
26
- from egse.fee import n_fee_mode
27
- from egse.fee.nfee import HousekeepingData
28
- from egse.settings import Settings
29
- from egse.setup import Setup
30
- from egse.setup import SetupError
31
- from egse.spw import DataDataPacket
32
- from egse.spw import DataPacket
33
- from egse.spw import DataPacketType
34
- from egse.spw import HousekeepingPacket
35
- from egse.spw import OverscanDataPacket
36
- from egse.spw import PacketType
37
- from egse.spw import TimecodePacket
38
- from egse.state import GlobalState
39
- from egse.system import read_last_line
40
- from egse.system import time_since_epoch_1958
41
-
42
- logger = logging.getLogger(__name__)
43
-
44
- FOV_SETTINGS = Settings.load("Field-Of-View")
45
- CCD_SETTINGS = Settings.load("CCD")
46
- INT_SYNC_TIMING_OFFSET = 0.4 # See https://github.com/IvS-KULeuven/plato-common-egse/issues/2475
47
-
48
- try:
49
- _ = os.environ["PLATO_CAMERA_IS_EM"]
50
- PLATO_CAMERA_IS_EM = True if _.capitalize() in ("1", "True", "Yes") else 0
51
- except KeyError:
52
- PLATO_CAMERA_IS_EM = False
53
-
54
-
55
-
56
- class PersistenceLayer(ABC):
57
- """The Persistence Layer implements the CRUD paradigm for storing data."""
58
-
59
- extension = "no_ext"
60
- """The file extension to use for this persistence type."""
61
-
62
- @abstractmethod
63
- def open(self, mode=None):
64
- """Opens the resource."""
65
- raise NotImplementedError("Persistence layers must implement the open method")
66
-
67
- @abstractmethod
68
- def close(self):
69
- """Closes the resource."""
70
- raise NotImplementedError("Persistence layers must implement the close method")
71
-
72
- @abstractmethod
73
- def exists(self):
74
- """Does the resource exists."""
75
- raise NotImplementedError("Persistence layers must implement the exists method")
76
-
77
- @abstractmethod
78
- def create(self, data):
79
- """Creates an entry in the persistence store."""
80
- raise NotImplementedError("Persistence layers must implement a create method")
81
-
82
- @abstractmethod
83
- def read(self, select=None):
84
- """Returns a list of all entries in the persistence store.
85
-
86
- The list can be filtered based on a selection from the `select` argument which
87
- should be a Callable object.
88
-
89
- Args:
90
- select (Callable): a filter function to narrow down the list of all entries.
91
- Returns:
92
- A list or generator for all entries in the persistence store.
93
- """
94
- raise NotImplementedError("Persistence layers must implement a read method")
95
-
96
- @abstractmethod
97
- def update(self, idx, data):
98
- """Updates the entry for index `idx` in the persistence store."""
99
- raise NotImplementedError("Persistence layers must implement an update method")
100
-
101
- @abstractmethod
102
- def delete(self, idx):
103
- """Deletes the entry for index `idx` from the persistence store."""
104
- raise NotImplementedError("Persistence layers must implement a delete method")
105
-
106
- @abstractmethod
107
- def get_filepath(self):
108
- """If this persistence class is file based, return its file path, otherwise return None."""
109
- raise NotImplementedError("Persistence layers must implement a get_filepath method")
110
-
111
-
112
- class FITSPersistenceError(Exception):
113
- """ Error for the FITS persistence layer."""
114
-
115
- pass
116
-
117
-
118
- class MissingSpWPacketsError(Exception):
119
- """ Error when the SpW data for a frame in an HDF5 file is incomplete."""
120
-
121
- pass
122
-
123
-
124
- class FITS(PersistenceLayer):
125
- """ Persistence layer that saves (image) data in a FITS file."""
126
-
127
- extension = "fits"
128
-
129
- warnings.simplefilter('ignore', category=AstropyWarning)
130
-
131
- def __init__(self, filename: str, prep: dict):
132
- """ Initialisation of the FITS persistence layer.
133
-
134
- This consists of the following steps:
135
-
136
- - Initialise the filepath (for the given filename);
137
- - Fetch the register map from the DPU;
138
- - Read all necessary data from that register map:
139
- - Which CCD(s) will be read;
140
- - Which side(s) of the CCD(s) will be read (E = left; F = right);
141
- - Which rows and columns will be transmitted;
142
- - How can we know whether all information has been received for a particular exposure;
143
- - Initialisation of the (1D) arrays in which the received data will be stored;
144
- - Initialisation of basic headers for the image, serial pre-scan(s), serial over-scan(s), and parallel
145
- over-scan for any exposure.
146
-
147
- Assumed is that the crucial parameters in the register map (N-FEE mode, v_start, v_end, h_end) will stay the
148
- same throughout data acquisition and assembly of the FITS file. It is not explicitly checked whether this is
149
- indeed the case.
150
-
151
- All information (image data, serial pre-scan, serial over-scan, and parallel over-scan) will be stored in the
152
- same FITS file. If the data of multiple CCDs would be transmitted, it is all stored in the same FITS file. The
153
- extension name will indicate which kind of data it contains (image data, serial pre-scan, serial over-scan, or
154
- parallel over-scan), for which CCD. In case of the serial scan maps, the extension will also indicate which
155
- CCD side it applies to.
156
-
157
- Args:
158
- - filename: Name of the output FITS file.
159
- - prep: Dictionary with the following information:
160
- * v_start (int) and v_end(int): index of the first and the last row being transmitted;
161
- * h_end (int): index of the last serial readout of the readout register;
162
- * rows_final_dump:
163
- * ccd_mode_config (egse.fee.n_fee_mode): readout mode;
164
- * ccd_readout_order (List[int]): CCDs that will be read out;
165
- * expected_last_packet_flags (List[bool]): expected last packet flags;
166
- * obsid: observation identifier;
167
- * cycle_time: image cycle time [s];
168
- * cgse_version: version of the Common EGSE;
169
- * setup: setup;
170
- * register_map: FEE register map;
171
- * vgd: configured VGD;
172
- * in case of windowing mode or windowing pattern mode: dictionary with one window list for each CCD.
173
- """
174
-
175
- self.fee_side = GlobalState.setup.camera.fee.ccd_sides.enum
176
-
177
- self._filepath = Path(filename)
178
-
179
- # Which side(s) of which CCD(s) will be transmitted?
180
- # (initialise the data arrays for each of these)
181
-
182
- self.ccd_readout_order = prep["ccd_readout_order"] # CCD numbering [1-4]
183
- self.selected_ccds = np.unique(self.ccd_readout_order)
184
- logger.debug(f"Selected CCDs: {self.selected_ccds}")
185
-
186
- self.data_arrays = {}
187
- self.init_data_arrays()
188
-
189
- self.frame_number = {ccd: {self.fee_side.LEFT_SIDE: 0, self.fee_side.RIGHT_SIDE: 0} for ccd in self.selected_ccds}
190
- self.timestamp = None
191
- self.finetime = None
192
-
193
- # How can you know whether or not all data for a given CCD has been received?
194
- # (this depends on whether or not there is parallel over-scan data and on which CCD side(s) will be transmitted)
195
-
196
- self.expected_last_packet_flags = prep["expected_last_packet_flags"] # To be received
197
- self.received_last_packet_flags = {ccd: [False] * 4 for ccd in self.selected_ccds} # Actually received
198
-
199
- logger.debug(f"Init last packets flag: {self.received_last_packet_flags}")
200
-
201
- # Readout mode
202
-
203
- self.ccd_mode_config = prep["ccd_mode_config"]
204
- self.is_windowing_mode = self.check_readout_mode()
205
-
206
- # if self.is_windowing_mode:
207
- #
208
- # self.windows = self.init_window_list(prep["window_list"])
209
-
210
- # Define the values of the WCS keywords
211
-
212
-
213
- self.v_start = prep["v_start"] # First transmitted row
214
- self.v_end = prep["v_end"] # Last transmitted row
215
- self.h_end = prep["h_end"] # Last transmitted column
216
- self.rows_final_dump = prep["rows_final_dump"] # Number of rows to be dumped after readout
217
- self.cycle_time = prep["cycle_time"] # Image cycle time [s]
218
- self.cgse_version = prep["cgse_version"] # Version of the Common EGSE
219
- self.obsid = prep["obsid"]
220
-
221
- self.register_map = prep["register_map"] # Register map
222
-
223
- # Read information from the setup
224
-
225
- self.setup: Setup = prep["setup"]
226
-
227
- self.site_name = self.setup["site_id"] # Site ID
228
- self.setup_id = self.setup.get_id() # Setup ID
229
- self.camera_id = self.setup.camera.get("ID") # Camera ID (None if not present in the setup)
230
-
231
- self.readout_time = self.calculate_readout_time(self.setup) # Readout time [s]
232
- try:
233
- self.exposure_time = self.cycle_time - self.readout_time # Exposure time [s]
234
- except TypeError:
235
- # Image cycle time is unknown (None)
236
- self.exposure_time = None
237
-
238
- self.has_serial_overscan = self.h_end >= CCD_SETTINGS.LENGTH_SERIAL_PRESCAN + CCD_SETTINGS.NUM_COLUMNS / 2
239
- self.has_parallel_overscan = self.v_end >= CCD_SETTINGS.NUM_ROWS
240
-
241
- # Create basic WCS
242
- # (part of the information can only be determined when the data is assembled)
243
-
244
- self.image_header = self.create_base_image_wcs()
245
- self.serial_prescan_header = self.create_base_serial_prescan_wcs()
246
- self.serial_overscan_header = self.create_base_serial_overscan_wcs()
247
- self.parallel_overscan_header = self.create_base_parallel_overscan_wcs()
248
-
249
- self.is_fits_file_open = False
250
-
251
- def calculate_readout_time(self, setup: Setup):
252
- """ Calculate the readout time.
253
-
254
- The readout time consists of:
255
-
256
- - clearout for the rows up to v_start;
257
- - reading (i.e. parallel transfer of the row + serial transfer of all its pixels up to h_end) rows v_start
258
- to v_end;
259
- - dumping rows_final_dump.
260
-
261
- Returns: Readout time for the requested part of a single CCD side [s].
262
- """
263
-
264
- time_row_parallel = setup.camera.fee.time_row_parallel
265
- time_row_clearout = setup.camera.fee.time_row_clearout
266
- time_pixel_readout = setup.camera.fee.time_pixel_readout
267
-
268
- time_read_rows = (self.v_end - self.v_start + 1) * (time_row_parallel + (self.h_end + 1) * time_pixel_readout)
269
- time_dump_rows = (self.v_start + self.rows_final_dump) * time_row_clearout
270
-
271
- return time_read_rows + time_dump_rows
272
-
273
- def get_vgd(self):
274
- """ Extract the VGD voltage from the register map.
275
-
276
- Return: Configured VGD voltage [V].
277
- """
278
-
279
- vgd_19 = self.register_map[('reg_19_config', 'ccd_vgd_config')]
280
- vgd_20 = self.register_map[('reg_20_config', 'ccd_vgd_config')]
281
-
282
- return ((vgd_20 << 4) + vgd_19) / 1000 * 5.983
283
-
284
- def get_ccd2_vrd(self):
285
- """
286
- Extract the VRD voltage for CCD2 from the register map.
287
-
288
- NOTE: Use this function only for the FM cameras.
289
-
290
- Return: Configured VRD voltage.
291
- """
292
- vrd_18 = self.register_map[('reg_18_config', 'ccd2_vrd_config')]
293
- vrd_19 = self.register_map[('reg_19_config', 'ccd2_vrd_config')]
294
-
295
- return int(f'0x{vrd_19:x}{vrd_18:x}', 16)
296
-
297
-
298
- def get_ccd3_vrd(self):
299
- """
300
- Extract the VRD voltage for CCD3 from the register map.
301
-
302
- NOTE: Use this function only for the EM camera.
303
-
304
- Return: Configured VRD voltage.
305
- """
306
- vrd_18 = self.register_map[('reg_18_config', 'ccd3_vrd_config')]
307
- vrd_19 = self.register_map[('reg_19_config', 'ccd3_vrd_config')]
308
-
309
- return int(f'0x{vrd_19:x}{vrd_18:x}', 16)
310
-
311
-
312
- def init_data_arrays(self):
313
- """ Initialise data arrays in which the data content of the SpW packets will be dumped.
314
-
315
- At this point, we have already determined which side(s) of which CCD(s) will be read out. For each of them,
316
- a placeholder will be foreseen in a dedicated dictionary. The structure of this dictionary is the following,
317
- in case both sides of all CCDs will be read:
318
-
319
- data_arrays[fee_side.LEFT_SIDE][1] # left side of CCD1
320
- data_arrays[fee_side.LEFT_SIDE][2] # left side of CCD2
321
- data_arrays[fee_side.LEFT_SIDE][3] # left side of CCD3
322
- data_arrays[fee_side.LEFT_SIDE][4] # left side of CCD4
323
- data_arrays[fee_side.RIGHT_SIDE][1] # right side of CCD1
324
- data_arrays[fee_side.RIGHT_SIDE][2] # right side of CCD2
325
- data_arrays[fee_side.RIGHT_SIDE][3] # right side of CCD3
326
- data_arrays[fee_side.RIGHT_SIDE][4] # right side of CCD4
327
-
328
- In case not all CCDs will be read out and/or only one side, placeholders will only be foreseen for the relevant
329
- CCD data.
330
-
331
- Returns: Dictionary with placeholders for the data arrays of the selected CCD sides.
332
- """
333
-
334
- left_side_arrays = {ccd: np.array([], dtype=np.uint16) for ccd in self.selected_ccds}
335
- self.data_arrays[self.fee_side.LEFT_SIDE] = left_side_arrays
336
-
337
- right_side_arrays = {ccd: np.array([], dtype=np.uint16) for ccd in self.selected_ccds}
338
- self.data_arrays[self.fee_side.RIGHT_SIDE] = right_side_arrays
339
-
340
- def clear_for_next_exposure(self, ccd_number: int, ccd_side):
341
- """ Indicate that no data has been received yet for the next exposure of the given CCD.
342
-
343
- At the end of an exposure, when the data have been assembled and written to FITS:
344
-
345
- - clear the data arrays for the next exposure;
346
- - indicate that the last packet has not been received yet for the next exposure;
347
- - clear the timestamp for the next exposure.
348
-
349
- Args:
350
- - ccd_number: CCD identifier (1/2/3/4).
351
- - ccd_side: CCD side from which the last received data originates.
352
- """
353
-
354
- for ccd_side in self.fee_side:
355
- self.clear_data_arrays(ccd_number, ccd_side)
356
-
357
- self.clear_last_packet_received(ccd_number)
358
-
359
- def clear_data_arrays(self, ccd_number: int, ccd_side):
360
- """ Clear the data arrays for the given CCD.
361
-
362
- At the end of an exposure, when the data have been assembled and written to FITS, the data arrays must be
363
- cleared for the next exposure.
364
-
365
- Args:
366
- - ccd_number: CCD identifier (1/2/3/4).
367
- """
368
-
369
- self.data_arrays[ccd_side][ccd_number] = np.array([], dtype=np.uint16)
370
-
371
- def clear_last_packet_received(self, ccd_number: int):
372
- """ Clear the information about the last packet being received for the given CCD.
373
-
374
- At the end of an exposure, when the data have been assembled and written to FITS, it must be indicated that the
375
- last packet has not been received for the next exposure for the given CCD.
376
-
377
- Args:
378
- - ccd_number: CCD identifier (1/2/3/4).
379
- """
380
-
381
- self.received_last_packet_flags[ccd_number] = [False for el in self.received_last_packet_flags[ccd_number]]
382
-
383
- # def get_data_array(self, ccd_number: int, ccd_side: fee_side):
384
- # """ Return the data array for the given side of the given CCD.
385
- #
386
- # In this array all SpW data concerning the serial pre-scan, image, serial over-scan, and parallel over-scan
387
- # will be dumped (in 1D). If the last data packet has been received, the different parts will be extracted and
388
- # written to FITS.
389
- #
390
- # Args:
391
- # - ccd_number: CCD identifier (1/2/3/4).
392
- # - ccd_side: CCD side.
393
- #
394
- # Returns: Data array for the given side of the given CCD.
395
- # """
396
- #
397
- # return self.data_arrays[ccd_side][ccd_number]
398
-
399
- def check_readout_mode(self):
400
- """ For now only checks whether the N-FEE is in the correct mode, i.e. full-image (pattern) mode.
401
-
402
- In the future, if deemed necessary, windowing (pattern) mode may be implemented as well.
403
- """
404
-
405
- if self.ccd_mode_config in [n_fee_mode.FULL_IMAGE_MODE, n_fee_mode.FULL_IMAGE_PATTERN_MODE,
406
- n_fee_mode.PARALLEL_TRAP_PUMPING_1_MODE, n_fee_mode.PARALLEL_TRAP_PUMPING_2_MODE,
407
- n_fee_mode.SERIAL_TRAP_PUMPING_1_MODE, n_fee_mode.SERIAL_TRAP_PUMPING_2_MODE]:
408
-
409
- return False
410
-
411
- if self.ccd_mode_config in [n_fee_mode.WINDOWING_PATTERN_MODE, n_fee_mode.WINDOWING_MODE]:
412
-
413
- return True
414
-
415
- else:
416
-
417
- raise FITSPersistenceError("Construction of FITS files from SpW packets only implemented for full-image "
418
- "(pattern) mode, windowing (pattern) mode, and parallel/serial trap pumping 1/2 "
419
- "mode")
420
-
421
- # def init_window_list(self, window_list: dict):
422
- # """ Compile the window list.
423
- #
424
- # For each of the CCDs that will be read out, execute the following steps:
425
- #
426
- # - For each of the CCD sides that will be transmitted, identify which pixels in the compound 2D array (i.e.
427
- # in which the image and the scan maps are still glued together) will be transmitted. Store their index
428
- # in the corresponding 1D array in a dictionary.
429
- # - Store the pixel coordinates of the lower left corner of the window (in the CCD reference frame) of all
430
- # windows in the image area.
431
- # """
432
- #
433
- # window_indices = {ccd_side: {} for ccd_side in self.selected_ccd_side}
434
- #
435
- # for ccd_number in self.selected_ccds:
436
- #
437
- # # Get the window information for the current CCD
438
- #
439
- # ccd_window_list_obj: WindowList = window_list[ccd_number]
440
- # ccd_window_list = ccd_window_list_obj.get_window_list() # Ordered set of windows
441
- # ccd_window_num_columns, ccd_window_num_rows = ccd_window_list_obj.get_window_size() # Window size
442
- #
443
- # ccd_window_x_array = np.array([])
444
- # ccd_window_y_array = np.array([])
445
- #
446
- # ccd_window_indices = {ccd_side: np.array([]) for ccd_side in self.selected_ccd_side}
447
- #
448
- # for (x_window, y_window, ccd_side_window) in ccd_window_list:
449
- #
450
- # # Calculate the index in the 1D array
451
- #
452
- # for row in range(y_window, y_window + ccd_window_num_rows + 1):
453
- #
454
- # for column in range(x_window, x_window + ccd_window_num_columns + 1):
455
- #
456
- # index = row * (self.v_end - self.v_start + 1) + column
457
- # ccd_window_indices[ccd_side_window] = np.append(ccd_window_indices[ccd_side_window], index)
458
- #
459
- # # Store the coordinates of the lower left corner of the windows in the CCD reference frame
460
- # # (only for the windows in the image area)
461
- #
462
- # x_to_append = x_window - CCD_SETTINGS.LENGTH_SERIAL_PRESCAN
463
- #
464
- # # TODO Should we check that the window is on the image area?
465
- #
466
- # if ccd_side_window == fee_side.F_SIDE:
467
- #
468
- # x_to_append = CCD_SETTINGS.NUM_COLUMNS / 2 - 1 - x_to_append
469
- #
470
- # if fee_side.E_SIDE in self.selected_ccd_side:
471
- #
472
- # x_to_append += CCD_SETTINGS.NUM_COLUMNS / 2
473
- #
474
- # ccd_window_x_array = np.append(ccd_window_x_array, x_to_append)
475
- # ccd_window_y_array = np.append(ccd_window_y_array, y_window)
476
- #
477
- # for ccd_side in self.selected_ccd_side:
478
- #
479
- # window_indices[ccd_side][ccd_number] = np.sort(np.unique(ccd_window_indices[ccd_side]))
480
- #
481
- # rows = fits.Column("Rows", format="I", array=ccd_window_y_array)
482
- # columns = fits.Column("Columns", format="I", array=ccd_window_x_array)
483
- # table = fits.BinTableHDU.from_columns([rows, columns])
484
- # table.header["EXTNAME"] = f"Windows"
485
- # table.header["EXTVER"] = ccd_number
486
- # table.header["CCD_ID"] = (ccd_number, "CCD identifier",)
487
- #
488
- # # TODO
489
- #
490
- # # fits.append(table)
491
- #
492
- # # with fits.open(self._filepath, mode="append") as hdul:
493
- # #
494
- # # hdul.append(table)
495
- #
496
- # return window_indices
497
-
498
- def create_primary_header(self):
499
- """ Creates the primary header (i.e. the header of the primary HDU).
500
-
501
- This contains information that is specific for the camera.
502
- """
503
-
504
- primary_header = fits.PrimaryHDU().header
505
-
506
- primary_header["LEVEL"] = 1 # Flat structure
507
-
508
- primary_header["V_START"] = (self.v_start, "Index of 1st row that is transmitted (counting starts at 0)")
509
- primary_header["V_END"] = (self.v_end, "Index of last row that is transmitted (counting starts at 0)")
510
- primary_header["H_END"] = (self.h_end, "Number of serial register transfers")
511
- primary_header["ROWS_FINAL_DUMP"] = (self.rows_final_dump, "Number of rows for clearout after readout")
512
- primary_header["READ_MODE"] = (n_fee_mode(self.ccd_mode_config).name, "N-FEE operating mode")
513
-
514
- primary_header["CI_WIDTH"] = (self.register_map["charge_injection_width"],
515
- "Number of rows in each charge injection region")
516
- primary_header["CI_GAP"] = (self.register_map["charge_injection_gap"],
517
- "Number of rows between charge injection regions")
518
- primary_header["PARALLEL_TOI_PERIOD"] = (self.register_map["parallel_toi_period"],
519
- "Duration of a parallel overlap period (TOI) [us]")
520
- primary_header["PARALLEL_CLK_OVERLAP"] = (self.register_map["parallel_clk_overlap"],
521
- "Extra parallel clock overlap [us]")
522
- primary_header["CI_EN"] = (self.register_map["charge_injection_width"],
523
- "Charge injection enabled (1) / disabled (0)")
524
- primary_header["TRI_LEVEL_CLK_EN"] = (self.register_map["tri_level_clk_en"],
525
- "Generating bi-level parallel clocks (0) / tri-level parallel clocks (1)")
526
- primary_header["IMG_CLK_DIR"] = (self.register_map["img_clk_dir"],
527
- "Generating reverse parallel clocks (1) / normal forward parallel clocks (0)")
528
- primary_header["REG_CLK_DIR"] = (self.register_map["reg_clk_dir"],
529
- "Generating reverse serial clocks (1) / normal forward serial clocks (0)")
530
- primary_header["PACKET_SIZE"] = (self.register_map["packet_size"],
531
- "Packet size = load bytes + 10 (header bytes)")
532
-
533
- primary_header["TRAP_PUMP_DWELL_CTR"] = (self.register_map["Trap_Pumping_Dwell_counter"],
534
- "Dwell timer for trap pumping [ns]")
535
- primary_header["SENSOR_SEL"] = (self.register_map["sensor_sel"], "CCD port data transmission selection control")
536
- primary_header["SYNC_SEL"] = (self.register_map["sync_sel"], "Internal (1) / external (0) sync")
537
- primary_header["DIGITISE_EN"] = (self.register_map["digitise_en"],
538
- "Digitised data transferred to the N-DPU (1) or not (0) during image mode")
539
- primary_header["DG_EN"] = (self.register_map["DG_en"], "Dump gate high (1) / low (0)")
540
- primary_header["CCD_READ_EN"] = (self.register_map["ccd_read_en"], "CCD readout enabled (1) / disabled (0)")
541
- primary_header["CONV_DLY"] = (self.register_map["conv_dly"],
542
- "Delay value from rising edge of CCD_R_EF_DRV (where ADC convert pulse "
543
- "is generated) [ns]")
544
- primary_header["HIGH_PRECISION_HK_EN"] = (self.register_map["High_precision_HK_en"],
545
- "Sending high-precision HK (1) / pixel data (0)")
546
-
547
- primary_header["CCD_VOD"] = (self.register_map["ccd_vod_config"], "Configured VOD")
548
-
549
- primary_header["CCD1_VRD"] = (self.register_map["ccd1_vrd_config"], "Configured VRD for CCD1")
550
- if PLATO_CAMERA_IS_EM:
551
- primary_header["CCD2_VRD"] = (self.register_map["ccd2_vrd_config"], "Configured VRD for CCD2")
552
- primary_header["CCD3_VRD"] = (self.get_ccd3_vrd(), "Configured VRD for CCD3")
553
- else:
554
- primary_header["CCD2_VRD"] = (self.get_ccd2_vrd(), "Configured VRD for CCD2")
555
- primary_header["CCD3_VRD"] = (self.register_map["ccd3_vrd_config"], "Configured VRD for CCD3")
556
-
557
- primary_header["CCD4_VRD"] = (self.register_map["ccd4_vrd_config"], "Configured VRD for CCD4")
558
- primary_header["CCD_VOG"] = (self.register_map["ccd_vog_config"], "Configured VOG")
559
- primary_header["CCD_VGD"] = (self.get_vgd(), "Configured VGD [V]")
560
- primary_header["CCD_IG_HI"] = (self.register_map["ccd_ig_hi_config"], "Configured IG-high")
561
- primary_header["CCD_IG_LO"] = (self.register_map["ccd_ig_lo_config"], "Configured IG-high")
562
- primary_header["TRK_HLD_HI"] = (self.register_map["trk_hld_hi"], "Track and hold high")
563
- primary_header["TRK_HLD_LO"] = (self.register_map["trk_hld_lo"], "Track and hold low")
564
- primary_header["CONT_RST_ON"] = (self.register_map["cont_rst_on"],
565
- "When 1, FPGA generates continuous reset clock during readout")
566
- if not PLATO_CAMERA_IS_EM:
567
- primary_header["CONT_CDSCLP_ON"] = (self.register_map["cont_cdsclp_on"],
568
- "When 1, FPGA generates continuous CDS clamp during readout")
569
- primary_header["CONT_ROWCLP_ON"] = (self.register_map["cont_rowclp_on"],
570
- "When 1, FPGA generates continuous row clamp during readout")
571
- primary_header["R_CFG1"] = (self.register_map["r_cfg1"], "Clock cycle for Rph3-low, Rph1-high")
572
- primary_header["R_CFG2"] = (self.register_map["r_cfg2"], "Clock cycle for Rph1-low, Rph2-high")
573
- primary_header["CDSCLP_LO"] = (self.register_map["cdsclp_lo"], "Clock cycle for cdsclp low")
574
- if not PLATO_CAMERA_IS_EM:
575
- primary_header["ADC_PWRDN_EN"] = (self.register_map["adc_pwrdn_en"],
576
- "ADC power-down enabled (0) / disabled (1)")
577
- primary_header["CDSCLP_HI"] = (self.register_map["cdsclp_hi"], "Clock cycle for cdsclp high")
578
- primary_header["ROWCLP_HI"] = (self.register_map["rowclp_hi"], "Clock cycle for rowclp high")
579
- primary_header["ROWCLP_LO"] = (self.register_map["rowclp_lo"], "Clock cycle for rowclp low")
580
- # primary_header["SURFACE_INV_CTR"] = (self.register_map["Surface_Inversion_counter"],
581
- # "Surface inversion counter")
582
- primary_header["READOUT_PAUSE_CTR"] = (self.register_map["Readout_pause_counter"], "Readout pause counter")
583
- primary_header["TRAP_PUMP_SHUFFLE_CTR"] = (self.register_map["Trap_Pumping_Shuffle_counter"],
584
- "Trap pumping shuffle counter")
585
-
586
- # primary_header["FOCALLEN"] = (FOV_SETTINGS["FOCAL_LENGTH"], "Focal length [mm]")
587
-
588
- # Additional keywords
589
-
590
- primary_header["TELESCOP"] = "PLATO"
591
- if self.camera_id:
592
- primary_header["INSTRUME"] = (self.camera_id, "Camera ID")
593
- primary_header["SITENAME"] = (self.site_name, "Name of the test site")
594
- primary_header["SETUP"] = (self.setup_id, "Setup ID")
595
- primary_header["CCD_READOUT_ORDER"] = (str(self.ccd_readout_order), "Transmitted CCDs")
596
- primary_header["CYCLETIME"] = (self.cycle_time, "Image cycle time [s]")
597
- primary_header["READTIME"] = (self.readout_time,
598
- "Time needed to read out the requested part for a single CCD side [s]")
599
-
600
- if self.register_map["sync_sel"] == 1: # See https://github.com/IvS-KULeuven/plato-common-egse/issues/2475
601
- primary_header["READPERIOD"] = (self.cycle_time + INT_SYNC_TIMING_OFFSET, "Time between frames [s] "
602
- "(internal sync)")
603
- texp_cmd = self.cycle_time - self.readout_time
604
- primary_header["TEXP_CMD"] = (texp_cmd, "Commanded exposure time [s] (internal sync)")
605
- primary_header["TEXP_EFF"] = (texp_cmd + INT_SYNC_TIMING_OFFSET, "Effective exposure time [s] (internal "
606
- "sync)")
607
-
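
A small worked example of the internal-sync bookkeeping above, with made-up numbers (INT_SYNC_TIMING_OFFSET and the cycle/readout times come from the setup and the DPU; the values below are illustrative only):

    # Illustrative values only -- not taken from any real setup.
    cycle_time = 25.0             # commanded image cycle time [s]     -> CYCLETIME
    readout_time = 4.0            # readout of the requested part [s]  -> READTIME
    INT_SYNC_TIMING_OFFSET = 0.4  # internal-sync timing offset [s]

    texp_cmd = cycle_time - readout_time               # 21.0 s -> TEXP_CMD
    texp_eff = texp_cmd + INT_SYNC_TIMING_OFFSET       # 21.4 s -> TEXP_EFF
    read_period = cycle_time + INT_SYNC_TIMING_OFFSET  # 25.4 s -> READPERIOD
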
608
- primary_header["CGSE"] = (self.cgse_version, "Version of the Common EGSE")
609
-
610
- logger.info(f"Obsid in FITS persistence layer: {self.obsid}")
611
-
612
- if self.obsid is not None:
613
-
614
- logger.debug(f"{self.obsid = }")
615
-
616
- primary_header["OBSID"] = (self.obsid, "Observation identifier")
617
-
618
- primary_header["PRE_SC_N"] = (CCD_SETTINGS.LENGTH_SERIAL_PRESCAN,
619
- "Number of pixels/columns in the serial pre-scan")
620
- primary_header["OVR_SC_N"] = (max(0, self.h_end + 1
621
- - CCD_SETTINGS.LENGTH_SERIAL_PRESCAN - CCD_SETTINGS.NUM_COLUMNS // 2),
622
- "Number of virtual pixels / columns in the serial over-scan")
623
- primary_header["OVR_SC_R"] = (max(0, self.v_end + 1 - CCD_SETTINGS.NUM_ROWS),
624
- "Number of rows in the parallel over-scan")
625
- primary_header["IMG_REPR"] = ("FOV_IMG", "Right CCD side flipped w.r.t. readout direction")
626
-
627
- return primary_header
628
-
629
- def create_base_image_wcs(self):
630
- """ Create a basic FITS header for the image.
631
-
632
- Not all information can be filled out at this point (i.c. extension, CCD identifier, number of columns,
633
- rotation, reference pixel). This will be done at the moment the image will be written to file.
634
-
635
- Note that, if both CCD sides will be transmitted, the image of both sides will be glued together (the E-side
636
- must be flipped horizontally first). If only the E-side will be transmitted, its image must be flipped
637
- horizontally.
638
-
639
- Returns: Basic FITS header for the image.
640
- """
641
-
642
- image_header = fits.ImageHDU().header
643
-
644
- # Make sure the data is saved as 16-bit
645
-
646
- image_header["BITPIX"] = 16
647
- image_header["BZERO"] = 32768
648
-
649
- # Dimensionality
650
-
651
- num_rows = min(CCD_SETTINGS["NUM_ROWS"] - 1, self.v_end) - self.v_start + 1
652
-
653
- image_header["NAXIS"] = (2, "Dimensionality of the image",)
654
- image_header["NAXIS2"] = (num_rows, "Number of rows in the image",)
655
-
656
-
657
- # Focal length (this is needed for the conversion to field angles)
658
-
659
- image_header["FOCALLEN"] = (FOV_SETTINGS["FOCAL_LENGTH"], "Focal length [mm]",) # TODO
660
-
661
- # Linear coordinate transformation from sub-field to focal-plane coordinates
662
-
663
- image_header["ctype1"] = ("LINEAR", "Linear coordinate transformation",)
664
- image_header["ctype2"] = ("LINEAR", "Linear coordinate transformation",)
665
-
666
- # Focal-plane coordinates are expressed in mm
667
-
668
- image_header["CUNIT1"] = ("MM", "Target unit in the column direction (mm)",)
669
- image_header["CUNIT2"] = ("MM", "Target unit in the row direction (mm)",)
670
-
671
- # Pixel size
672
-
673
- cdelt = CCD_SETTINGS["PIXEL_SIZE"] / 1000.0 # Pixel size [mm]
674
- image_header["CDELT1"] = (cdelt, "Pixel size in the x-direction [mm]",)
675
- image_header["CDELT2"] = (cdelt, "Pixel size in the y-direction [mm]",)
676
-
677
- # Additional keywords
678
-
679
- # image_header["TELESCOP"] = (setup["camera_id"], "Camera ID")
680
- # image_header["INSTRUME"] = (setup["camera_id"], "Camera ID")
681
-
682
- image_header["TELESCOP"] = "PLATO"
683
- if self.camera_id:
684
- image_header["INSTRUME"] = (self.camera_id, "Camera ID")
685
- image_header["SITENAME"] = (self.site_name, "Name of the test site")
686
- image_header["SETUP"] = (self.setup_id, "Setup ID")
687
- if self.obsid is not None:
688
- image_header["OBSID"] = (self.obsid, "Observation identifier")
689
-
690
- return image_header
691
-
692
- def create_base_serial_prescan_wcs(self):
693
- """ Create a basic FITS header for the serial pre-scan.
694
-
695
- Not all information can be filled out at this point (i.c. extension, CCD identifier). This will be
696
- done at the moment the serial pre-scan will be written to file.
697
-
698
- Note that, if both CCD sides will be transmitted, you will end up having two serial pre-scan maps per exposure
699
- (one for each side).
700
-
701
- Returns: Basic FITS header for the serial pre-scan.
702
- """
703
-
704
- serial_prescan_wcs = fits.ImageHDU().header
705
-
706
- # Make sure the data is saved as 16-bit
707
-
708
- serial_prescan_wcs["BITPIX"] = 16
709
- serial_prescan_wcs["BZERO"] = 32768
710
-
711
- # Dimensionality
712
-
713
- num_rows = self.v_end - self.v_start + 1
714
- num_columns = CCD_SETTINGS.LENGTH_SERIAL_PRESCAN
715
-
716
- serial_prescan_wcs["NAXIS"] = (2, "Dimensionality of the serial pre-scan",)
717
- serial_prescan_wcs["NAXIS1"] = (num_columns, "Number of columns in the serial pre-scan",)
718
- serial_prescan_wcs["NAXIS2"] = (num_rows, "Number of rows in the serial pre-scan",)
719
-
720
- serial_prescan_wcs["TELESCOP"] = "PLATO"
721
- if self.camera_id:
722
- serial_prescan_wcs["INSTRUME"] = (self.camera_id, "Camera ID")
723
- serial_prescan_wcs["SITENAME"] = (self.site_name, "Name of the test site")
724
- serial_prescan_wcs["SETUP"] = (self.setup_id, "Setup ID")
725
- if self.obsid is not None:
726
- serial_prescan_wcs["OBSID"] = (self.obsid, "Observation identifier")
727
-
728
- return serial_prescan_wcs
729
-
730
- def create_base_serial_overscan_wcs(self):
731
- """ Create a basic FITS header for the serial over-scan.
732
-
733
- Not all information can be filled out at this point (i.c. extension, CCD identifier). This will be
734
- done at the moment the serial over-scan will be written to file.
735
-
736
- Note that, if both CCD sides will be transmitted, you will end up having two serial over-scan maps per exposure
737
- (one for each side).
738
-
739
- Returns: Basic FITS header for the serial over-scan.
740
- """
741
-
742
- serial_overscan_wcs = fits.ImageHDU().header
743
-
744
- # Make sure the data is saved as 16-bit
745
-
746
- serial_overscan_wcs["BITPIX"] = 16
747
- serial_overscan_wcs["BZERO"] = 32768
748
-
749
- # Dimensionality
750
-
751
- num_rows = self.v_end - self.v_start + 1
752
- num_columns = self.h_end + 1 - CCD_SETTINGS.LENGTH_SERIAL_PRESCAN - CCD_SETTINGS.NUM_COLUMNS // 2  # columns follow h_end, not v_end
753
-
754
- serial_overscan_wcs["NAXIS"] = (2, "Dimensionality of the serial over-scan",)
755
- serial_overscan_wcs["NAXIS1"] = (num_columns, "Number of columns in the serial over-scan",)
756
- serial_overscan_wcs["NAXIS2"] = (num_rows, "Number of rows in the serial over-scan",)
757
-
758
- # Site name
759
-
760
- serial_overscan_wcs["TELESCOP"] = "PLATO"
761
- if self.camera_id:
762
- serial_overscan_wcs["INSTRUME"] = (self.camera_id, "Camera ID")
763
- serial_overscan_wcs["SITENAME"] = (self.site_name, "Name of the test site")
764
- serial_overscan_wcs["SETUP"] = (self.setup_id, "Setup ID")
765
- if self.obsid is not None:
766
- serial_overscan_wcs["OBSID"] = (self.obsid, "Observation identifier")
767
-
768
- return serial_overscan_wcs
769
-
770
- def create_base_parallel_overscan_wcs(self):
771
- """ Create a basic FITS header for the parallel over-scan.
772
-
773
- Not all information can be filled out at this point (i.c. extension, CCD identifier, number of columns). This
774
- will be done at the moment the parallel over-scan will be written to file.
775
-
776
- Note that, if both CCD sides will be transmitted, the parallel over-scan of both sides will be glued together
777
- (the E-side must be flipped horizontally first). If only the E-side will be transmitted, its parallel
778
- over-scan must be flipped horizontally.
779
-
780
- Returns: Basic FITS header for the parallel over-scan.
781
- """
782
-
783
- parallel_overscan_wcs = fits.ImageHDU().header
784
-
785
- # Make sure the data is saved as 16-bit
786
-
787
- parallel_overscan_wcs["BITPIX"] = 16
788
- parallel_overscan_wcs["BZERO"] = 32768
789
-
790
- # Dimensionality
791
-
792
- num_rows_parallel_overscan = max(0, self.v_end - CCD_SETTINGS.NUM_ROWS + 1)
793
-
794
- parallel_overscan_wcs["NAXIS"] = (2, "Dimensionality of the parallel over-scan",)
795
- parallel_overscan_wcs["NAXIS2"] = (num_rows_parallel_overscan, "Number of rows in the parallel over-scan",)
796
-
797
- # Site name
798
-
799
- parallel_overscan_wcs["TELESCOP"] = "PLATO"
800
- if self.camera_id:
801
- parallel_overscan_wcs["INSTRUME"] = (self.camera_id, "Camera ID")
802
- parallel_overscan_wcs["SITENAME"] = (self.site_name, "Name of the test site")
803
- parallel_overscan_wcs["SETUP"] = (self.setup_id, "Setup ID")
804
- if self.obsid is not None:
805
- parallel_overscan_wcs["OBSID"] = (self.obsid, "Observation identifier")
806
-
807
- return parallel_overscan_wcs
808
-
809
- def open(self, mode=None):
810
-
811
- primary_header = self.create_primary_header()
812
-
813
- # The primary HDU contains only this header and no image data
814
-
815
- primary_hdu = fits.PrimaryHDU()
816
- primary_hdu.header = primary_header
817
-
818
- # The FITS file is created. If the filename is already in use, an exception
819
- # will be thrown.
820
-
821
- primary_hdu.writeto(self._filepath)
822
-
823
- self.is_fits_file_open = True
824
-
825
- def close(self):
826
- """Closes the resource."""
827
-
828
- self.is_fits_file_open = False
829
-
830
- def exists(self):
831
-
832
- return self._filepath.exists()
833
-
834
- def create(self, data: dict):
835
- """Add the given data to the FITS file.
836
-
837
- The given data is a stream of SpW packets, only part of which contains information that should go in the FITS file:
838
-
839
- - Image (and over-scan) data contain the transmitted readout of the CCD(s);
840
- - Timecode and housekeeping packets are not needed for this purpose.
841
- """
842
-
843
- for key, spw_packet in data.items():
844
-
845
- # if isinstance(spw_packet, TimecodePacket):
846
- #
847
- # self.timestamp = spw_packet.
848
-
849
- if key == "Timestamp":
850
-
851
- self.timestamp = spw_packet
852
- self.finetime = time_since_epoch_1958(self.timestamp)
853
-
854
- elif isinstance(spw_packet, DataPacket):
855
-
856
- try:
857
- ccd_bin_to_id = self.setup.camera.fee.ccd_numbering.CCD_BIN_TO_ID
858
- except AttributeError:
859
- raise SetupError("No entry in the setup for camera.fee.ccd_numbering.CCD_BIN_TO_ID")
860
- spw_packet_data_type = spw_packet.type
861
- ccd_number = ccd_bin_to_id[spw_packet_data_type.ccd_number] # 1-4
862
- ccd_side = self.fee_side(spw_packet_data_type.ccd_side)
863
- data_array = spw_packet.data_as_ndarray
864
-
865
- self.data_arrays[ccd_side][ccd_number] = np.append(self.data_arrays[ccd_side][ccd_number], data_array)
866
-
867
- # If all data has been received for the current exposure of this CCD, the following steps must be
868
- # performed:
869
- # - re-shape the 1D data arrays to 2D data arrays;
870
- # - extract the different pieces of the 2D data arrays (image + scan maps);
871
- # - for the F-side: flip the image and parallel over-scan horizontally;
872
- # - if both CCD sides are transmitted, stitch them together (for the image and parallel
873
- # over-scan);
874
- # - update the WCS of the different regions;
875
- # - add to the FITS file.
876
-
877
- if self.is_all_data_received(spw_packet_data_type, ccd_number, ccd_side):
878
-
879
- # Write the information to FITS
880
-
881
- for ccd_side in self.fee_side:
882
-
883
- try:
884
- if self.data_arrays[ccd_side][ccd_number].size > 0:
885
- self.assemble_slice(ccd_number, ccd_side)
886
- self.frame_number[ccd_number][ccd_side] += 1
887
- except MissingSpWPacketsError as exc:
888
- logger.info(exc)
889
-
890
- # Get ready for the next exposure
891
-
892
- self.clear_for_next_exposure(ccd_number, ccd_side)
893
-
894
- def is_all_data_received(self, spw_packet_data_type: DataPacketType, ccd_number: int, ccd_side):
895
- """ Check if all data has been received for the current exposure.
896
-
897
- Args:
898
- - spw_packet_data_type: Last received data packet type.
899
- - ccd_number: CCD from which the last received data originates (1-4).
900
- - ccd_side: CCD side from which the last received data originates.
901
-
902
- Returns: True if all data for the current exposure has been received; False otherwise.
903
- """
904
-
905
- from egse.dpu import got_all_last_packets
906
-
907
- if spw_packet_data_type.last_packet:
908
-
909
- packet_type = spw_packet_data_type.packet_type
910
-
911
- if packet_type == PacketType.DATA_PACKET:
912
-
913
- if ccd_side == self.fee_side.E_SIDE:
914
-
915
- self.received_last_packet_flags[ccd_number][0] = True
916
-
917
- elif ccd_side == self.fee_side.F_SIDE:
918
-
919
- self.received_last_packet_flags[ccd_number][1] = True
920
-
921
- elif packet_type == PacketType.OVERSCAN_DATA:
922
-
923
- if ccd_side == self.fee_side.E_SIDE:
924
-
925
- self.received_last_packet_flags[ccd_number][2] = True
926
-
927
- elif ccd_side == self.fee_side.F_SIDE:
928
-
929
- self.received_last_packet_flags[ccd_number][3] = True
930
-
931
- return got_all_last_packets(self.received_last_packet_flags[ccd_number], self.expected_last_packet_flags)
932
-
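
The four flag slots used above follow a fixed layout per CCD (E-side data, F-side data, E-side over-scan, F-side over-scan). The real check is imported from egse.dpu; the sketch below is not that implementation, it only illustrates comparing the received flags against the expected flags:

    # Sketch only: assumed flag layout [E data, F data, E over-scan, F over-scan].
    def got_all_last_packets(received_flags, expected_flags):
        # the received flags must match the expected flags exactly
        return all(received == expected for received, expected in zip(received_flags, expected_flags))

    expected = [True, True, True, True]    # both sides + serial over-scan transmitted
    received = [True, False, True, False]  # F-side packets still missing
    print(got_all_last_packets(received, expected))  # False
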
933
- def got_all_last_packets(self, ccd_number: int, ccd_side):
934
- """ Check whether all the expected last-packet flags have been seen for the given CCD side.
935
-
936
- Args:
937
- - ccd_number: CCD from which the last received data originates (1-4).
938
- - ccd_side: CCD side from which the last received data originates.
939
-
940
- Returns: True if the actual and the expected last-packet flags match for the given CCD side of the given CCD;
941
- False otherwise.
942
- """
943
-
944
- received_last_packet_flags = self.received_last_packet_flags[ccd_number]
945
-
946
- if ccd_side == self.fee_side.E_SIDE:
947
-
948
- return received_last_packet_flags[0] == self.expected_last_packet_flags[0] \
949
- and received_last_packet_flags[2] == self.expected_last_packet_flags[2]
950
-
951
- elif ccd_side == self.fee_side.F_SIDE:
952
-
953
- return received_last_packet_flags[1] == self.expected_last_packet_flags[1] \
954
- and received_last_packet_flags[3] == self.expected_last_packet_flags[3]
955
-
956
- return False
957
-
958
- def assemble_slice(self, ccd_number: int, ccd_side):
959
- """ Assemble the data for the given CCD and write it to FITS.
960
-
961
- Args:
962
- - ccd_number: CCD identifier (1/2/3/4).
- - ccd_side: CCD side from which the data originates.
963
- """
964
-
965
- # Windowing (pattern) mode
966
-
967
- if self.is_windowing_mode:
968
-
969
- # self.assemble_slice_windowing_mode(ccd_number, ccd_side)
970
- pass
971
-
972
- # Full-image (pattern) mode
973
-
974
- else:
975
-
976
- self.assemble_slice_full_image_mode(ccd_number, ccd_side)
977
-
978
- # def assemble_slice_windowing_mode(self, ccd_number: int):
979
- # """ Assemble the data for the given CCD and write it to FITS, for windowing mode or windowing pattern mode.
980
- #
981
- # This consists of the following steps:
982
- #
983
- # - Create a 1D array, filled with NaNs, exactly big enough to fit the image and scan maps;
984
- # - Insert the data that was acquired (i.e. coming from the windows);
985
- # - Convert the 1D data arrays to a 2D data array, in which the image and the scan maps are still glued
986
- # together (do this for each transmitted CCD side);
987
- # - Extract for each transmitted CCD side the different regions from the 2D array (image, serial pre-scan,
988
- # serial over-scan, and parallel over-scan);
989
- # - Append the serial pre-scan of the transmitted CCD side(s) to the FITS file (after completing its header);
990
- # - Append the serial over-scan of the transmitted CCD side(s) to the FITS file (after completing its header),
991
- # if present;
992
- # - In case the F-side is transmitted, flip its image and parallel over-scan horizontally;
993
- # - In case both sides are transmitted, stitch the two sides together for the image and the parallel
994
- # over-scan;
995
- # - Append the parallel over-scan to the FITS file (after completing its header), if present;
996
- # - Append the image to the FITS file (after completing its header).
997
- #
998
- # Args:
999
- # - ccd_number: CCD identifier (1/2/3/4).
1000
- # """
1001
- #
1002
- # num_rows = self.v_end - self.v_start + 1
1003
- # num_columns = self.h_end + 1
1004
- #
1005
- # num_rows_image = min(CCD_SETTINGS.NUM_ROWS - 1, self.v_end) - self.v_start + 1
1006
- # num_rows_parallel_overscan = max(0, num_rows - num_rows_image)
1007
- #
1008
- # image = np.array([]).reshape(num_rows_image, 0)
1009
- # parallel_overscan = np.array([]).reshape(num_rows_parallel_overscan, 0)
1010
- #
1011
- # for ccd_side in fee_side:
1012
- #
1013
- # if ccd_side in self.data_arrays:
1014
- #
1015
- # data_array = np.array([float("nan")] * (num_rows * num_columns))
1016
- # data_array[self.windows[ccd_side][ccd_number]] = self.data_arrays[ccd_side][ccd_number]
1017
- #
1018
- # side_image, side_serial_prescan, side_serial_overscan, side_parallel_overscan = \
1019
- # self.extract_full_image_mode(data_array)
1020
- #
1021
- # # Serial pre-scan
1022
- #
1023
- # self.append_serial_prescan(side_serial_prescan, ccd_number, ccd_side)
1024
- #
1025
- # # Serial over-scan
1026
- #
1027
- # if self.has_serial_overscan:
1028
- #
1029
- # self.append_serial_overscan(side_serial_overscan, ccd_number, ccd_side)
1030
- #
1031
- # # For the F-side, the image and parallel over-scan must be flipped horizontally
1032
- #
1033
- # if ccd_side == fee_side.F_SIDE:
1034
- #
1035
- # side_image, side_parallel_overscan = self.flip_f_side_full_image_mode(side_image,
1036
- # side_parallel_overscan)
1037
- #
1038
- # # Image (the part that is on this side)
1039
- #
1040
- # image = np.hstack([image, side_image])
1041
- #
1042
- # # Parallel over-scan (the part that is on this side)
1043
- #
1044
- # if self.has_parallel_overscan:
1045
- #
1046
- # parallel_overscan = np.hstack([parallel_overscan, side_parallel_overscan])
1047
- #
1048
- # # Parallel over-scan
1049
- #
1050
- # if self.has_parallel_overscan:
1051
- #
1052
- # self.append_parallel_overscan(parallel_overscan, ccd_number)
1053
- #
1054
- # # Image
1055
- #
1056
- # self.append_image(image, ccd_number)
1057
-
1058
- def assemble_slice_full_image_mode(self, ccd_number: int, ccd_side):
1059
- """ Assemble the data for the given CCD and write it to FITS, for full-image mode or full-image pattern mode.
1060
-
1061
- This consists of the following steps:
1062
-
1063
- - Convert the 1D data arrays to a 2D data array, in which the image and the scan maps are still glued
1064
- together (do this for each transmitted CCD side);
1065
- - Extract for each transmitted CCD side the different regions from the 2D array (image, serial pre-scan,
1066
- serial over-scan, and parallel over-scan);
1067
- - Append the serial pre-scan of the transmitted CCD side(s) to the FITS file (after completing its header);
1068
- - Append the serial over-scan of the transmitted CCD side(s) to the FITS file (after completing its header),
1069
- if present;
1070
- - In case the F-side is transmitted, flip its image and parallel over-scan horizontally;
1071
- - In case both sides are transmitted, stitch the two sides together for the image and the parallel
1072
- over-scan;
1073
- - Append the parallel over-scan to the FITS file (after completing its header), if present;
1074
- - Append the image to the FITS file (after completing its header).
1075
-
1076
- Args:
1077
- - ccd_number: CCD identifier (1/2/3/4).
- - ccd_side: CCD side from which the data originates.
1078
- """
1079
-
1080
- num_rows = self.v_end - self.v_start + 1
1081
- num_columns = self.h_end + 1
1082
-
1083
- # Re-shape the 1D array to a 2D array and extract the image and scan maps
1084
-
1085
- try:
1086
- data_array = np.reshape(self.data_arrays[ccd_side][ccd_number], (num_rows, num_columns)) # 1D -> 2D
1087
- except ValueError as exc:
1088
- raise MissingSpWPacketsError(f"Incomplete SpW data for frame {self.frame_number[ccd_number][ccd_side]} "
1089
- f"for the {ccd_side.name[0]}-side of CCD{ccd_number}. Check the DPU "
1090
- f"Processing logs for more information.") from exc
1091
- side_image, side_serial_prescan, side_serial_overscan, side_parallel_overscan = \
1092
- self.extract_full_image_mode(data_array)
1093
-
1094
- # Serial pre-scan
1095
-
1096
- self.append_serial_prescan(side_serial_prescan, ccd_number, ccd_side)
1097
-
1098
- # Serial over-scan
1099
-
1100
- if self.has_serial_overscan:
1101
-
1102
- self.append_serial_overscan(side_serial_overscan, ccd_number, ccd_side)
1103
-
1104
- # Parallel over-scan
1105
-
1106
- if self.has_parallel_overscan:
1107
-
1108
- self.append_parallel_overscan(side_parallel_overscan, ccd_number, ccd_side)
1109
-
1110
- # Image
1111
-
1112
- self.append_image(side_image, ccd_number, ccd_side)
1113
-
1114
- def extract_full_image_mode(self, data_array):
1115
- """Extract the image and scan maps from the given data array.
1116
-
1117
- For the F-side, the image and parallel over-scan still have to be flipped horizontally.
1118
-
1119
- Args:
1120
- - data_array: 2D array in which the image data and scan maps are still glued together.
1121
-
1122
- Returns:
1123
- - Image data;
1124
- - Serial pre-scan;
1125
- - Serial over-scan;
1126
- - Parallel over-scan.
1127
- """
1128
-
1129
- # Calculate the following indices:
1130
- # - Column index of the first transmitted image column;
1131
- # - Column index of the last transmitted image column + 1 (if the serial over-scan is transmitted, this will
1132
- # be the column index of the first transmitted serial over-scan column);
1133
- # - Row index of the last transmitted image row + 1 (if the parallel over-scan is transmitted, this will be
1134
- # the row index of the first transmitted parallel over-scan row).
1135
-
1136
- start_column_image = CCD_SETTINGS.LENGTH_SERIAL_PRESCAN
1137
- end_column_image_plus_1 = min(CCD_SETTINGS.LENGTH_SERIAL_PRESCAN + CCD_SETTINGS.NUM_COLUMNS // 2, self.h_end + 1)
1138
- end_row_image_plus_1 = min(CCD_SETTINGS.NUM_ROWS - 1, self.v_end) - self.v_start + 1
1139
-
1140
- # Serial pre-scan (all rows, but only the first couple of columns)
1141
-
1142
- serial_prescan = data_array[:, 0: start_column_image]
1143
-
1144
- # Serial over-scan (all rows, omit the columns from the serial pre-scan and the image)
1145
-
1146
- if self.has_serial_overscan:
1147
-
1148
- serial_overscan = data_array[:, end_column_image_plus_1:]
1149
-
1150
- else:
1151
-
1152
- serial_overscan = None
1153
-
1154
- # Image (omit the rows from the parallel over-scan
1155
- # and the columns from the serial pre-scan and the serial over-scan)
1156
-
1157
- image = data_array[0: end_row_image_plus_1, start_column_image: end_column_image_plus_1]
1158
-
1159
- # Parallel over-scan
1160
-
1161
- if self.has_parallel_overscan:
1162
-
1163
- parallel_overscan = data_array[end_row_image_plus_1:, start_column_image: end_column_image_plus_1]
1164
-
1165
- else:
1166
-
1167
- parallel_overscan = None
1168
-
1169
- return image, serial_prescan, serial_overscan, parallel_overscan
1170
-
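
To make the slicing above concrete, here is a toy example with made-up dimensions (LENGTH_SERIAL_PRESCAN, NUM_COLUMNS and NUM_ROWS normally come from CCD_SETTINGS; the numbers below are illustrative only):

    import numpy as np

    # Assumed toy geometry: 3 pre-scan columns, 6 image columns per side, 6 image rows.
    LENGTH_SERIAL_PRESCAN, NUM_COLUMNS_HALF, NUM_ROWS = 3, 6, 6
    v_start, v_end, h_end = 0, 7, 10   # 8 transmitted rows, 11 transmitted columns

    data_array = np.arange((v_end - v_start + 1) * (h_end + 1)).reshape(v_end - v_start + 1, h_end + 1)

    start_column_image = LENGTH_SERIAL_PRESCAN                                          # 3
    end_column_image_plus_1 = min(LENGTH_SERIAL_PRESCAN + NUM_COLUMNS_HALF, h_end + 1)  # 9
    end_row_image_plus_1 = min(NUM_ROWS - 1, v_end) - v_start + 1                       # 6

    serial_prescan = data_array[:, :start_column_image]                                 # shape (8, 3)
    serial_overscan = data_array[:, end_column_image_plus_1:]                            # shape (8, 2)
    image = data_array[:end_row_image_plus_1, start_column_image:end_column_image_plus_1]            # shape (6, 6)
    parallel_overscan = data_array[end_row_image_plus_1:, start_column_image:end_column_image_plus_1]  # shape (2, 6)
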
1171
- def append_serial_prescan(self, serial_prescan, ccd_number: int, ccd_side: str):
1172
- """ Append the given serial pre-scan to the FITS file (after completing its header).
1173
-
1174
- Args:
1175
- - serial_prescan: Serial pre-scan.
1176
- - ccd_number: CCD identifier.
1177
- - ccd_side: CCD side.
1178
- """
1179
-
1180
- extension = f"SPRE_{ccd_number}_{self.fee_side(ccd_side).name[0]}"
1181
- self.serial_prescan_header["EXTNAME"] = extension
1182
- self.serial_prescan_header["EXTVER"] = self.frame_number[ccd_number][ccd_side]
1183
- self.serial_prescan_header["CCD_ID"] = (ccd_number, "CCD identifier",)
1184
- self.serial_prescan_header["SENSOR_SEL"] = (self.fee_side(ccd_side).name[0], "CCD side")
1185
- self.serial_prescan_header["DATE-OBS"] = (self.timestamp, "Timestamp for 1st frame",)
1186
- self.serial_prescan_header["FINETIME"] = (self.finetime, "Finetime representation of DATE-OBS",)
1187
-
1188
- if ccd_side in [self.fee_side.RIGHT_SIDE.name, self.fee_side.RIGHT_SIDE]:
1189
- serial_prescan = np.fliplr(serial_prescan)
1190
-
1191
- fits.append(str(self._filepath), serial_prescan, self.serial_prescan_header)
1192
-
1193
- def append_serial_overscan(self, serial_overscan, ccd_number: int, ccd_side: str):
1194
- """ Append the given serial over-scan to the FITS file (after completing its header).
1195
-
1196
- Args:
1197
- - serial_overscan: Serial over-scan.
1198
- - ccd_number: CCD identifier.
1199
- - ccd_side: CCD side.
1200
- """
1201
-
1202
- extension = f"SOVER_{ccd_number}_{self.fee_side(ccd_side).name[0]}"
1203
- self.serial_overscan_header["EXTNAME"] = extension
1204
- self.serial_overscan_header["EXTVER"] = self.frame_number[ccd_number][ccd_side]
1205
- self.serial_overscan_header["CCD_ID"] = (ccd_number, "CCD identifier",)
1206
- self.serial_overscan_header["SENSOR_SEL"] = (self.fee_side(ccd_side).name[0], "CCD side")
1207
- self.serial_overscan_header["NAXIS1"] = (serial_overscan.shape[1], "Number of columns in the serial over-scan",)
1208
- self.serial_overscan_header["DATE-OBS"] = (self.timestamp, "Timestamp for 1st frame",)
1209
- self.serial_overscan_header["FINETIME"] = (self.finetime, "Finetime representation of DATE-OBS")
1210
-
1211
- if ccd_side in [self.fee_side.RIGHT_SIDE.name, self.fee_side.RIGHT_SIDE]:
1212
- serial_overscan = np.fliplr(serial_overscan)
1213
-
1214
- fits.append(str(self._filepath), serial_overscan, self.serial_overscan_header)
1215
-
1216
- def append_parallel_overscan(self, parallel_overscan, ccd_number: int, ccd_side: str):
1217
- """ Append the given parallel over-scan to the FITS file (after completing its header).
1218
-
1219
- Args:
1220
- - parallel_overscan: Parallel over-scan.
1221
- - ccd_number: CCD identifier.
1222
- - ccd_side: CCD side.
1223
- """
1224
-
1225
- extension = f"POVER_{ccd_number}_{self.fee_side(ccd_side).name[0]}"
1226
- self.parallel_overscan_header["EXTNAME"] = extension
1227
- self.parallel_overscan_header["EXTVER"] = self.frame_number[ccd_number][ccd_side]
1228
- self.parallel_overscan_header["CCD_ID"] = (ccd_number, "CCD identifier",)
1229
- self.parallel_overscan_header["SENSOR_SEL"] = (self.fee_side(ccd_side).name[0], "CCD side")
1230
- self.parallel_overscan_header["NAXIS1"] = (parallel_overscan.shape[1],
1231
- "Number of columns in the parallel over-scan",)
1232
- self.parallel_overscan_header["DATE-OBS"] = (self.timestamp, "Timestamp for 1st frame",)
1233
- self.parallel_overscan_header["FINETIME"] = (self.finetime, "Finetime representation of DATE-OBS",)
1234
-
1235
- if ccd_side in [self.fee_side.RIGHT_SIDE.name, self.fee_side.RIGHT_SIDE]:
1236
- parallel_overscan = np.fliplr(parallel_overscan)
1237
-
1238
- fits.append(str(self._filepath), parallel_overscan, self.parallel_overscan_header)
1239
-
1240
- def append_image(self, image, ccd_number: int, ccd_side: str):
1241
- """ Append the given image to the FITS file (after completing its header).
1242
-
1243
- Args:
1244
- - image: Image.
1245
- - ccd_number: CCD identifier (1/2/3/4).
1246
- - ccd_side: CCD side.
1247
- """
1248
-
1249
- extension = f"IMAGE_{ccd_number}_{self.fee_side(ccd_side).name[0]}"
1250
- self.image_header["EXTNAME"] = extension
1251
- self.image_header["EXTVER"] = self.frame_number[ccd_number][ccd_side]
1252
- self.image_header["CCD_ID"] = (ccd_number, "CCD identifier",)
1253
- self.image_header["SENSOR_SEL"] = (self.fee_side(ccd_side).name[0], "CCD side")
1254
- self.image_header["NAXIS1"] = (image.shape[1], "Number of columns in the image",)
1255
-
1256
- ccd_orientation_degrees = CCD_SETTINGS.ORIENTATION[ccd_number - 1]
1257
- ccd_orientation_radians = radians(ccd_orientation_degrees)
1258
-
1259
- self.image_header["CROTA2"] = (ccd_orientation_degrees, "CCD orientation angle [degrees]",)
1260
-
1261
- cdelt = CCD_SETTINGS["PIXEL_SIZE"] / 1000.0 # Pixel size [mm]
1262
-
1263
- self.image_header["CD1_1"] = (cdelt * cos(ccd_orientation_radians),
1264
- "Pixel size x cos(CCD orientation angle)",)
1265
- self.image_header["CD1_2"] = (-cdelt * sin(ccd_orientation_radians),
1266
- "-Pixel size x sin(CCD orientation angle)",)
1267
- self.image_header["CD2_1"] = (cdelt * sin(ccd_orientation_radians),
1268
- "Pixel size x sin(CCD orientation angle)",)
1269
- self.image_header["CD2_2"] = (cdelt * cos(ccd_orientation_radians),
1270
- "Pixel size x cos(CCD orientation angle)",)
1271
-
1272
- zeropoint_x, zeropoint_y = -np.array(CCD_SETTINGS.ZEROPOINT)
1273
- crval1 = zeropoint_x * cos(ccd_orientation_radians) - zeropoint_y * sin(ccd_orientation_radians)
1274
- crval2 = zeropoint_x * sin(ccd_orientation_radians) + zeropoint_y * cos(ccd_orientation_radians)
1275
-
1276
- self.image_header["CRVAL1"] = (crval1, "FP x-coordinate of the CCD origin [mm]",)
1277
- self.image_header["CRVAL2"] = (crval2, "FP y-coordinate of the CCD origin [mm]",)
1278
-
1279
- self.image_header["CRPIX2"] = (-self.v_start,
1280
- "CCD origin row wrt 1st transmitted row",)
1281
-
1282
- if ccd_side in [self.fee_side.LEFT_SIDE.name, self.fee_side.LEFT_SIDE]:
1283
- self.image_header["CRPIX1"] = (0, "CCD origin column wrt lower left corner",)
1284
-
1285
- if ccd_side in [self.fee_side.RIGHT_SIDE.name, self.fee_side.RIGHT_SIDE]:
1286
- image = np.fliplr(image)
1287
- self.image_header["CRPIX1"] = (-CCD_SETTINGS.NUM_ROWS + image.shape[1],
1288
- "CCD origin column wrt lower left corner")
1289
-
1290
- self.image_header["DATE-OBS"] = (self.timestamp, "Timestamp for 1st frame",)
1291
- self.image_header["FINETIME"] = (self.finetime, "Finetime representation of DATE-OBS",)
1292
-
1293
- fits.append(str(self._filepath), image, self.image_header)
1294
-
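
The CD1_1 .. CD2_2 keywords written above are simply the pixel scale multiplied by a rotation matrix for the CCD orientation angle. A minimal sketch with assumed numbers (the real cdelt and angle come from CCD_SETTINGS):

    import numpy as np

    cdelt, angle_deg = 0.018, 90.0   # assumed pixel size [mm] and CCD orientation [deg]
    theta = np.radians(angle_deg)

    cd = cdelt * np.array([[np.cos(theta), -np.sin(theta)],
                           [np.sin(theta),  np.cos(theta)]])
    # cd[0, 0] -> CD1_1, cd[0, 1] -> CD1_2, cd[1, 0] -> CD2_1, cd[1, 1] -> CD2_2
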
1295
- def read(self, select=None):
1296
- """Returns a list of all entries in the persistence store.
1297
-
1298
- The list can be filtered based on a selection from the `select` argument which
1299
- should be a Callable object.
1300
-
1301
- Args:
1302
- - select (Callable): a filter function to narrow down the list of all entries.
1303
-
1304
- Returns: List or generator for all entries in the persistence store.
1305
- """
1306
- raise NotImplementedError("Persistence layers must implement a read method")
1307
-
1308
- def update(self, idx, data):
1309
-
1310
- pass
1311
-
1312
- def delete(self, idx):
1313
-
1314
- logger.warning("The delete functionality is not implemented for the FITS persistence layer.")
1315
-
1316
- def get_filepath(self):
1317
-
1318
- return self._filepath
1319
-
1320
-
1321
- class HDF5(PersistenceLayer):
1322
- extension = "hdf5"
1323
-
1324
- def __init__(self, filename, prep: dict = None):
1325
- """
1326
- The `prep` argument needs at least the following mandatory key:value pairs:
1327
-
1328
- * mode: the mode used for opening the file [default is 'r']
1329
-
1330
- """
1331
- # logger.debug(f"{h5py.version.hdf5_version=}")
1332
- self._filepath = Path(filename)
1333
- prep = prep or {}
- self._mode = prep.get("mode") or "r"
1334
- self._h5file: Optional[h5py.File] = None
1335
-
1336
- def __enter__(self):
1337
- self.open(mode=self._mode)
1338
- return self
1339
-
1340
- def __exit__(self, exc_type, exc_val, exc_tb):
1341
- self.close()
1342
-
1343
- def open(self, mode=None):
1344
- self._mode = mode or self._mode
1345
- logger.debug(f"Opening file {self._filepath} in mode '{self._mode}'")
1346
- self._h5file = h5py.File(self._filepath, mode=self._mode, driver='core')
1347
-
1348
- # File "h5py/h5f.pyx", line 554, in h5py.h5f.FileID.start_swmr_write
1349
- # RuntimeError: Unable to start swmr writing (file superblock version - should be at least 3)
1350
- # self._h5file.swmr_mode = True
1351
-
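
The commented-out SWMR line above fails because SWMR writing requires an HDF5 superblock of at least version 3, which in turn requires the file to be created with a recent library version. If SWMR were ever wanted, it would look roughly like the sketch below (illustrative only, not what this class does; the file name is made up):

    import h5py

    # Sketch only: libver="latest" gives a superblock >= v3, which SWMR needs.
    with h5py.File("/tmp/example_swmr.h5", "w", libver="latest") as f:
        dset = f.create_dataset("data", shape=(0,), maxshape=(None,), dtype="f8")
        f.swmr_mode = True   # must be enabled after all datasets are created
        dset.resize((10,))   # the writer can now extend while readers follow
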
1352
- def close(self):
1353
- self._h5file.close()
1354
-
1355
- def exists(self):
1356
- return self._filepath.exists()
1357
-
1358
- def create(self, data):
1359
- """
1360
- Store the given data in the HDF5 file. The data argument shall be a dictionary where the
1361
- keys represent the group where the data shall be saved, and the value is the data to be
1362
- saved. When the key ends with ":ATTRS", then the value is a list of attributes to that
1363
- group. Values can be of different type and are processed if needed.
1364
-
1365
- An example data argument:
1366
-
1367
- {
1368
- "/10/timecode": tc_packet,
1369
- "/10/timecode:ATTRS": [("timestamp", timestamp)],
1370
- "/10/command/": f"{command.__name__}, {args=}",
1371
- "/10/register/": self.register_map.get_memory_map_as_ndarray()
1372
- }
1373
-
1374
- The example saves a Timecode packet in the group "/10/timecode" and attaches a timestamp
1375
- as an attribute called "timestamp" to the same group. It then adds a command string in
1376
- the "/10/command" group and finally adds a register memory map (an np.ndarray) in the group
1377
- "/10/register".
1378
-
1379
- Args:
1380
- data (dict): a dictionary containing the data that needs to be saved.
1381
-
1382
- Returns:
1383
- None.
1384
- """
1385
- for key, value in data.items():
1386
- if key.endswith(":ATTRS"):
1387
- a_key = key.split(":")[0]
1388
- for k, v in value:
1389
- self._h5file[a_key].attrs[k] = v
1390
- if isinstance(value, TimecodePacket):
1391
- self._h5file[key] = value.timecode
1392
- if isinstance(value, HousekeepingPacket):
1393
- self._h5file[key] = value.packet_as_ndarray
1394
- if isinstance(value, HousekeepingData):
1395
- self._h5file[key] = value.data_as_ndarray
1396
- if isinstance(value, DataDataPacket):
1397
- self._h5file[key] = value.packet_as_ndarray
1398
- if isinstance(value, OverscanDataPacket):
1399
- self._h5file[key] = value.packet_as_ndarray
1400
- if isinstance(value, (str, bytearray, np.ndarray)):
1401
-
1402
- # if we save a command, put it into a 'commands' group.
1403
- # This is a special case that is the result of issue #1461
1404
-
1405
- if 'command' in key:
1406
- idx = key.split('/')[1]
1407
- if idx in self._h5file and 'commands' in self._h5file[idx]:
1408
- last_idx = max(int(k) for k in self._h5file[f"/{idx}/commands"].keys())  # numeric, not lexicographic
1409
- key = f"/{idx}/commands/{last_idx+1}"
1410
- else:
1411
- key = f"/{idx}/commands/0"
1412
-
1413
- self._h5file[key] = value
1414
-
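
A hypothetical usage sketch of the HDF5 layer above (the file name, group names and values are made up). Note that, because of the special-casing shown above, the command string actually ends up under /10/commands/0, /10/commands/1, and so on:

    import numpy as np

    h5_layer = HDF5("/tmp/obs_0001.hdf5", prep={"mode": "w"})
    with h5_layer as h5:
        h5.create({
            "/10/register": np.zeros(64, dtype=np.uint32),
            "/10/register:ATTRS": [("timestamp", "2025-01-01T00:00:00.000")],
            "/10/command": "set_register_value, args=('ccd_vod_config', 0x123)",
        })
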
1415
- def read(self, select=None):
1416
- """
1417
- Read information or data from the HDF5 file.
1418
-
1419
- The `select` argument can contain the following information:
1420
-
1421
- * the string 'number_of_groups': request to determine the number of top groups in
1422
- the HDF5 file.
1423
- * the string 'last_top_group': request the name/key of the last item in the top group.
1424
- The last item is the last element of the list of keys, sorted with natural order.
1425
-
1426
- Args:
1427
- select (str or dict): specify which information should be read
1428
-
1429
- Returns:
1430
- When 'number_of_groups', return an integer, when 'last_top_group' return a string.
1431
- """
1432
- if select == "number_of_groups":
1433
- return len(self._h5file.keys())
1434
- if select == "last_top_group":
1435
- keys = self._h5file.keys()
1436
-
1437
- logger.debug(f"{self._h5file.filename}: {keys=}")
1438
-
1439
- return 0 if len(keys) == 0 else natsort.natsorted(keys)[-1]
1440
-
1441
- # The following lines are a longer version of the previous two lines; keep them for
1442
- # debugging because I had problems and am not yet sure what the cause is...
1443
-
1444
- # sorted_keys = natsort.natsorted(keys)
1445
- # logger.debug(f"{self._h5file.filename}: {sorted_keys=}")
1446
- # key = sorted_keys[-1]
1447
- # logger.debug(f"{key=}")
1448
- # return key
1449
-
1450
- def update(self, idx, data):
1451
-
1452
- pass
1453
-
1454
- def delete(self, idx):
1455
- pass
1456
-
1457
- def get_filepath(self):
1458
- return self._filepath
1459
-
1460
-
1461
- # class FITS(PersistenceLayer):
1462
- #
1463
- # """
1464
- # Persistence layer that saves (image) data in a FITS file. The images in the FITS file
1465
- # represent exposures of potentially different windows / sub-fields, which may even be
1466
- # located on different CCDs (but always of the same camera). The images are organised in
1467
- # FITS extensions, to represent the different windows. We assume that the images that
1468
- # belong to the same extension, have the same WCS (i.e. they map the same pixels on the same
1469
- # CCD), exposure time, etc.
1470
- # """
1471
- #
1472
- # extension = "fits"
1473
- #
1474
- # def __init__(self, filename, prep: dict = None):
1475
- #
1476
- # """
1477
- # Initialisation of the filepath, the array to keep track of the represented windows,
1478
- # and the base header with the information that is common to all images.
1479
- #
1480
- # :param filename: Name of the output FITS file.
1481
- # """
1482
- #
1483
- # self._filepath = Path(filename)
1484
- #
1485
- # # This array will be used to keep track of how many (different) windows are being
1486
- # # represented in the FITS file (this will be stored in the "NWINDOWS" keyword
1487
- # # in the header of the primary HDU)
1488
- #
1489
- # self.windows = np.array([])
1490
- #
1491
- # # Most of the information in the header will be the same for all images / windows. This
1492
- # # will be stored in the "base header".
1493
- #
1494
- # self.create_base_header()
1495
- #
1496
- # def create_base_header(self):
1497
- #
1498
- # """
1499
- # Creates a FITS header with the information that is common to all images of all windows. The
1500
- # header of the non-primary HDUs (i.e. for the images) will build further on this one.
1501
- # """
1502
- #
1503
- # self._base_header = fits.Header()
1504
- #
1505
- # self._base_header["SIMPLE"] = "T"
1506
- #
1507
- # # Dimensionality of the sub-field
1508
- #
1509
- # self._base_header["NAXIS"] = (2, "Dimensionality of the sub-field")
1510
- #
1511
- # # Focal length (this is needed for the conversion to field angles)
1512
- #
1513
- # self._base_header["FOCALLEN"] = (FOV_SETTINGS["FOCAL_LENGTH"], "Focal length [mm]")
1514
- #
1515
- # # Linear coordinate transformation from sub-field to focal-plane coordinates
1516
- #
1517
- # self._base_header["ctype1"] = ("LINEAR", "Linear coordinate transformation")
1518
- # self._base_header["ctype2"] = ("LINEAR", "Linear coordinate transformation")
1519
- #
1520
- # # Focal-plane coordinates are expressed in mm
1521
- #
1522
- # self._base_header["CUNIT1"] = ("MM", "Target unit in the column direction (mm)")
1523
- # self._base_header["CUNIT2"] = ("MM", "Target unit in the row direction (mm)")
1524
- #
1525
- # # Pixel size
1526
- #
1527
- # cdelt = CCD_SETTINGS["PIXEL_SIZE"] / 1000.0  # Pixel size [mm]
1528
- # self._base_header["CDELT1"] = (cdelt, "Pixel size in the x-direction [mm]")
1529
- # self._base_header["CDELT2"] = (cdelt, "Pixel size in the y-direction [mm]")
1530
- #
1531
- # # Additional keywords
1532
- #
1533
- # self._base_header["TELESCOP"] = (setup["camera_id"], "Camera ID")
1534
- # self._base_header["INSTRUME"] = (setup["camera_id"], "Camera ID")
1535
- # self._base_header["SITENAME"] = (setup["site_id"], "Name of the test site")
1536
- #
1537
- # # self._base_header["DATE-LOC"] = (
1538
- # # datetime.datetime.now.strftime("%Y-%m-%d %H:%M:%S"),
1539
- # # "Local time of observation"
1540
- # # )
1541
- #
1542
- # def open(self, mode=None):
1543
- #
1544
- # """
1545
- # Creates the FITS file and writes the header to the primary HDU (Header Data Unit).
1546
- # """
1547
- #
1548
- # # Create a FITS header that contains the information that is the same for all images
1549
- # # of all windows. This will be used as the header of the primary HDU.
1550
- #
1551
- # primary_header = self.create_primary_header()
1552
- #
1553
- # # The primary HDU contains only this header and no image data
1554
- #
1555
- # primary_hdu = fits.PrimaryHDU()
1556
- # primary_hdu.header = primary_header
1557
- #
1558
- # # The FITS file is created. If the filename is already in use, an exception
1559
- # # will be thrown.
1560
- #
1561
- # primary_hdu.writeto(self._filepath)
1562
- #
1563
- # def create_primary_header(self):
1564
- #
1565
- # """
1566
- # Creates the primary header (i.e. the header of the primary HDU). This contains information
1567
- # that is specific for the camera.
1568
- # """
1569
- #
1570
- # primary_header = fits.Header()
1571
- #
1572
- # primary_header["SIMPLE"] = "T"
1573
- #
1574
- # # Focal length [mm] (this is needed for the conversion to field angles)
1575
- #
1576
- # primary_header["FOCALLEN"] = (FOV_SETTINGS["FOCAL_LENGTH"], "Focal length [mm]")
1577
- #
1578
- # # Additional keywords
1579
- #
1580
- # primary_header["TELESCOP"] = (setup["camera_id"], "Camera ID")
1581
- # primary_header["INSTRUME"] = (setup["camera_id"], "Camera ID")
1582
- # primary_header["SITENAME"] = (setup["site_id"], "Name of the test site")
1583
- #
1584
- # # Number of windows
1585
- #
1586
- # primary_header["NWINDOWS"] = (0, "Number of windows")
1587
- #
1588
- # return primary_header
1589
- #
1590
- # def exists(self):
1591
- # return self._filepath.exists()
1592
- #
1593
- # def create(self, data):
1594
- #
1595
- # """
1596
- # Adds the given data to the FITS file. This acts as a new exposure for this specific
1597
- # window. We assume that the images that belong to the same extension, have the same
1598
- # WCS (i.e. they map the same pixels on the same CCD), exposure time, etc.
1599
- #
1600
- # :param data: Dictionary containing the image data and the information concerning
1601
- # the window over which it was acquired. This is organised in the following
1602
- # fashion:
1603
- # - "image": 2D numpy array with the image data;
1604
- # - "window": index of the window;
1605
- # - "zeropoint": pair (x, y) with the sub-field zeropoint [mm];
1606
- # - "exposure_time" exposure time [s];
1607
- # - "ccd": CCD code (1/2/3/4)
1608
- # """
1609
- #
1610
- # image = data["image"]
1611
- # window_index = data["window"]
1612
- # subfield_zeropoint = data["zeropoint"] # (x, y) [mm]
1613
- # exposure_time = data["exposure_time"]
1614
- # ccd_code = data["ccd"]
1615
- #
1616
- # # Copy + extend the primary header
1617
- #
1618
- # header = self.create_header(
1619
- # image, window_index, subfield_zeropoint, exposure_time, ccd_code
1620
- # )
1621
- #
1622
- # # Add this image (i.e. exposure) to the FITS file. It will end up in the
1623
- # # extension, dedicated to the window for which it was acquired (via the
1624
- # # "EXTNAME" keyword in the header).
1625
- #
1626
- # fits.append(self._filepath, image, header)
1627
- #
1628
- # # Keep track of how many windows are stored in the FITS file
1629
- #
1630
- # self.windows = np.append(self.windows, window_index)
1631
- # self.windows = np.unique(self.windows)
1632
- # fits.setval(self._filepath, "NWINDOWS", value=len(self.windows))
1633
- #
1634
- # def create_header(self, image, window_index, subfield_zeropoint, exposure_time, ccd_code):
1635
- #
1636
- # """
1637
- # Creates a FITS header for the given images. The base header is copied and information,
1638
- # specific for the given image, is added.
1639
- #
1640
- # :param image: 2D numpy array with the image data.
1641
- #
1642
- # :param window_index: Window for which the given image data represents a new exposure
1643
- # (which should be added to the FITS file).
1644
- #
1645
- # :param subfield_zeropoint: Pair (x, y) with the sub-field zeropoint [mm].
1646
- #
1647
- # :param exposure_time: Exposure time [s].
1648
- #
1649
- # :param ccd_code: CCD code (1/2/3/4).
1650
- # """
1651
- #
1652
- # from egse.coordinates import ccd_to_focal_plane_coordinates
1653
- #
1654
- # header = self._base_header.copy()
1655
- #
1656
- # # Dimensions of the sub-field
1657
- #
1658
- # header["NAXIS1"] = (image.shape[1], "Number of columns in the sub-field")
1659
- # header["NAXIS2"] = (image.shape[0], "Number of rows in the sub-field")
1660
- #
1661
- # # CCD origin + corresponding focal-plane coordinates
1662
- #
1663
- # # For the CCD origin, we know the coordinates in the source coordinate system
1664
- # # (i.e. the sub-field reference frame), from the sub-field zeropoint, and
1665
- # # can calculate the coordinates in the target coordinate system (i.e. in the
1666
- # # focal-plane reference frame). Hence, we use the CCD origin to be the
1667
- # # reference point of the coordinate transformation (from the sub-field
1668
- # # reference frame to the focal-plane reference frame):
1669
- # # - the coordinates of the reference point in the source reference frame
1670
- # # (i.e. in the sub-field reference frame) need to go in the CRPIXi
1671
- # # keywords;
1672
- # # - the coordinates of the reference point in the target reference frame
1673
- # # (i.e. in the focal-plane reference frame) need to go in the CRVALi
1674
- # # keywords.
1675
- #
1676
- # header["CRPIX1"] = (-subfield_zeropoint[0], "Sub-field column of the CCD origin [pixels]")
1677
- # header["CRPIX2"] = (-subfield_zeropoint[1], "Sub-field row of the CCD origin [pixels]")
1678
- #
1679
- # crval1, crval2 = ccd_to_focal_plane_coordinates(0, 0, ccd_code)
1680
- #
1681
- # header["CRVAL1"] = (crval1, "FP x-coordinate of the CCD origin [mm]")
1682
- # header["CRVAL2"] = (crval2, "FP y-coordinate of the CCD origin [mm]")
1683
- #
1684
- # # Pixel size
1685
- #
1686
- # cdelt = CCD_SETTINGS["PIXEL_SIZE"] / 1000.0  # Pixel size [mm]
1687
- #
1688
- # # Orientation angle of the CCD
1689
- #
1690
- # self._base_header["crota2"] = (
1691
- # CCD_SETTINGS["ORIENTATION"][ccd_code - 1],
1692
- # "CCD orientation angle [degrees]",
1693
- # )
1694
- #
1695
- # ccd_orientation_angle_radians = math.radians(CCD_SETTINGS["ORIENTATION"][ccd_code - 1])
1696
- # self._base_header["cd1_1"] = (
1697
- # cdelt * math.cos(ccd_orientation_angle_radians),
1698
- # "Pixel size x cos(CCD orientation angle)",
1699
- # )
1700
- # self._base_header["cd1_2"] = (
1701
- # -cdelt * math.sin(ccd_orientation_angle_radians),
1702
- # "-Pixel size x sin(CCD orientation angle)",
1703
- # )
1704
- # self._base_header["cd2_1"] = (
1705
- # cdelt * math.sin(ccd_orientation_angle_radians),
1706
- # "Pixel size x sin(CCD orientation angle)",
1707
- # )
1708
- # self._base_header["cd2_2"] = (
1709
- # cdelt * math.cos(ccd_orientation_angle_radians),
1710
- # "Pixel size x cos(CCD orientation angle)",
1711
- # )
1712
- #
1713
- # # Additional keywords
1714
- #
1715
- # header["TELESCOP"] = (setup["camera_id"], "Camera ID")
1716
- # header["INSTRUME"] = (setup["camera_id"], "Camera ID")
1717
- # header["SITENAME"] = (setup["site_id"], "Name of the test site")
1718
- # header["EXPOSURE"] = (exposure_time, "Exposure time [s]")
1719
- # # header["DATE-LOC"] = (
1720
- # # datetime.datetime.now.strftime("%Y-%m-%d %H:%M:%S"),
1721
- # # "Local time of observation"
1722
- # # )
1723
- # header["CCDNAME"] = (ccd_code, "CCD code")
1724
- #
1725
- # # Using this keyword, the image will end up in the correct extension
1726
- #
1727
- # header["EXTNAME"] = "WINDOW" + str(window_index)
1728
- #
1729
- # return header
1730
- #
1731
- # def update(self, idx, data):
1732
- #
1733
- # """
1734
- # Replaces a specific exposure of a specific window by the given image (the header
1735
- # is preserved). We assume that the images that belong to the same extension, have
1736
- # the same WCS (i.e. they map the same pixels on the same CCD), exposure time, etc.
1737
- #
1738
- # :param idx: Number of the exposure for which the image data has to be updated.
1739
- #
1740
- # :param data: Dictionary containing the image data and the information concerning
1741
- # the window over which it was acquired. This is organised in the following
1742
- # fashion:
1743
- # - "image": 2D numpy array with the image data;
1744
- # - "window": index of the window;
1745
- # (- "zeropoint": pair (x, y) with the sub-field zeropoint [mm];)
1746
- # (- "exposure_time" exposure time [s];)
1747
- # (- "ccd": CCD code (1/2/3/4))
1748
- # """
1749
- #
1750
- # image = data["image"]
1751
- # window_index = data["window"]
1752
- #
1753
- # extension = "WINDOW" + str(window_index)
1754
- #
1755
- # fits.update(self._filepath, image, extension, idx)
1756
- #
1757
- # def close(self):
1758
- #
1759
- # """
1760
- # Closing the FITS file is not implemented here, because all write methods do this
1761
- # automatically.
1762
- # """
1763
- #
1764
- # logger.warning("The close functionality is not implemented for the FITS persistence layer.")
1765
- #
1766
- # def read(self, select=None):
1767
- #
1768
- # """
1769
- # Reads the FITS file, yielding the HDUs, based on the given selection filter.
1770
- #
1771
- # :param select: Selection filter. Currently not used yet.
1772
- # """
1773
- #
1774
- # def generator_function():
1775
- #
1776
- # with fits.open(self._filepath) as open_file:
1777
- #
1778
- # for data in open_file:
1779
- #
1780
- # yield data
1781
- #
1782
- # return generator_function()
1783
- #
1784
- # def delete(self, idx):
1785
- #
1786
- # """
1787
- # Deleting individual exposures is not implemented here.
1788
- # """
1789
- #
1790
- # logger.warning(
1791
- # "The delete functionality is not implemented for the FITS persistence layer."
1792
- # )
1793
-
1794
- def parts(data, delimiter=",", quote_char='"', keep_quote_char=False):
1795
- compos = []
1796
- part = ''
1797
- skip = False
1798
- for character in data:
1799
- if (
1800
- (character == delimiter and skip)
1801
- # i.e. a delimiter inside a quoted section, or an ordinary character
1802
- or character not in [delimiter, quote_char]
1803
- ):
1804
- part += character
1805
- elif character == delimiter:
1806
- compos.append(part)
1807
- part = ''
1808
- else:
1809
- skip = not skip
1810
- if keep_quote_char: part += character
1811
- if part:
1812
- compos.append(part)
1813
-
1814
- return compos
1815
-
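
A few illustrative calls showing how parts() honours the quote character (the input strings are made up):

    print(parts('a,"b,c",d'))                        # ['a', 'b,c', 'd']
    print(parts('a,|b,c|,d', quote_char='|'))        # ['a', 'b,c', 'd']   (CSV1's default quote char)
    print(parts('a,"b,c",d', keep_quote_char=True))  # ['a', '"b,c"', 'd']
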
1816
-
1817
- # TODO:
1818
- # it might be useful to remember the last 10 or 100 lines and have a dedicated read
1819
- # function that returns these line quickly without the need to open the file.
1820
-
1821
-
1822
- class CSV1(PersistenceLayer):
1823
- """A Persistence Layer that saves data in a CSV file.
1824
-
1825
- This class uses a custom implementation.
1826
- """
1827
-
1828
- extension = "csv"
1829
-
1830
- def __init__(self, filename, prep: dict = None):
1831
- """
1832
- The `prep` argument is a dictionary that contains information to initialise this
1833
- persistence layer. The CSV uses the following:
1834
-
1835
- * column_names: a list or tuple with the names of the column headers
1836
- * mode: the mode in which the file shall be opened
1837
-
1838
- Args:
1839
- filename: a str or Path that represents the name of the file
1840
- prep (dict): preparation information to initialise the persistence layer
1841
- """
1842
- prep = prep or {}
1843
- self._filepath = Path(filename)
1844
- self._column_names = prep.get("column_names") or []
1845
- self._mode = prep.get("mode") or "r"
1846
- self._quote_char = prep.get("quote_char") or "|"
1847
- self._delimiter = prep.get("delimiter") or ","
1848
- self._fd = None
1849
- self._regex = re.compile(rf"\\.|[{re.escape(self._quote_char)}{re.escape(self._delimiter)}']", re.DOTALL)
1850
-
1851
- def is_open(self):
1852
- # we don't want to expose the file descriptor
1853
- return bool(self._fd)
1854
-
1855
- def open(self, mode=None):
1856
- """Opens the CSV file and writes the header if column_names are provided."""
1857
- self._mode = mode or self._mode
1858
- logger.debug(f"Opening file {self._filepath} in mode '{self._mode}'")
1859
- self._fd = self._filepath.open(mode=self._mode)
1860
- if self._column_names and self._mode == "w":
1861
- self._fd.write(",".join(self._column_names))
1862
- self._fd.write("\n")
1863
- return self
1864
-
1865
- def close(self):
1866
- logger.debug(f"Closing file {self._filepath}")
1867
- self._fd.close()
1868
- self._fd = None
1869
-
1870
- def exists(self):
1871
- return self._filepath.exists()
1872
-
1873
- def __enter__(self):
1874
- # Only open the file when not opened before. Remember if the file was open.
1875
- self._context_fd = self._fd
1876
- return self if self._fd else self.open(mode=self._mode)
1877
-
1878
- def __exit__(self, exc_type, exc_val, exc_tb):
1879
- # only close the file if it was opened by the context manager
1880
- self._context_fd or self.close()
1881
- # propagate any exception to the caller, i.e. don't return True
1882
- # return True
1883
-
1884
- def create(self, data):
1885
- """Write a line in the CSV file with the given numbers separated by commas.
1886
-
1887
- The `data` argument can be a list or a tuple in which case the numbers are just joined
1888
- to form a comma separated line. The `data` argument can also be a `dict`, in which
1889
- case the column_names are used to order the values from the dictionary.
1890
- The keys in the dictionary shall match the entries in the column_names.
1891
-
1892
- Args:
1893
- data: the input data to create the line
1894
- Raises:
1895
- IOError when the CSV file was not opened before.
1896
- """
1897
-
1898
- def quote(value):
1899
- if self._delimiter in value:
1900
- return f"{self._quote_char}{value}{self._quote_char}"
1901
- else:
1902
- return value
1903
-
1904
- if self._fd:
1905
- if isinstance(data, (list, tuple)):
1906
-
1907
- data = self._delimiter.join([quote(str(x)) for x in data])
1908
-
1909
- elif isinstance(data, dict):
1910
-
1911
- if not self._column_names:
1912
- logger.error("Cannot write ordered dictionary data, no column names provided.")
1913
- return
1914
-
1915
- # Extract the values from the dictionary and sort them according to the column_names
1916
-
1917
- data_list = [(k, v) for k, v in data.items()]
1918
- sorted_data_list = sorted(data_list, key=lambda x: self._column_names.index(x[0]))
1919
- data = self._delimiter.join([quote(str(x[1])) for x in sorted_data_list])
1920
-
1921
- self._fd.write(data)
1922
- data.endswith("\n") or self._fd.write("\n")
1923
- self._fd.flush()
1924
- else:
1925
- raise IOError(
1926
- "You try to write to a file which has not been opened yet, "
1927
- "first call the open method or use the context manager."
1928
- )
1929
-
1930
- def read(self, select=None):
1931
- def generator_function():
1932
- while True:
1933
- line = self._fd.readline().rstrip()
1934
- if line:
1935
- yield parts(line, self._delimiter, self._quote_char)
1936
- else:
1937
- break
1938
-
1939
- return generator_function()
1940
-
1941
- def update(self, line_no, data):
1942
- logger.warning("The update functionality is not implemented for the CSV persistence layer.")
1943
-
1944
- def delete(self, line_no):
1945
- logger.warning("The delete functionality is not implemented for the CSV persistence layer.")
1946
-
1947
- def get_filepath(self):
1948
- return self._filepath
1949
-
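
A hypothetical usage sketch of the CSV1 layer above (file name and column names are made up). Dictionary data is re-ordered according to column_names before it is written; list data is written as-is:

    hk_csv = CSV1("/tmp/hk.csv", prep={"column_names": ["timestamp", "temperature"], "mode": "w"})
    with hk_csv:
        hk_csv.create({"temperature": 23.5, "timestamp": "2025-01-01T00:00:00"})
        hk_csv.create(["2025-01-01T00:00:01", 23.6])

    with CSV1("/tmp/hk.csv", prep={"mode": "r"}) as reader:
        for row in reader.read():
            print(row)   # ['timestamp', 'temperature'] first, then the data rows
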
1950
-
1951
- class CSV2(PersistenceLayer):
1952
- """A Persistence Layer that saves data in a CSV file."""
1953
-
1954
- extension = "csv"
1955
-
1956
- def __init__(self, filename, prep: dict = None):
1957
- """
1958
- The `prep` argument is a dictionary that contains information to initialise this
1959
- persistence layer. The CSV initialisation uses the following:
1960
-
1961
- * column_names: a list or tuple with the names of the column headers
1962
- * mode: the mode in which the file shall be opened
1963
-
1964
- Args:
1965
- filename: a str or Path that represents the name of the file
1966
- prep (dict): preparation information to initialise the persistence layer
1967
- """
1968
- prep = prep or {}
1969
- self._filepath = Path(filename)
1970
- self._column_names = prep.get("column_names") or []
1971
- self._mode = prep.get("mode") or "r"
1972
- self._quote_char = prep.get("quote_char") or "|"
1973
- self._delimiter = prep.get("delimiter") or ","
1974
- self._fd = None
1975
-
1976
- def __enter__(self):
1977
- self._context_fd = self._fd
1978
- return self if self._fd else self.open(mode=self._mode)
1979
-
1980
- def __exit__(self, exc_type, exc_val, exc_tb):
1981
- # only close the file if it was opened by the context manager
1982
- self._context_fd or self.close()
1983
- # propagate any exception to the caller, i.e. don't return True
1984
- # return True
1985
-
1986
- def exists(self):
1987
- return self._filepath.exists()
1988
-
1989
- def is_open(self):
1990
- # we don't want to expose the file descriptor
1991
- return bool(self._fd)
1992
-
1993
- def open(self, mode=None):
1994
- """Opens the CSV file and writes the header if column_names are provided."""
1995
- self._mode = mode or self._mode
1996
- logger.debug(f"Opening file {self._filepath} in mode '{self._mode}'")
1997
- self._fd = self._filepath.open(mode=self._mode)
1998
- if self._column_names and self._mode == "w":
1999
- writer = csv.DictWriter(self._fd, fieldnames=self._column_names)
2000
- writer.writeheader()
2001
- return self
2002
-
2003
- def close(self):
2004
- logger.debug(f"Closing file {self._filepath}")
2005
- self._fd.close()
2006
- self._fd = None
2007
-
2008
- def create(self, data):
2009
- """Write a line in the CSV file.
2010
-
2011
- The `data` argument can be a list or a tuple, in which case the values are written as a
2012
- single delimiter-separated row. The `data` argument can also be a `dict`, in which case
2013
- the column_names are used to order the values from the dictionary.
2014
- The keys in the dictionary shall match the entries in column_names; any extra keys in
2015
- the dictionary are silently ignored.
2016
-
2017
- Args:
2018
- data: the input data to create the line
2019
- Raises:
2020
- IOError when the CSV file was not opened before.
2021
- """
2022
- if not self._fd:
2023
- raise IOError(
2024
- "You try to write to a file which has not been opened yet, "
2025
- "first call the open method or use the context manager."
2026
- )
2027
- if isinstance(data, (list, tuple)):
2028
- writer = csv.writer(
2029
- self._fd,
2030
- delimiter=self._delimiter,
2031
- quotechar=self._quote_char, quoting=csv.QUOTE_MINIMAL,
2032
- )
2033
- writer.writerow(data)
2034
- elif isinstance(data, dict):
2035
- if not self._column_names:
2036
- logger.error("Cannot write ordered dictionary data, no column names provided.")
2037
- return
2038
-
2039
- writer = csv.DictWriter(
2040
- self._fd,
2041
- fieldnames=self._column_names, extrasaction="ignore",
2042
- delimiter=self._delimiter,
2043
- quotechar=self._quote_char, quoting=csv.QUOTE_MINIMAL,
2044
- )
2045
- writer.writerow(data)
2046
- else:
2047
- self._fd.write(data)
2048
- data.endswith("\n") or self._fd.write("\n")
2049
-
2050
- self._fd.flush()
2051
-
2052
- def read(self, select=None):
2053
- csv_reader = csv.reader(self._fd, delimiter=self._delimiter, quotechar=self._quote_char)
2054
-
2055
- def generator_function():
2056
- if self._column_names:
2057
- yield next(csv_reader)
2058
-
2059
- for line in csv_reader:
2060
- yield line
2061
-
2062
- return generator_function()
2063
-
2064
- def update(self, line_no, data):
2065
- logger.warning("The update functionality is not implemented for the CSV persistence layer.")
2066
-
2067
- def delete(self, line_no):
2068
- logger.warning("The delete functionality is not implemented for the CSV persistence layer.")
2069
-
2070
- def get_filepath(self):
2071
- return self._filepath
2072
-
2073
-
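For reference, a minimal usage sketch of CSV2 (the file name, column names and values are made up for the illustration). Note that open() only writes the header row in mode 'w', and that read() yields the header row first when column_names are given:

from pathlib import Path

prep = {"column_names": ["timestamp", "temperature"], "mode": "w"}

with CSV2("hk_demo.csv", prep=prep) as csv_file:             # open() writes the header row
    csv_file.create({"temperature": 21.3, "timestamp": "2024-07-01T12:00:00"})  # dicts are reordered to the column names
    csv_file.create(["2024-07-01T12:01:00", 21.4])           # lists/tuples are written positionally

with CSV2("hk_demo.csv", prep={"mode": "r", "column_names": ["timestamp", "temperature"]}) as csv_file:
    for row in csv_file.read():                               # first yielded row is the header
        print(row)

Path("hk_demo.csv").unlink()                                  # clean up the demo file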
2074
- class TXT(PersistenceLayer):
2075
- """A Persistence Layer that saves data in a plain text file."""
-
2076
- extension = "txt"
2077
-
2078
- def __init__(self, filename, prep: dict = None):
2079
- """
2080
- The `prep` argument is a dictionary that contains information to initialise this
2081
- persistence layer. The TXT initialisation uses the following:
2082
-
2083
- * mode: the mode in which the file shall be opened
2084
- * ending: a character sequence that is used to end the write action
2085
- * header: a header text that will be written when opening the file
2086
-
2087
- Args:
2088
- filename: a str or Path that represents the name of the file
2089
- prep (dict): preparation information to initialise the persistence layer
2090
- """
2091
- prep = prep or {}
2092
- self._filepath = Path(filename)
2093
- self._mode = prep.get("mode") or "r"
2094
- self._ending = prep.get("ending") or ""
2095
- self._header = prep.get("header") or ""
2096
- self._fd = None
2097
-
2098
- def open(self, mode=None):
2099
- """Opens the TXT file."""
2100
- self._mode = mode or self._mode
2101
- logger.debug(f"Opening file {self._filepath} in mode '{self._mode}'")
2102
- self._fd = self._filepath.open(mode=self._mode)
2103
- if self._header and self._mode == "w":
2104
- self.create(self._header)
2105
- return self
2106
-
2107
- def close(self):
2108
- logger.debug(f"Closing file {self._filepath}")
2109
- self._fd.close()
2110
- self._fd = None
2111
-
2112
- def exists(self):
2113
- return self._filepath.exists()
2114
-
2115
- def __enter__(self):
2116
- self._context_fd = self._fd
2117
- return self if self._fd else self.open(mode=self._mode)
2118
-
2119
- def __exit__(self, exc_type, exc_val, exc_tb):
2120
- # only close the file if it was opened by the context manager
2121
- self._context_fd or self.close()
2122
- # note: returning True here suppresses any exception raised inside the with-block
- return True
2123
-
2124
- def create(self, data):
2125
- data_str = str(data)
2126
- logger.log(5, f"Writing data: {data_str[:min(80, len(data_str))]}...")
2127
- if self._fd:
2128
- self._fd.write(str(data))
2129
- self._fd.write(self._ending)
2130
- self._fd.flush()
2131
- else:
2132
- raise IOError(
2133
- "You try to write to a file which has not been opened yet, "
2134
- "first call the open method or use the context manager."
2135
- )
2136
-
2137
- def read(self, select=None):
2138
- """Read lines form the file.
2139
-
2140
- The `select` argument can take the following values:
2141
-
2142
- * `select == "last_line"`: return the last line of the file as a string
2143
- * `select == ("contains", <string>)`: returns all the files that contain `<string>`
2144
- * `select == ("startswith", <string>)`: return all line that start with `<string>`
2145
-
2146
- Args:
2147
- select (str or tuple): defines a selection / filter for reading the lines
2148
- Returns:
2149
- A list of lines from the file or the last line as a string.
2150
- """
2151
- if select == "last_line":
2152
- return read_last_line(self._filepath, max_line_length=4096)
2153
-
2154
- result = []
2155
-
2156
- if isinstance(select, tuple):
2157
- if select[0] == "contains":
2158
- with self._filepath.open(mode="r") as fd:
2159
- for line in fd:
2160
- if select[1] in line:
2161
- result.append(line.rstrip())
2162
- elif select[0] == "startswith":
2163
- with self._filepath.open(mode="r") as fd:
2164
- for line in fd:
2165
- if line.startswith(select[1]):
2166
- result.append(line.rstrip())
2167
- return result
2168
-
2169
- with self._filepath.open("r") as fd:
2170
- result = [line.rstrip() for line in fd]
2171
-
2172
- return result
2173
-
2174
- def update(self, idx, data):
2175
- logger.warning("The update functionality is not implemented for the TXT persistence layer.")
2176
-
2177
- def delete(self, idx):
2178
- logger.warning("The delete functionality is not implemented for the TXT persistence layer.")
2179
-
2180
- def get_filepath(self):
2181
- return self._filepath
2182
-
2183
-
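A minimal usage sketch for TXT (the file name and log lines are made up). The tuple-based select filters re-open the file themselves, so no explicit open() is needed for reading, and select="last_line" relies on the module-level read_last_line() helper:

prep = {"mode": "w", "ending": "\n", "header": "# housekeeping log"}

with TXT("hk_demo.txt", prep=prep) as txt:                    # open() writes the header in mode 'w'
    txt.create("2024-07-01T12:00:00 OGSE power ON")
    txt.create("2024-07-01T12:05:00 OGSE power OFF")

txt = TXT("hk_demo.txt", prep={"mode": "r"})
print(txt.read(select=("startswith", "2024-07-01T12:05")))    # -> ['2024-07-01T12:05:00 OGSE power OFF']
print(txt.read(select="last_line"))                           # last line of the file as a string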
2184
- class SQLite(PersistenceLayer):
2185
- """A Persistence Layer that saves data in an SQLite database."""
-
2186
- extension = "sqlite3"
2187
-
2188
- def __init__(self, filename: Union[str, Path], prep: dict = None):
2189
- self._filepath = Path(filename).with_suffix(f".{self.extension}")
2190
- self._prep = prep
2191
- self._connection: Optional[Connection] = None
2192
-
2193
- def __enter__(self):
2194
- self.open()
2195
- return self
2196
-
2197
- def __exit__(self, exc_type, exc_val, exc_tb):
2198
- self.close()
2199
-
2200
- def open(self, mode=None):
2201
- self._connection = sqlite3.connect(self._filepath)
2202
-
2203
- def close(self):
2204
- self._connection.close()
2205
-
2206
- def exists(self):
2207
- return self._filepath.exists()
2208
-
2209
- def _execute(self, statement, values=None):
2210
- with self._connection:
2211
- cursor = self._connection.cursor()
2212
- cursor.execute(statement, values or [])
2213
- return cursor
2214
-
2215
- def create_table(self, table_name, columns):
2216
- columns_with_types = [
2217
- f'{column_name} {data_type}'
2218
- for column_name, data_type in columns.items()
2219
- ]
2220
- self._execute(
2221
- f"""CREATE TABLE IF NOT EXISTS {table_name} ({', '.join(columns_with_types)});"""
2222
- )
2223
-
2224
- def drop_table(self, table_name):
2225
- self._execute(f"DROP TABLE {table_name};")
2226
-
2227
- def add_to_table(self, table_name, data):
2228
- placeholders = ', '.join('?' * len(data))
2229
- column_names = ', '.join(data.keys())
2230
- column_values = tuple(data.values())
2231
-
2232
- self._execute(
2233
- f"""INSERT INTO {table_name} ({column_names}) VALUES ({placeholders}); """,
2234
- column_values,
2235
- )
2236
-
2237
- def select_from_table(self, table_name: str, criteria: dict = None, order_by=None):
2238
- criteria = criteria or {}
2239
-
2240
- query = f"SELECT * FROM {table_name}"
2241
-
2242
- if criteria:
2243
- placeholders = [f'{column} = ?' for column in criteria.keys()]
2244
- select_criteria = ' AND '.join(placeholders)
2245
- query += f' WHERE {select_criteria}'
2246
-
2247
- if order_by:
2248
- query += f' ORDER BY {order_by}'
2249
-
2250
- return self._execute(
2251
- query,
2252
- tuple(criteria.values()),
2253
- )
2254
-
2255
- def delete_from_table(self, table_name, criteria):
2256
- placeholders = [f'{column} = ?' for column in criteria.keys()]
2257
- delete_criteria = ' AND '.join(placeholders)
2258
-
2259
- self._execute(
2260
- f"""DELETE FROM {table_name} WHERE {delete_criteria}; """, tuple(criteria.values()),
2261
- )
2262
-
2263
- def update_table(self, table_name, criteria, data):
2264
- update_placeholders = [f'{column} = ?' for column in criteria.keys()]
2265
- update_criteria = ' AND '.join(update_placeholders)
2266
- data_placeholders = ', '.join(f'{key} = ?' for key in data.keys())
2267
-
2268
- values = tuple(data.values()) + tuple(criteria.values())
2269
-
2270
- self._execute(
2271
- f"""UPDATE {table_name} SET {data_placeholders} WHERE {update_criteria};""",
2272
- values,
2273
- )
2274
-
2275
- def create(self, data):
2276
- # Should call add_to_table()
2277
- pass
2278
-
2279
- def read(self, select=None):
2280
- # Should call select_from_table()
2281
- pass
2282
-
2283
- def update(self, idx, data):
2284
- # Should call update_table()
2285
- pass
2286
-
2287
- def delete(self, idx):
2288
- # Should call delete_from_table
2289
- pass
2290
-
2291
- def get_filepath(self):
2292
- return self._filepath
2293
-
2294
-
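A minimal usage sketch for the SQLite layer (table and column names are made up). The generic create()/read()/update()/delete() methods are still stubs, so the table-level helpers are the usable API; note also that table and column names are interpolated directly into the SQL text and must come from trusted code, while values are passed as bound parameters:

with SQLite("hk_demo") as db:                                  # stored as hk_demo.sqlite3
    db.create_table("housekeeping", {"timestamp": "TEXT", "temperature": "REAL"})
    db.add_to_table("housekeeping", {"timestamp": "2024-07-01T12:00:00", "temperature": 21.3})

    cursor = db.select_from_table("housekeeping", criteria={"timestamp": "2024-07-01T12:00:00"})
    print(cursor.fetchall())                                   # -> [('2024-07-01T12:00:00', 21.3)]

    db.update_table("housekeeping",
                    criteria={"timestamp": "2024-07-01T12:00:00"},
                    data={"temperature": 21.4})
    db.drop_table("housekeeping")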
2295
- CSV = CSV2
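The module-level alias means that code instantiating CSV transparently gets the csv-module-based implementation (file name below is made up):

layer = CSV("hk_demo.csv", prep={"mode": "r"})                 # actually constructs a CSV2 instance
assert isinstance(layer, CSV2)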