imap-processing 0.9.0__py3-none-any.whl → 0.11.0__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.

Potentially problematic release.


This version of imap-processing might be problematic.

Files changed (243)
  1. imap_processing/_version.py +2 -2
  2. imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +749 -442
  3. imap_processing/cdf/config/imap_glows_global_cdf_attrs.yaml +7 -0
  4. imap_processing/cdf/config/imap_glows_l1a_variable_attrs.yaml +8 -2
  5. imap_processing/cdf/config/imap_glows_l1b_variable_attrs.yaml +0 -1
  6. imap_processing/cdf/config/imap_glows_l2_variable_attrs.yaml +358 -0
  7. imap_processing/cdf/config/imap_hi_variable_attrs.yaml +59 -25
  8. imap_processing/cdf/config/imap_hit_global_cdf_attrs.yaml +22 -0
  9. imap_processing/cdf/config/imap_idex_l1a_variable_attrs.yaml +32 -8
  10. imap_processing/cdf/config/imap_idex_l1b_variable_attrs.yaml +94 -5
  11. imap_processing/cdf/config/imap_lo_l1a_variable_attrs.yaml +65 -37
  12. imap_processing/cdf/config/imap_swapi_variable_attrs.yaml +16 -1
  13. imap_processing/cdf/config/imap_swe_global_cdf_attrs.yaml +7 -0
  14. imap_processing/cdf/config/imap_swe_l1a_variable_attrs.yaml +14 -14
  15. imap_processing/cdf/config/imap_swe_l1b_variable_attrs.yaml +25 -24
  16. imap_processing/cdf/config/imap_swe_l2_variable_attrs.yaml +238 -0
  17. imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +100 -92
  18. imap_processing/cdf/utils.py +2 -2
  19. imap_processing/cli.py +45 -9
  20. imap_processing/codice/codice_l1a.py +104 -58
  21. imap_processing/codice/constants.py +111 -155
  22. imap_processing/codice/data/esa_sweep_values.csv +256 -256
  23. imap_processing/codice/data/lo_stepping_values.csv +128 -128
  24. imap_processing/ena_maps/ena_maps.py +519 -0
  25. imap_processing/ena_maps/utils/map_utils.py +145 -0
  26. imap_processing/ena_maps/utils/spatial_utils.py +226 -0
  27. imap_processing/glows/__init__.py +3 -0
  28. imap_processing/glows/ancillary/imap_glows_pipeline_settings_v001.json +52 -0
  29. imap_processing/glows/l1a/glows_l1a.py +72 -14
  30. imap_processing/glows/l1b/glows_l1b.py +2 -1
  31. imap_processing/glows/l1b/glows_l1b_data.py +25 -1
  32. imap_processing/glows/l2/glows_l2.py +324 -0
  33. imap_processing/glows/l2/glows_l2_data.py +156 -51
  34. imap_processing/hi/l1a/science_direct_event.py +57 -51
  35. imap_processing/hi/l1b/hi_l1b.py +43 -28
  36. imap_processing/hi/l1c/hi_l1c.py +225 -42
  37. imap_processing/hi/utils.py +20 -3
  38. imap_processing/hit/l0/constants.py +2 -2
  39. imap_processing/hit/l0/decom_hit.py +1 -1
  40. imap_processing/hit/l1a/hit_l1a.py +94 -13
  41. imap_processing/hit/l1b/hit_l1b.py +158 -9
  42. imap_processing/ialirt/l0/process_codicehi.py +156 -0
  43. imap_processing/ialirt/l0/process_codicelo.py +5 -2
  44. imap_processing/ialirt/packet_definitions/ialirt.xml +28 -20
  45. imap_processing/ialirt/packet_definitions/ialirt_codicehi.xml +241 -0
  46. imap_processing/ialirt/packet_definitions/ialirt_swapi.xml +170 -0
  47. imap_processing/ialirt/packet_definitions/ialirt_swe.xml +258 -0
  48. imap_processing/ialirt/process_ephemeris.py +72 -40
  49. imap_processing/idex/decode.py +241 -0
  50. imap_processing/idex/idex_l1a.py +143 -81
  51. imap_processing/idex/idex_l1b.py +244 -10
  52. imap_processing/lo/l0/lo_science.py +61 -0
  53. imap_processing/lo/l1a/lo_l1a.py +98 -10
  54. imap_processing/lo/l1b/lo_l1b.py +2 -2
  55. imap_processing/lo/l1c/lo_l1c.py +2 -2
  56. imap_processing/lo/packet_definitions/lo_xtce.xml +1082 -9178
  57. imap_processing/mag/l0/decom_mag.py +2 -2
  58. imap_processing/mag/l1a/mag_l1a.py +7 -7
  59. imap_processing/mag/l1a/mag_l1a_data.py +62 -30
  60. imap_processing/mag/l1b/mag_l1b.py +11 -6
  61. imap_processing/quality_flags.py +18 -3
  62. imap_processing/spice/geometry.py +149 -177
  63. imap_processing/spice/kernels.py +26 -26
  64. imap_processing/spice/spin.py +233 -0
  65. imap_processing/spice/time.py +96 -31
  66. imap_processing/swapi/l1/swapi_l1.py +60 -31
  67. imap_processing/swapi/packet_definitions/swapi_packet_definition.xml +363 -384
  68. imap_processing/swe/l1a/swe_l1a.py +8 -3
  69. imap_processing/swe/l1a/swe_science.py +24 -24
  70. imap_processing/swe/l1b/swe_l1b.py +2 -1
  71. imap_processing/swe/l1b/swe_l1b_science.py +181 -122
  72. imap_processing/swe/l2/swe_l2.py +337 -70
  73. imap_processing/swe/utils/swe_utils.py +28 -0
  74. imap_processing/tests/cdf/test_utils.py +2 -2
  75. imap_processing/tests/codice/conftest.py +20 -17
  76. imap_processing/tests/codice/data/validation/imap_codice_l1a_hskp_20241110193622_v0.0.0.cdf +0 -0
  77. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-counters-aggregated_20241110193700_v0.0.0.cdf +0 -0
  78. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-counters-singles_20241110193700_v0.0.0.cdf +0 -0
  79. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-nsw-angular_20241110193700_v0.0.0.cdf +0 -0
  80. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-nsw-priority_20241110193700_v0.0.0.cdf +0 -0
  81. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-nsw-species_20241110193700_v0.0.0.cdf +0 -0
  82. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-sw-angular_20241110193700_v0.0.0.cdf +0 -0
  83. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-sw-priority_20241110193700_v0.0.0.cdf +0 -0
  84. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-sw-species_20241110193700_v0.0.0.cdf +0 -0
  85. imap_processing/tests/codice/test_codice_l0.py +55 -121
  86. imap_processing/tests/codice/test_codice_l1a.py +147 -59
  87. imap_processing/tests/conftest.py +81 -22
  88. imap_processing/tests/ena_maps/test_ena_maps.py +309 -0
  89. imap_processing/tests/ena_maps/test_map_utils.py +286 -0
  90. imap_processing/tests/ena_maps/test_spatial_utils.py +161 -0
  91. imap_processing/tests/glows/conftest.py +7 -1
  92. imap_processing/tests/glows/test_glows_l1a_cdf.py +3 -7
  93. imap_processing/tests/glows/test_glows_l1a_data.py +34 -6
  94. imap_processing/tests/glows/test_glows_l1b_data.py +29 -17
  95. imap_processing/tests/glows/test_glows_l2.py +101 -0
  96. imap_processing/tests/hi/conftest.py +3 -3
  97. imap_processing/tests/hi/data/l1/imap_hi_l1b_45sensor-de_20250415_v999.cdf +0 -0
  98. imap_processing/tests/hi/data/l1/imap_his_pset-calibration-prod-config_20240101_v001.csv +31 -0
  99. imap_processing/tests/hi/test_hi_l1b.py +14 -9
  100. imap_processing/tests/hi/test_hi_l1c.py +136 -36
  101. imap_processing/tests/hi/test_l1a.py +0 -2
  102. imap_processing/tests/hi/test_science_direct_event.py +18 -14
  103. imap_processing/tests/hi/test_utils.py +16 -11
  104. imap_processing/tests/hit/helpers/__init__.py +0 -0
  105. imap_processing/tests/hit/helpers/l1_validation.py +405 -0
  106. imap_processing/tests/hit/test_data/sci_sample.ccsds +0 -0
  107. imap_processing/tests/hit/test_decom_hit.py +8 -10
  108. imap_processing/tests/hit/test_hit_l1a.py +117 -180
  109. imap_processing/tests/hit/test_hit_l1b.py +149 -55
  110. imap_processing/tests/hit/validation_data/hit_l1b_standard_sample2_nsrl_v4_3decimals.csv +62 -0
  111. imap_processing/tests/hit/validation_data/sci_sample_raw.csv +62 -0
  112. imap_processing/tests/ialirt/test_data/l0/20240827095047_SWE_IALIRT_packet.bin +0 -0
  113. imap_processing/tests/ialirt/test_data/l0/BinLog CCSDS_FRAG_TLM_20240826_152323Z_IALIRT_data_for_SDC.bin +0 -0
  114. imap_processing/tests/ialirt/test_data/l0/eu_SWP_IAL_20240826_152033.csv +644 -0
  115. imap_processing/tests/ialirt/test_data/l0/hi_fsw_view_1_ccsds.bin +0 -0
  116. imap_processing/tests/ialirt/test_data/l0/idle_export_eu.SWE_IALIRT_20240827_093852.csv +914 -0
  117. imap_processing/tests/ialirt/test_data/l0/imap_codice_l1a_hi-ialirt_20240523200000_v0.0.0.cdf +0 -0
  118. imap_processing/tests/ialirt/unit/test_process_codicehi.py +106 -0
  119. imap_processing/tests/ialirt/unit/test_process_ephemeris.py +33 -5
  120. imap_processing/tests/ialirt/unit/test_process_swapi.py +85 -0
  121. imap_processing/tests/ialirt/unit/test_process_swe.py +106 -0
  122. imap_processing/tests/idex/conftest.py +29 -1
  123. imap_processing/tests/idex/test_data/compressed_2023_102_14_24_55.pkts +0 -0
  124. imap_processing/tests/idex/test_data/non_compressed_2023_102_14_22_26.pkts +0 -0
  125. imap_processing/tests/idex/test_idex_l0.py +6 -3
  126. imap_processing/tests/idex/test_idex_l1a.py +151 -1
  127. imap_processing/tests/idex/test_idex_l1b.py +124 -2
  128. imap_processing/tests/lo/test_lo_l1a.py +62 -2
  129. imap_processing/tests/lo/test_lo_science.py +85 -0
  130. imap_processing/tests/lo/validation_data/Instrument_FM1_T104_R129_20240803_ILO_SPIN_EU.csv +2 -0
  131. imap_processing/tests/mag/conftest.py +16 -0
  132. imap_processing/tests/mag/test_mag_decom.py +6 -4
  133. imap_processing/tests/mag/test_mag_l1a.py +36 -7
  134. imap_processing/tests/mag/test_mag_l1b.py +55 -4
  135. imap_processing/tests/mag/test_mag_validation.py +148 -0
  136. imap_processing/tests/mag/validation/L1a/T001/all_p_ones.txt +19200 -0
  137. imap_processing/tests/mag/validation/L1a/T001/mag-l0-l1a-t001-in.bin +0 -0
  138. imap_processing/tests/mag/validation/L1a/T001/mag-l0-l1a-t001-out.csv +17 -0
  139. imap_processing/tests/mag/validation/L1a/T002/all_n_ones.txt +19200 -0
  140. imap_processing/tests/mag/validation/L1a/T002/mag-l0-l1a-t002-in.bin +0 -0
  141. imap_processing/tests/mag/validation/L1a/T002/mag-l0-l1a-t002-out.csv +17 -0
  142. imap_processing/tests/mag/validation/L1a/T003/field_like.txt +19200 -0
  143. imap_processing/tests/mag/validation/L1a/T003/mag-l0-l1a-t003-in.bin +0 -0
  144. imap_processing/tests/mag/validation/L1a/T003/mag-l0-l1a-t003-out.csv +17 -0
  145. imap_processing/tests/mag/validation/L1a/T004/field_like.txt +19200 -0
  146. imap_processing/tests/mag/validation/L1a/T004/mag-l0-l1a-t004-in.bin +0 -0
  147. imap_processing/tests/mag/validation/L1a/T004/mag-l0-l1a-t004-out.csv +17 -0
  148. imap_processing/tests/mag/validation/L1a/T005/field_like_range_change.txt +19200 -0
  149. imap_processing/tests/mag/validation/L1a/T005/mag-l0-l1a-t005-in.bin +0 -0
  150. imap_processing/tests/mag/validation/L1a/T005/mag-l0-l1a-t005-out.csv +17 -0
  151. imap_processing/tests/mag/validation/L1a/T006/hdr_field.txt +19200 -0
  152. imap_processing/tests/mag/validation/L1a/T006/mag-l0-l1a-t006-in.bin +0 -0
  153. imap_processing/tests/mag/validation/L1a/T006/mag-l0-l1a-t006-out.csv +17 -0
  154. imap_processing/tests/mag/validation/L1a/T007/hdr_field_and_range_change.txt +19200 -0
  155. imap_processing/tests/mag/validation/L1a/T007/mag-l0-l1a-t007-in.bin +0 -0
  156. imap_processing/tests/mag/validation/L1a/T007/mag-l0-l1a-t007-out.csv +17 -0
  157. imap_processing/tests/mag/validation/L1a/T008/field_like_range_change.txt +19200 -0
  158. imap_processing/tests/mag/validation/L1a/T008/mag-l0-l1a-t008-in.bin +0 -0
  159. imap_processing/tests/mag/validation/L1a/T008/mag-l0-l1a-t008-out.csv +17 -0
  160. imap_processing/tests/mag/validation/L1b/T009/data.bin +0 -0
  161. imap_processing/tests/mag/validation/L1b/T009/field_like_all_ranges.txt +19200 -0
  162. imap_processing/tests/mag/validation/L1b/T009/mag-l1a-l1b-t009-in.csv +17 -0
  163. imap_processing/tests/mag/validation/L1b/T009/mag-l1a-l1b-t009-magi-out.csv +17 -0
  164. imap_processing/tests/mag/validation/L1b/T009/mag-l1a-l1b-t009-mago-out.csv +17 -0
  165. imap_processing/tests/mag/validation/L1b/T010/data.bin +0 -0
  166. imap_processing/tests/mag/validation/L1b/T010/field_like_all_ranges.txt +19200 -0
  167. imap_processing/tests/mag/validation/L1b/T010/mag-l1a-l1b-t010-in.csv +17 -0
  168. imap_processing/tests/mag/validation/L1b/T010/mag-l1a-l1b-t010-magi-out.csv +17 -0
  169. imap_processing/tests/mag/validation/L1b/T010/mag-l1a-l1b-t010-mago-out.csv +17 -0
  170. imap_processing/tests/mag/validation/L1b/T011/data.bin +0 -0
  171. imap_processing/tests/mag/validation/L1b/T011/field_like_all_ranges.txt +19200 -0
  172. imap_processing/tests/mag/validation/L1b/T011/mag-l1a-l1b-t011-in.csv +17 -0
  173. imap_processing/tests/mag/validation/L1b/T011/mag-l1a-l1b-t011-magi-out.csv +17 -0
  174. imap_processing/tests/mag/validation/L1b/T011/mag-l1a-l1b-t011-mago-out.csv +17 -0
  175. imap_processing/tests/spice/test_geometry.py +128 -133
  176. imap_processing/tests/spice/test_kernels.py +37 -37
  177. imap_processing/tests/spice/test_spin.py +184 -0
  178. imap_processing/tests/spice/test_time.py +43 -20
  179. imap_processing/tests/swapi/test_swapi_l1.py +11 -10
  180. imap_processing/tests/swapi/test_swapi_l2.py +13 -3
  181. imap_processing/tests/swe/test_swe_l1a.py +1 -1
  182. imap_processing/tests/swe/test_swe_l1b.py +20 -3
  183. imap_processing/tests/swe/test_swe_l1b_science.py +54 -35
  184. imap_processing/tests/swe/test_swe_l2.py +148 -5
  185. imap_processing/tests/test_cli.py +39 -7
  186. imap_processing/tests/test_quality_flags.py +19 -19
  187. imap_processing/tests/test_utils.py +3 -2
  188. imap_processing/tests/ultra/test_data/l0/ultra45_raw_sc_ultrarawimg_withFSWcalcs_FM45_40P_Phi28p5_BeamCal_LinearScan_phi2850_theta-000_20240207T102740.csv +3314 -3314
  189. imap_processing/tests/ultra/test_data/mock_data.py +161 -0
  190. imap_processing/tests/ultra/unit/conftest.py +73 -0
  191. imap_processing/tests/ultra/unit/test_badtimes.py +58 -0
  192. imap_processing/tests/ultra/unit/test_cullingmask.py +87 -0
  193. imap_processing/tests/ultra/unit/test_de.py +61 -60
  194. imap_processing/tests/ultra/unit/test_ultra_l1a.py +3 -3
  195. imap_processing/tests/ultra/unit/test_ultra_l1b.py +51 -77
  196. imap_processing/tests/ultra/unit/test_ultra_l1b_annotated.py +5 -5
  197. imap_processing/tests/ultra/unit/test_ultra_l1b_culling.py +114 -0
  198. imap_processing/tests/ultra/unit/test_ultra_l1b_extended.py +86 -26
  199. imap_processing/tests/ultra/unit/test_ultra_l1c.py +1 -1
  200. imap_processing/tests/ultra/unit/test_ultra_l1c_pset_bins.py +3 -3
  201. imap_processing/ultra/constants.py +11 -1
  202. imap_processing/ultra/l1a/ultra_l1a.py +2 -2
  203. imap_processing/ultra/l1b/badtimes.py +22 -5
  204. imap_processing/ultra/l1b/cullingmask.py +31 -5
  205. imap_processing/ultra/l1b/de.py +32 -37
  206. imap_processing/ultra/l1b/extendedspin.py +44 -20
  207. imap_processing/ultra/l1b/ultra_l1b.py +21 -22
  208. imap_processing/ultra/l1b/ultra_l1b_culling.py +190 -0
  209. imap_processing/ultra/l1b/ultra_l1b_extended.py +81 -30
  210. imap_processing/ultra/l1c/histogram.py +6 -2
  211. imap_processing/ultra/l1c/pset.py +6 -2
  212. imap_processing/ultra/l1c/ultra_l1c.py +2 -3
  213. imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +4 -3
  214. imap_processing/ultra/utils/ultra_l1_utils.py +70 -14
  215. imap_processing/utils.py +2 -2
  216. {imap_processing-0.9.0.dist-info → imap_processing-0.11.0.dist-info}/METADATA +7 -2
  217. {imap_processing-0.9.0.dist-info → imap_processing-0.11.0.dist-info}/RECORD +235 -152
  218. imap_processing/tests/codice/data/eu_unit_lookup_table.csv +0 -101
  219. imap_processing/tests/codice/data/idle_export_eu.COD_NHK_20230822_122700 2.csv +0 -100
  220. imap_processing/tests/codice/data/idle_export_raw.COD_NHK_20230822_122700.csv +0 -100
  221. imap_processing/tests/codice/data/imap_codice_l0_raw_20241110_v001.pkts +0 -0
  222. imap_processing/tests/hi/test_data/l1a/imap_hi_l1a_45sensor-de_20250415_v000.cdf +0 -0
  223. imap_processing/tests/hit/test_data/sci_sample1.ccsds +0 -0
  224. imap_processing/tests/ultra/unit/test_spatial_utils.py +0 -125
  225. imap_processing/ultra/utils/spatial_utils.py +0 -221
  226. /imap_processing/tests/hi/{test_data → data}/l0/20231030_H45_APP_NHK.bin +0 -0
  227. /imap_processing/tests/hi/{test_data → data}/l0/20231030_H45_APP_NHK.csv +0 -0
  228. /imap_processing/tests/hi/{test_data → data}/l0/20231030_H45_SCI_CNT.bin +0 -0
  229. /imap_processing/tests/hi/{test_data → data}/l0/20231030_H45_SCI_DE.bin +0 -0
  230. /imap_processing/tests/hi/{test_data → data}/l0/H90_NHK_20241104.bin +0 -0
  231. /imap_processing/tests/hi/{test_data → data}/l0/H90_sci_cnt_20241104.bin +0 -0
  232. /imap_processing/tests/hi/{test_data → data}/l0/H90_sci_de_20241104.bin +0 -0
  233. /imap_processing/tests/hi/{test_data → data}/l0/README.txt +0 -0
  234. /imap_processing/tests/idex/{imap_idex_l0_raw_20231214_v001.pkts → test_data/imap_idex_l0_raw_20231214_v001.pkts} +0 -0
  235. /imap_processing/tests/idex/{impact_14_tof_high_data.txt → test_data/impact_14_tof_high_data.txt} +0 -0
  236. /imap_processing/tests/mag/{imap_mag_l1a_norm-magi_20251017_v001.cdf → validation/imap_mag_l1a_norm-magi_20251017_v001.cdf} +0 -0
  237. /imap_processing/tests/mag/{mag_l0_test_data.pkts → validation/mag_l0_test_data.pkts} +0 -0
  238. /imap_processing/tests/mag/{mag_l0_test_output.csv → validation/mag_l0_test_output.csv} +0 -0
  239. /imap_processing/tests/mag/{mag_l1_test_data.pkts → validation/mag_l1_test_data.pkts} +0 -0
  240. /imap_processing/tests/mag/{mag_l1a_test_output.csv → validation/mag_l1a_test_output.csv} +0 -0
  241. {imap_processing-0.9.0.dist-info → imap_processing-0.11.0.dist-info}/LICENSE +0 -0
  242. {imap_processing-0.9.0.dist-info → imap_processing-0.11.0.dist-info}/WHEEL +0 -0
  243. {imap_processing-0.9.0.dist-info → imap_processing-0.11.0.dist-info}/entry_points.txt +0 -0
imap_processing/hi/l1a/science_direct_event.py:

@@ -1,5 +1,6 @@
  """IMAP-Hi direct event processing."""

+ import logging
  from collections import defaultdict

  import numpy as np
@@ -7,18 +8,20 @@ import numpy._typing as npt
  import xarray as xr

  from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
- from imap_processing.spice.time import met_to_j2000ns
+ from imap_processing.spice.time import met_to_ttj2000ns

- # TODO: read LOOKED_UP_DURATION_OF_TICK from
+ # TODO: read DE_CLOCK_TICK_US from
  # instrument status summary later. This value
  # is rarely change but want to be able to change
  # it if needed. It stores information about how
  # fast the time was ticking. It is in microseconds.
- LOOKED_UP_DURATION_OF_TICK = 1999
+ DE_CLOCK_TICK_US = 1999
+ DE_CLOCK_TICK_S = DE_CLOCK_TICK_US / 1e6
+ HALF_CLOCK_TICK_S = DE_CLOCK_TICK_S / 2

- SECOND_TO_NS = 1e9
- MILLISECOND_TO_NS = 1e6
- MICROSECOND_TO_NS = 1e3
+ MILLISECOND_TO_S = 1e-3
+
+ logger = logging.getLogger(__name__)


  def parse_direct_events(de_data: bytes) -> dict[str, npt.ArrayLike]:
@@ -100,23 +103,6 @@ def create_dataset(de_data_dict: dict[str, npt.ArrayLike]) -> xr.Dataset:
  dataset : xarray.Dataset
  Xarray dataset.
  """
- # Compute the meta-event MET in nanoseconds
- de_data_dict["meta_event_met"] = (
- np.array(de_data_dict.pop("meta_seconds")) * SECOND_TO_NS
- + np.array(de_data_dict.pop("meta_subseconds")) * MILLISECOND_TO_NS
- )
- # Compute the MET of each event in nanoseconds
- # event MET = meta_event_met + de_clock + 1/2 de_clock_tick
- # See Hi Algorithm Document section 2.2.5
- half_tick_ns = LOOKED_UP_DURATION_OF_TICK / 2 * MICROSECOND_TO_NS
- de_data_dict["event_met"] = (
- de_data_dict["meta_event_met"]
- + np.array(de_data_dict["de_tag"])
- * LOOKED_UP_DURATION_OF_TICK
- * MICROSECOND_TO_NS
- + half_tick_ns
- )
-
  # Load the CDF attributes
  attr_mgr = ImapCdfAttributes()
  attr_mgr.add_instrument_global_attrs("hi")
@@ -125,19 +111,45 @@ def create_dataset(de_data_dict: dict[str, npt.ArrayLike]) -> xr.Dataset:
  # check_schema=False keeps DEPEND_0 = '' from being auto added
  epoch_attrs = attr_mgr.get_variable_attributes("epoch", check_schema=False)
  epoch_attrs["CATDESC"] = (
- "Direct Event time, number of nanoseconds since J2000 with leap "
+ "CCSDS creation time, number of nanoseconds since J2000 with leap "
  "seconds included"
  )
  epoch = xr.DataArray(
- met_to_j2000ns(de_data_dict["event_met"] / 1e9),
+ met_to_ttj2000ns(de_data_dict["ccsds_met"]),
  name="epoch",
  dims=["epoch"],
  attrs=epoch_attrs,
  )

+ event_met_attrs = attr_mgr.get_variable_attributes(
+ "hi_de_event_met", check_schema=False
+ )
+ # For L1A DE, event_met is its own dimension, so we remove the DEPEND_0 attribute
+ _ = event_met_attrs.pop("DEPEND_0")
+
+ # Compute the meta-event MET in seconds
+ meta_event_met = (
+ np.array(de_data_dict["meta_seconds"]).astype(np.float64)
+ + np.array(de_data_dict["meta_subseconds"]) * MILLISECOND_TO_S
+ )
+ # Compute the MET of each event in seconds
+ # event MET = meta_event_met + de_clock
+ # See Hi Algorithm Document section 2.2.5
+ event_met_array = np.array(
+ meta_event_met[de_data_dict["ccsds_index"]]
+ + np.array(de_data_dict["de_tag"]) * DE_CLOCK_TICK_S,
+ dtype=event_met_attrs.pop("dtype"),
+ )
+ event_met = xr.DataArray(
+ event_met_array,
+ name="event_met",
+ dims=["event_met"],
+ attrs=event_met_attrs,
+ )
+
  de_global_attrs = attr_mgr.get_global_attributes("imap_hi_l1a_de_attrs")
  dataset = xr.Dataset(
- coords={"epoch": epoch},
+ coords={"epoch": epoch, "event_met": event_met},
  attrs=de_global_attrs,
  )

@@ -148,12 +160,10 @@ def create_dataset(de_data_dict: dict[str, npt.ArrayLike]) -> xr.Dataset:
  dtype = attrs.pop("dtype")
  dataset[var_name] = xr.DataArray(
  np.array(data, dtype=np.dtype(dtype)),
- dims="epoch",
+ dims=attrs["DEPEND_0"],
  attrs=attrs,
  )

- # TODO: figure out how to store information about
- # input data(one or more) it used to produce this dataset
  return dataset


@@ -179,32 +189,28 @@ def science_direct_event(packets_data: xr.Dataset) -> xr.Dataset:
  """
  de_data_dict: dict[str, list] = defaultdict(list)

- # Because DE_TOF is a variable length data,
- # I am using extend to add another list to the
- # end of the list. This way, I don't need to flatten
- # the list later.
+ # Add packet data to the dictionary, renaming some fields
+ # This is done first so that these variables are first in the CDF
+ for from_key, to_key in {
+ "shcoarse": "ccsds_met",
+ "src_seq_ctr": "src_seq_ctr",
+ "pkt_len": "pkt_len",
+ "last_spin_num": "last_spin_num",
+ "spin_invalids": "spin_invalids",
+ "esa_step_num": "esa_step",
+ "meta_seconds": "meta_seconds",
+ "meta_subseconds": "meta_subseconds",
+ }.items():
+ de_data_dict[to_key] = packets_data[from_key].data
+
+ # For each packet, parse the DE data and add it to the Pointing
+ # list of DE data usint `extend()`
  for i, data in enumerate(packets_data["de_tof"].data):
  parsed_de_data = parse_direct_events(data)
  for key, new_data in parsed_de_data.items():
  de_data_dict[key].extend(new_data)
-
- # add packet data to keep_packet_data dictionary, repeating values
- # for each direct event encoded in the current packet
- for from_key, to_key in {
- "shcoarse": "ccsds_met",
- "src_seq_ctr": "src_seq_ctr",
- "pkt_len": "pkt_len",
- "last_spin_num": "last_spin_num",
- "spin_invalids": "spin_invalids",
- "esa_step_num": "esa_step",
- "meta_seconds": "meta_seconds",
- "meta_subseconds": "meta_subseconds",
- }.items():
- # Repeat the ith packet from_key value N times, where N is the
- # number of events in the ith packet.
- de_data_dict[to_key].extend(
- [packets_data[from_key].data[i]] * len(parsed_de_data["de_tag"])
- )
+ # Record the ccsds packet index for each DE
+ de_data_dict["ccsds_index"].extend([i] * len(parsed_de_data["de_tag"]))

  # create dataset
  return create_dataset(de_data_dict)
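
Note on the change above: per-event timing in L1A is now carried in seconds of MET, and each event is tied to its parent packet through a new ccsds_index variable instead of repeating packet fields per event. A minimal illustrative sketch of the new event-MET arithmetic (only the constants and the formula come from the diff; the sample values are invented):

    import numpy as np

    # Constants introduced in the new module version
    DE_CLOCK_TICK_US = 1999
    DE_CLOCK_TICK_S = DE_CLOCK_TICK_US / 1e6
    MILLISECOND_TO_S = 1e-3

    # Hypothetical parsed fields: two packets, three direct events
    meta_seconds = np.array([100, 200])      # per-packet meta-event MET, whole seconds
    meta_subseconds = np.array([500, 250])   # per-packet meta-event MET, milliseconds
    de_tag = np.array([0, 10, 5])            # DE clock ticks since the meta event
    ccsds_index = np.array([0, 0, 1])        # parent packet of each event

    # Meta-event MET in seconds, one value per packet
    meta_event_met = meta_seconds.astype(np.float64) + meta_subseconds * MILLISECOND_TO_S

    # event MET = meta_event_met (of the parent packet) + de_tag * tick duration
    event_met = meta_event_met[ccsds_index] + de_tag * DE_CLOCK_TICK_S
    # -> approximately [100.5, 100.51999, 200.259995] seconds of MET

The half-tick centering that the old code folded into event_met is now applied downstream in L1B via HALF_CLOCK_TICK_S (see the hi_l1b.py hunks below).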
imap_processing/hi/l1b/hi_l1b.py:

@@ -9,6 +9,7 @@ import xarray as xr
  from imap_processing import imap_module_directory
  from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
  from imap_processing.cdf.utils import parse_filename_like
+ from imap_processing.hi.l1a.science_direct_event import HALF_CLOCK_TICK_S
  from imap_processing.hi.utils import (
  HIAPID,
  HiConstants,
@@ -17,11 +18,13 @@ from imap_processing.hi.utils import (
  )
  from imap_processing.spice.geometry import (
  SpiceFrame,
+ instrument_pointing,
+ )
+ from imap_processing.spice.spin import (
  get_instrument_spin_phase,
  get_spacecraft_spin_phase,
- instrument_pointing,
  )
- from imap_processing.spice.time import j2000ns_to_j2000s
+ from imap_processing.spice.time import met_to_sclkticks, sct_to_et
  from imap_processing.utils import convert_raw_to_eu


@@ -120,19 +123,30 @@ def annotate_direct_events(l1a_dataset: xr.Dataset) -> xr.Dataset:
  L1B direct event data.
  """
  l1b_dataset = l1a_dataset.copy()
+ l1b_dataset.update(de_esa_energy_step(l1b_dataset))
  l1b_dataset.update(compute_coincidence_type_and_time_deltas(l1b_dataset))
  l1b_dataset.update(de_nominal_bin_and_spin_phase(l1b_dataset))
  l1b_dataset.update(compute_hae_coordinates(l1b_dataset))
- l1b_dataset.update(de_esa_energy_step(l1b_dataset))
  l1b_dataset.update(
  create_dataset_variables(
  ["quality_flag"],
- l1b_dataset["epoch"].size,
+ l1b_dataset["event_met"].size,
  att_manager_lookup_str="hi_de_{0}",
  )
  )
  l1b_dataset = l1b_dataset.drop_vars(
- ["tof_1", "tof_2", "tof_3", "de_tag", "ccsds_met", "meta_event_met"]
+ [
+ "src_seq_ctr",
+ "pkt_len",
+ "last_spin_num",
+ "spin_invalids",
+ "meta_seconds",
+ "meta_subseconds",
+ "tof_1",
+ "tof_2",
+ "tof_3",
+ "de_tag",
+ ]
  )

  de_global_attrs = ATTR_MGR.get_global_attributes("imap_hi_l1b_de_attrs")
@@ -169,20 +183,19 @@ def compute_coincidence_type_and_time_deltas(
  "delta_t_bc1",
  "delta_t_c1c2",
  ],
- len(dataset.epoch),
+ len(dataset.event_met),
  att_manager_lookup_str="hi_de_{0}",
  )
- out_ds = dataset.assign(new_vars)

  # compute masks needed for coincidence type and delta t calculations
- a_first = out_ds.trigger_id.values == TriggerId.A
- b_first = out_ds.trigger_id.values == TriggerId.B
- c_first = out_ds.trigger_id.values == TriggerId.C
+ a_first = dataset.trigger_id.values == TriggerId.A
+ b_first = dataset.trigger_id.values == TriggerId.B
+ c_first = dataset.trigger_id.values == TriggerId.C

- tof1_valid = np.isin(out_ds.tof_1.values, HiConstants.TOF1_BAD_VALUES, invert=True)
- tof2_valid = np.isin(out_ds.tof_2.values, HiConstants.TOF2_BAD_VALUES, invert=True)
+ tof1_valid = np.isin(dataset.tof_1.values, HiConstants.TOF1_BAD_VALUES, invert=True)
+ tof2_valid = np.isin(dataset.tof_2.values, HiConstants.TOF2_BAD_VALUES, invert=True)
  tof1and2_valid = tof1_valid & tof2_valid
- tof3_valid = np.isin(out_ds.tof_3.values, HiConstants.TOF3_BAD_VALUES, invert=True)
+ tof3_valid = np.isin(dataset.tof_3.values, HiConstants.TOF3_BAD_VALUES, invert=True)

  # Table denoting how hit-first mask and valid TOF masks are used to set
  # coincidence type bitmask
@@ -193,12 +206,12 @@
  # | 2 | B | A,B | B,C1 | C1,C2 |
  # | 3 | C1 | A,C1 | B,C1 | C1,C2 |
  # Set coincidence type bitmask
- new_vars["coincidence_type"][a_first | tof1_valid] |= CoincidenceBitmap.A
+ new_vars["coincidence_type"][a_first | tof1_valid] |= np.uint8(CoincidenceBitmap.A)
  new_vars["coincidence_type"][
  b_first | (a_first & tof1_valid) | (c_first & tof2_valid)
- ] |= CoincidenceBitmap.B
- new_vars["coincidence_type"][c_first | tof2_valid] |= CoincidenceBitmap.C1
- new_vars["coincidence_type"][tof3_valid] |= CoincidenceBitmap.C2
+ ] |= np.uint8(CoincidenceBitmap.B)
+ new_vars["coincidence_type"][c_first | tof2_valid] |= np.uint8(CoincidenceBitmap.C1)
+ new_vars["coincidence_type"][tof3_valid] |= np.uint8(CoincidenceBitmap.C2)

  # Table denoting how TOF is interpreted for each Trigger ID
  # -----------------------------------------------------------------------
@@ -209,9 +222,9 @@
  # | 3 | C | t_a - t_c1 | t_b - t_c1 | t_c2 - t_c1 |

  # Prepare for delta_t calculations by converting TOF values to nanoseconds
- tof_1_ns = (out_ds.tof_1.values * HiConstants.TOF1_TICK_DUR).astype(np.int32)
- tof_2_ns = (out_ds.tof_2.values * HiConstants.TOF2_TICK_DUR).astype(np.int32)
- tof_3_ns = (out_ds.tof_3.values * HiConstants.TOF3_TICK_DUR).astype(np.int32)
+ tof_1_ns = (dataset.tof_1.values * HiConstants.TOF1_TICK_DUR).astype(np.int32)
+ tof_2_ns = (dataset.tof_2.values * HiConstants.TOF2_TICK_DUR).astype(np.int32)
+ tof_3_ns = (dataset.tof_3.values * HiConstants.TOF3_TICK_DUR).astype(np.int32)

  # # ********** delta_t_ab = (t_b - t_a) **********
  # Table: row 1, column 1
@@ -281,24 +294,23 @@ def de_nominal_bin_and_spin_phase(dataset: xr.Dataset) -> dict[str, xr.DataArray
  "spin_phase",
  "nominal_bin",
  ],
- len(dataset.epoch),
+ len(dataset.event_met),
  att_manager_lookup_str="hi_de_{0}",
  )

  # nominal_bin is the index number of the 90 4-degree bins that each DE would
  # be binned into in the histogram packet. The Hi histogram data is binned by
  # spacecraft spin-phase, not instrument spin-phase, so the same is done here.
- met_query_times = j2000ns_to_j2000s(dataset.event_met.values)
- imap_spin_phase = get_spacecraft_spin_phase(met_query_times)
+ # We have to add 1/2 clock tick to MET time before getting spin phase
+ met_seconds = dataset.event_met.values + HALF_CLOCK_TICK_S
+ imap_spin_phase = get_spacecraft_spin_phase(met_seconds)
  new_vars["nominal_bin"].values = np.asarray(imap_spin_phase * 360 / 4).astype(
  np.uint8
  )

  sensor_number = parse_sensor_number(dataset.attrs["Logical_source"])
  new_vars["spin_phase"].values = np.asarray(
- get_instrument_spin_phase(
- met_query_times, SpiceFrame[f"IMAP_HI_{sensor_number}"]
- )
+ get_instrument_spin_phase(met_seconds, SpiceFrame[f"IMAP_HI_{sensor_number}"])
  ).astype(np.float32)
  return new_vars

@@ -326,10 +338,13 @@ def compute_hae_coordinates(dataset: xr.Dataset) -> dict[str, xr.DataArray]:
  "hae_latitude",
  "hae_longitude",
  ],
- len(dataset.epoch),
+ len(dataset.event_met),
  att_manager_lookup_str="hi_de_{0}",
  )
- et = j2000ns_to_j2000s(dataset.epoch.values)
+ # Per Section 2.2.5 of Algorithm Document, add 1/2 of tick duration
+ # to MET before computing pointing.
+ sclk_ticks = met_to_sclkticks(dataset.event_met.values + HALF_CLOCK_TICK_S)
+ et = sct_to_et(sclk_ticks)
  sensor_number = parse_sensor_number(dataset.attrs["Logical_source"])
  # TODO: For now, we are using SPICE to compute the look direction for each
  # direct event. This will eventually be replaced by the algorithm Paul
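
Note: because the L1A event_met now marks the start of a DE clock tick, the hi_l1b.py hunks above add HALF_CLOCK_TICK_S before every spin-phase and pointing query. A small illustrative sketch (the constants and the offset come from the diff; the sample times are invented):

    import numpy as np

    DE_CLOCK_TICK_US = 1999
    HALF_CLOCK_TICK_S = (DE_CLOCK_TICK_US / 1e6) / 2

    # Hypothetical L1A event_met values: seconds of MET at the start of each tick
    event_met = np.array([100.5, 100.51999, 200.259995])

    # L1B evaluates geometry at the tick centers
    met_for_spice = event_met + HALF_CLOCK_TICK_S
    # These centered MET values are what the hunks above pass to
    # get_spacecraft_spin_phase(), get_instrument_spin_phase(), and
    # met_to_sclkticks()/sct_to_et() for the HAE coordinate computation.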
imap_processing/hi/l1c/hi_l1c.py:

@@ -1,13 +1,23 @@
  """IMAP-HI l1c processing module."""

+ from __future__ import annotations
+
  import logging
+ from pathlib import Path

  import numpy as np
+ import pandas as pd
  import xarray as xr

  from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
  from imap_processing.cdf.utils import parse_filename_like
- from imap_processing.hi.utils import full_dataarray
+ from imap_processing.hi.utils import create_dataset_variables, full_dataarray
+ from imap_processing.spice.geometry import (
+ SpiceFrame,
+ frame_transform,
+ frame_transform_az_el,
+ )
+ from imap_processing.spice.time import ttj2000ns_to_et

  logger = logging.getLogger(__name__)

@@ -39,8 +49,8 @@ def hi_l1c(dependencies: list, data_version: str) -> xr.Dataset:

  # TODO: I am not sure what the input for Goodtimes will be so for now,
  # If the input is an xarray Dataset, do pset processing
- if len(dependencies) == 1 and isinstance(dependencies[0], xr.Dataset):
- l1c_dataset = generate_pset_dataset(dependencies[0])
+ if len(dependencies) == 2 and isinstance(dependencies[0], xr.Dataset):
+ l1c_dataset = generate_pset_dataset(*dependencies)
  else:
  raise NotImplementedError(
  "Input dependencies not recognized for l1c pset processing."
@@ -51,39 +61,78 @@
  return l1c_dataset


- def generate_pset_dataset(de_dataset: xr.Dataset) -> xr.Dataset:
+ def generate_pset_dataset(
+ de_dataset: xr.Dataset, calibration_prod_config_path: Path
+ ) -> xr.Dataset:
  """
- Will process IMAP-Hi l1b product into a l1c pset xarray dataset.
+ Generate IMAP-Hi l1c pset xarray dataset from l1b product.

  Parameters
  ----------
  de_dataset : xarray.Dataset
  IMAP-Hi l1b de product.
+ calibration_prod_config_path : Path
+ Calibration product configuration file.

  Returns
  -------
  pset_dataset : xarray.Dataset
  Ready to be written to CDF.
  """
+ logger.info(
+ f"Generating IMAP-Hi l1c pset dataset for product "
+ f"{de_dataset.attrs['Logical_file_id']}"
+ )
  logical_source_parts = parse_filename_like(de_dataset.attrs["Logical_source"])
- n_esa_step = de_dataset.esa_step.data.size
- pset_dataset = allocate_pset_dataset(n_esa_step, logical_source_parts["sensor"])
- # TODO: Stored epoch value needs to be consistent across ENA instruments.
- # SPDF says this should be the center of the time bin, but instrument
- # teams may disagree.
- pset_dataset.epoch.data[0] = de_dataset.epoch.data[0]
+ # read calibration product configuration file
+ config_df = CalibrationProductConfig.from_csv(calibration_prod_config_path)
+
+ pset_dataset = empty_pset_dataset(
+ de_dataset.esa_energy_step.data,
+ config_df.cal_prod_config.number_of_products,
+ logical_source_parts["sensor"],
+ )
+ # For ISTP, epoch should be the center of the time bin.
+ pset_dataset.epoch.data[0] = np.mean(de_dataset.epoch.data[[0, -1]]).astype(
+ np.int64
+ )
+ pset_et = ttj2000ns_to_et(pset_dataset.epoch.data[0])
+ # Calculate and add despun_z, hae_latitude, and hae_longitude variables to
+ # the pset_dataset
+ pset_dataset.update(pset_geometry(pset_et, logical_source_parts["sensor"]))
+
+ # TODO: The following section will go away as PSET algorithms to populate
+ # these variables are written.
+ attr_mgr = ImapCdfAttributes()
+ attr_mgr.add_instrument_global_attrs("hi")
+ attr_mgr.add_instrument_variable_attrs(instrument="hi", level=None)
+ for var_name in [
+ "counts",
+ "exposure_times",
+ "background_rates",
+ "background_rates_uncertainty",
+ ]:
+ pset_dataset[var_name] = full_dataarray(
+ var_name,
+ attr_mgr.get_variable_attributes(f"hi_pset_{var_name}", check_schema=False),
+ pset_dataset.coords,
+ )

  return pset_dataset


- def allocate_pset_dataset(n_esa_steps: int, sensor_str: str) -> xr.Dataset:
+ def empty_pset_dataset(
+ l1b_energy_steps: np.ndarray, n_cal_prods: int, sensor_str: str
+ ) -> xr.Dataset:
  """
- Allocate an empty xarray.Dataset.
+ Allocate an empty xarray.Dataset with appropriate pset coordinates.

  Parameters
  ----------
- n_esa_steps : int
- Number of Electrostatic Analyzer steps to allocate.
+ l1b_energy_steps : np.ndarray
+ The array of esa_energy_step data from the L1B DE product.
+ n_cal_prods : int
+ Number of calibration products to allocate.
  sensor_str : str
  '45sensor' or '90sensor'.

@@ -99,32 +148,36 @@ def allocate_pset_dataset(n_esa_steps: int, sensor_str: str) -> xr.Dataset:
  # preallocate coordinates xr.DataArrays
  coords = dict()
  # epoch coordinate has only 1 entry for pointing set
+ epoch_attrs = attr_mgr.get_variable_attributes("epoch")
+ epoch_attrs.update(
+ attr_mgr.get_variable_attributes("hi_pset_epoch", check_schema=False)
+ )
  coords["epoch"] = xr.DataArray(
  np.empty(1, dtype=np.int64), # TODO: get dtype from cdf attrs?
  name="epoch",
  dims=["epoch"],
- attrs=attr_mgr.get_variable_attributes("epoch"),
+ attrs=epoch_attrs,
  )
+ # Create the esa_energy_step coordinate
  attrs = attr_mgr.get_variable_attributes(
  "hi_pset_esa_energy_step", check_schema=False
  ).copy()
  dtype = attrs.pop("dtype")
+ # Find the unique, non-zero esa_energy_steps from the L1B data
+ esa_energy_steps = np.array(sorted(set(l1b_energy_steps) - {0}), dtype=dtype)
  coords["esa_energy_step"] = xr.DataArray(
- np.full(n_esa_steps, attrs["FILLVAL"], dtype=dtype),
+ esa_energy_steps,
  name="esa_energy_step",
  dims=["esa_energy_step"],
  attrs=attrs,
  )
- # TODO: define calibration product number to coincidence type mapping and
- # use the number of calibration products here. I believe it will be 5
- # 0 for any, 1-4, for the number of detector hits.
- n_calibration_prod = 5
+
  attrs = attr_mgr.get_variable_attributes(
  "hi_pset_calibration_prod", check_schema=False
  ).copy()
  dtype = attrs.pop("dtype")
  coords["calibration_prod"] = xr.DataArray(
- np.arange(n_calibration_prod, dtype=dtype),
+ np.arange(n_cal_prods, dtype=dtype),
  name="calibration_prod",
  dims=["calibration_prod"],
  attrs=attrs,
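
Note on the new coordinate setup above: the PSET epoch becomes the center of the L1B time span, and the esa_energy_step coordinate is derived from the unique non-zero steps in the L1B data rather than a fixed-size fill array. An illustrative sketch with invented inputs (only the two expressions come from the diff; the uint8 dtype is an assumption, since the real dtype comes from the CDF attributes):

    import numpy as np

    # Hypothetical L1B DE inputs
    epoch = np.array([1_000, 2_000, 9_000], dtype=np.int64)           # ttj2000ns epochs
    esa_energy_step = np.array([0, 3, 3, 5, 7, 0], dtype=np.uint8)    # zeros dropped by the new code

    # PSET epoch: center of the pointing's time span (first and last L1B epoch)
    pset_epoch = np.mean(epoch[[0, -1]]).astype(np.int64)             # -> 5000

    # esa_energy_step coordinate: unique, non-zero steps from the L1B data
    steps = np.array(sorted(set(esa_energy_step) - {0}), dtype=np.uint8)  # -> [3 5 7]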
@@ -141,27 +194,8 @@ def allocate_pset_dataset(n_esa_steps: int, sensor_str: str) -> xr.Dataset:
  attrs=attrs,
  )

- # Allocate the variables
+ # Allocate the coordinate label variables
  data_vars = dict()
- # despun_z is a 1x3 unit vector that does not have a DEPEND_1.
- # Define this dict to override the shape produced in full_dataarray
- var_shapes = {"despun_z": (1, 3)}
- for var_name in [
- "despun_z",
- "hae_latitude",
- "hae_longitude",
- "counts",
- "exposure_times",
- "background_rates",
- "background_rates_uncertainty",
- ]:
- data_vars[var_name] = full_dataarray(
- var_name,
- attr_mgr.get_variable_attributes(f"hi_pset_{var_name}", check_schema=False),
- coords,
- shape=var_shapes.get(var_name, None),
- )
-
  # Generate label variables
  data_vars["esa_energy_step_label"] = xr.DataArray(
  coords["esa_energy_step"].values.astype(str),
@@ -202,3 +236,152 @@ def allocate_pset_dataset(n_esa_steps: int, sensor_str: str) -> xr.Dataset:
  )
  dataset = xr.Dataset(data_vars=data_vars, coords=coords, attrs=pset_global_attrs)
  return dataset
+
+
+ def pset_geometry(pset_et: float, sensor_str: str) -> dict[str, xr.DataArray]:
+ """
+ Calculate PSET geometry variables.
+
+ Parameters
+ ----------
+ pset_et : float
+ Pointing set ephemeris time for which to calculate PSET geometry.
+ sensor_str : str
+ '45sensor' or '90sensor'.
+
+ Returns
+ -------
+ geometry_vars : dict[str, xarray.DataArray]
+ Keys are variable names and values are data arrays.
+ """
+ geometry_vars = create_dataset_variables(
+ ["despun_z"], (1, 3), att_manager_lookup_str="hi_pset_{0}"
+ )
+ despun_z = frame_transform(
+ pset_et,
+ np.array([0, 0, 1]),
+ SpiceFrame.IMAP_DPS,
+ SpiceFrame.ECLIPJ2000,
+ )
+ geometry_vars["despun_z"].values = despun_z[np.newaxis, :].astype(np.float32)
+
+ # Calculate hae_latitude and hae_longitude of the spin bins
+ # define the azimuth/elevation coordinates in the Pointing Frame (DPS)
+ # TODO: get the sensor's true elevation using SPICE?
+ el = 0 if "90" in sensor_str else -45
+ dps_az_el = np.array(
+ [
+ np.arange(0.05, 360, 0.1),
+ np.full(3600, el),
+ ]
+ ).T
+ hae_az_el = frame_transform_az_el(
+ pset_et, dps_az_el, SpiceFrame.IMAP_DPS, SpiceFrame.ECLIPJ2000, degrees=True
+ )
+
+ geometry_vars.update(
+ create_dataset_variables(
+ ["hae_latitude", "hae_longitude"],
+ (1, 3600),
+ att_manager_lookup_str="hi_pset_{0}",
+ )
+ )
+ geometry_vars["hae_longitude"].values = hae_az_el[:, 0].astype(np.float32)[
+ np.newaxis, :
+ ]
+ geometry_vars["hae_latitude"].values = hae_az_el[:, 1].astype(np.float32)[
+ np.newaxis, :
+ ]
+ return geometry_vars
+
+
+ @pd.api.extensions.register_dataframe_accessor("cal_prod_config")
+ class CalibrationProductConfig:
+ """
+ Register custom accessor for calibration product configuration DataFrames.
+
+ Parameters
+ ----------
+ pandas_obj : pandas.DataFrame
+ Object to run validation and use accessor functions on.
+ """
+
+ index_columns = (
+ "cal_prod_num",
+ "esa_energy_step",
+ )
+ required_columns = (
+ "coincidence_type_list",
+ "tof_ab_low",
+ "tof_ab_high",
+ "tof_ac1_low",
+ "tof_ac1_high",
+ "tof_bc1_low",
+ "tof_bc1_high",
+ "tof_c1c2_low",
+ "tof_c1c2_high",
+ )
+
+ def __init__(self, pandas_obj: pd.DataFrame) -> None:
+ self._validate(pandas_obj)
+ self._obj = pandas_obj
+
+ def _validate(self, df: pd.DataFrame) -> None:
+ """
+ Validate the current configuration.
+
+ Parameters
+ ----------
+ df : pandas.DataFrame
+ Object to validate.
+
+ Raises
+ ------
+ AttributeError : If the dataframe does not pass validation.
+ """
+ for index_name in self.index_columns:
+ if index_name in df.index:
+ raise AttributeError(
+ f"Required index {index_name} not present in dataframe."
+ )
+ # Verify that the Dataframe has all the required columns
+ for col in self.required_columns:
+ if col not in df.columns:
+ raise AttributeError(f"Required column {col} not present in dataframe.")
+ # TODO: Verify that the same ESA energy steps exist in all unique calibration
+ # product numbers
+
+ @classmethod
+ def from_csv(cls, path: Path) -> pd.DataFrame:
+ """
+ Read configuration CSV file into a pandas.DataFrame.
+
+ Parameters
+ ----------
+ path : Path
+ Location of the Calibration Product configuration CSV file.
+
+ Returns
+ -------
+ dataframe : pandas.DataFrame
+ Validated calibration product configuration data frame.
+ """
+ return pd.read_csv(
+ path,
+ index_col=cls.index_columns,
+ converters={"coincidence_type_list": lambda s: s.split("|")},
+ comment="#",
+ )
+
+ @property
+ def number_of_products(self) -> int:
+ """
+ Get the number of calibration products in the current configuration.
+
+ Returns
+ -------
+ number_of_products : int
+ The maximum number of calibration products defined in the list of
+ calibration product definitions.
+ """
+ return len(self._obj.index.unique(level="cal_prod_num"))
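
Note: the CalibrationProductConfig accessor above implies a specific layout for the calibration product configuration CSV (such as the imap_his_pset-calibration-prod-config_20240101_v001.csv test file added in this release): one row per (cal_prod_num, esa_energy_step) pair, a |-separated coincidence_type_list, and the eight TOF window columns. A hypothetical file and the equivalent read call, for illustration only (the column names come from the class; the values and the coincidence-type tokens are invented):

    from io import StringIO

    import pandas as pd

    # Invented example content; real files ship as ancillary/test data.
    csv_text = """\
    # comment lines are skipped (comment="#")
    cal_prod_num,esa_energy_step,coincidence_type_list,tof_ab_low,tof_ab_high,tof_ac1_low,tof_ac1_high,tof_bc1_low,tof_bc1_high,tof_c1c2_low,tof_c1c2_high
    0,1,ABC1C2|ABC1,0,500,0,500,0,500,0,500
    1,1,AB|AC1,0,500,0,500,0,500,0,500
    """

    # Same read that CalibrationProductConfig.from_csv() performs
    df = pd.read_csv(
        StringIO(csv_text),
        index_col=["cal_prod_num", "esa_energy_step"],
        converters={"coincidence_type_list": lambda s: s.split("|")},
        comment="#",
    )

    # number_of_products counts the unique cal_prod_num index values
    print(len(df.index.unique(level="cal_prod_num")))  # 2

Registering the class as a DataFrame accessor means the same count is available as df.cal_prod_config.number_of_products once the hi_l1c module has been imported; the snippet computes it directly to stay self-contained.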