imap-processing 0.11.0__py3-none-any.whl → 0.13.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of imap-processing might be problematic.

Files changed (415)
  1. imap_processing/__init__.py +11 -11
  2. imap_processing/_version.py +2 -2
  3. imap_processing/ccsds/ccsds_data.py +1 -2
  4. imap_processing/ccsds/excel_to_xtce.py +66 -18
  5. imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +24 -40
  6. imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +934 -42
  7. imap_processing/cdf/config/imap_codice_l1b_variable_attrs.yaml +1846 -128
  8. imap_processing/cdf/config/imap_glows_global_cdf_attrs.yaml +0 -5
  9. imap_processing/cdf/config/imap_hi_global_cdf_attrs.yaml +10 -11
  10. imap_processing/cdf/config/imap_hi_variable_attrs.yaml +17 -19
  11. imap_processing/cdf/config/imap_hit_global_cdf_attrs.yaml +27 -14
  12. imap_processing/cdf/config/imap_hit_l1a_variable_attrs.yaml +106 -116
  13. imap_processing/cdf/config/imap_hit_l1b_variable_attrs.yaml +120 -145
  14. imap_processing/cdf/config/imap_hit_l2_variable_attrs.yaml +14 -0
  15. imap_processing/cdf/config/imap_idex_global_cdf_attrs.yaml +25 -9
  16. imap_processing/cdf/config/imap_idex_l1a_variable_attrs.yaml +6 -4
  17. imap_processing/cdf/config/imap_idex_l1b_variable_attrs.yaml +3 -3
  18. imap_processing/cdf/config/imap_lo_global_cdf_attrs.yaml +0 -12
  19. imap_processing/cdf/config/imap_lo_l1a_variable_attrs.yaml +1 -1
  20. imap_processing/cdf/config/imap_mag_global_cdf_attrs.yaml +23 -20
  21. imap_processing/cdf/config/imap_mag_l1a_variable_attrs.yaml +361 -0
  22. imap_processing/cdf/config/imap_mag_l1b_variable_attrs.yaml +160 -0
  23. imap_processing/cdf/config/imap_mag_l1c_variable_attrs.yaml +160 -0
  24. imap_processing/cdf/config/imap_spacecraft_global_cdf_attrs.yaml +18 -0
  25. imap_processing/cdf/config/imap_spacecraft_variable_attrs.yaml +40 -0
  26. imap_processing/cdf/config/imap_swapi_global_cdf_attrs.yaml +1 -5
  27. imap_processing/cdf/config/imap_swapi_variable_attrs.yaml +22 -0
  28. imap_processing/cdf/config/imap_swe_global_cdf_attrs.yaml +12 -4
  29. imap_processing/cdf/config/imap_swe_l1a_variable_attrs.yaml +16 -2
  30. imap_processing/cdf/config/imap_swe_l1b_variable_attrs.yaml +64 -52
  31. imap_processing/cdf/config/imap_swe_l2_variable_attrs.yaml +71 -47
  32. imap_processing/cdf/config/imap_ultra_global_cdf_attrs.yaml +180 -19
  33. imap_processing/cdf/config/imap_ultra_l1a_variable_attrs.yaml +5045 -41
  34. imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +80 -17
  35. imap_processing/cdf/config/imap_ultra_l1c_variable_attrs.yaml +32 -57
  36. imap_processing/cdf/utils.py +52 -38
  37. imap_processing/cli.py +477 -233
  38. imap_processing/codice/codice_l1a.py +466 -131
  39. imap_processing/codice/codice_l1b.py +51 -152
  40. imap_processing/codice/constants.py +1360 -569
  41. imap_processing/codice/decompress.py +2 -6
  42. imap_processing/ena_maps/ena_maps.py +1103 -146
  43. imap_processing/ena_maps/utils/coordinates.py +19 -0
  44. imap_processing/ena_maps/utils/map_utils.py +14 -17
  45. imap_processing/ena_maps/utils/spatial_utils.py +55 -52
  46. imap_processing/glows/l1a/glows_l1a.py +28 -99
  47. imap_processing/glows/l1a/glows_l1a_data.py +2 -2
  48. imap_processing/glows/l1b/glows_l1b.py +1 -4
  49. imap_processing/glows/l1b/glows_l1b_data.py +1 -3
  50. imap_processing/glows/l2/glows_l2.py +2 -5
  51. imap_processing/hi/l1a/hi_l1a.py +54 -29
  52. imap_processing/hi/l1a/histogram.py +0 -1
  53. imap_processing/hi/l1a/science_direct_event.py +6 -8
  54. imap_processing/hi/l1b/hi_l1b.py +111 -82
  55. imap_processing/hi/l1c/hi_l1c.py +416 -32
  56. imap_processing/hi/utils.py +58 -12
  57. imap_processing/hit/ancillary/imap_hit_l1b-to-l2-sector-dt0-factors_20250219_v002.csv +81 -0
  58. imap_processing/hit/ancillary/imap_hit_l1b-to-l2-standard-dt0-factors_20250219_v002.csv +205 -0
  59. imap_processing/hit/ancillary/imap_hit_l1b-to-l2-standard-dt1-factors_20250219_v002.csv +205 -0
  60. imap_processing/hit/ancillary/imap_hit_l1b-to-l2-standard-dt2-factors_20250219_v002.csv +205 -0
  61. imap_processing/hit/ancillary/imap_hit_l1b-to-l2-standard-dt3-factors_20250219_v002.csv +205 -0
  62. imap_processing/hit/ancillary/imap_hit_l1b-to-l2-summed-dt0-factors_20250219_v002.csv +68 -0
  63. imap_processing/hit/hit_utils.py +235 -5
  64. imap_processing/hit/l0/constants.py +20 -11
  65. imap_processing/hit/l0/decom_hit.py +21 -5
  66. imap_processing/hit/l1a/hit_l1a.py +71 -75
  67. imap_processing/hit/l1b/constants.py +321 -0
  68. imap_processing/hit/l1b/hit_l1b.py +377 -67
  69. imap_processing/hit/l2/constants.py +318 -0
  70. imap_processing/hit/l2/hit_l2.py +723 -0
  71. imap_processing/hit/packet_definitions/hit_packet_definitions.xml +1323 -71
  72. imap_processing/ialirt/l0/mag_l0_ialirt_data.py +155 -0
  73. imap_processing/ialirt/l0/parse_mag.py +374 -0
  74. imap_processing/ialirt/l0/process_swapi.py +69 -0
  75. imap_processing/ialirt/l0/process_swe.py +548 -0
  76. imap_processing/ialirt/packet_definitions/ialirt.xml +216 -208
  77. imap_processing/ialirt/packet_definitions/ialirt_codicehi.xml +1 -1
  78. imap_processing/ialirt/packet_definitions/ialirt_codicelo.xml +1 -1
  79. imap_processing/ialirt/packet_definitions/ialirt_mag.xml +115 -0
  80. imap_processing/ialirt/packet_definitions/ialirt_swapi.xml +14 -14
  81. imap_processing/ialirt/utils/grouping.py +114 -0
  82. imap_processing/ialirt/utils/time.py +29 -0
  83. imap_processing/idex/atomic_masses.csv +22 -0
  84. imap_processing/idex/decode.py +2 -2
  85. imap_processing/idex/idex_constants.py +33 -0
  86. imap_processing/idex/idex_l0.py +22 -8
  87. imap_processing/idex/idex_l1a.py +81 -51
  88. imap_processing/idex/idex_l1b.py +13 -39
  89. imap_processing/idex/idex_l2a.py +823 -0
  90. imap_processing/idex/idex_l2b.py +120 -0
  91. imap_processing/idex/idex_variable_unpacking_and_eu_conversion.csv +11 -11
  92. imap_processing/idex/packet_definitions/idex_housekeeping_packet_definition.xml +9130 -0
  93. imap_processing/lo/l0/lo_science.py +7 -2
  94. imap_processing/lo/l1a/lo_l1a.py +1 -5
  95. imap_processing/lo/l1b/lo_l1b.py +702 -29
  96. imap_processing/lo/l1b/tof_conversions.py +11 -0
  97. imap_processing/lo/l1c/lo_l1c.py +1 -4
  98. imap_processing/mag/constants.py +51 -0
  99. imap_processing/mag/imap_mag_sdc_configuration_v001.py +8 -0
  100. imap_processing/mag/l0/decom_mag.py +10 -3
  101. imap_processing/mag/l1a/mag_l1a.py +23 -19
  102. imap_processing/mag/l1a/mag_l1a_data.py +35 -10
  103. imap_processing/mag/l1b/mag_l1b.py +259 -50
  104. imap_processing/mag/l1c/interpolation_methods.py +388 -0
  105. imap_processing/mag/l1c/mag_l1c.py +621 -17
  106. imap_processing/mag/l2/mag_l2.py +140 -0
  107. imap_processing/mag/l2/mag_l2_data.py +288 -0
  108. imap_processing/quality_flags.py +1 -0
  109. imap_processing/spacecraft/packet_definitions/scid_x252.xml +538 -0
  110. imap_processing/spacecraft/quaternions.py +121 -0
  111. imap_processing/spice/geometry.py +19 -22
  112. imap_processing/spice/kernels.py +0 -276
  113. imap_processing/spice/pointing_frame.py +257 -0
  114. imap_processing/spice/repoint.py +149 -0
  115. imap_processing/spice/spin.py +38 -33
  116. imap_processing/spice/time.py +24 -0
  117. imap_processing/swapi/l1/swapi_l1.py +20 -12
  118. imap_processing/swapi/l2/swapi_l2.py +116 -5
  119. imap_processing/swapi/swapi_utils.py +32 -0
  120. imap_processing/swe/l1a/swe_l1a.py +44 -12
  121. imap_processing/swe/l1a/swe_science.py +13 -13
  122. imap_processing/swe/l1b/swe_l1b.py +898 -23
  123. imap_processing/swe/l2/swe_l2.py +75 -136
  124. imap_processing/swe/packet_definitions/swe_packet_definition.xml +1121 -1
  125. imap_processing/swe/utils/swe_constants.py +64 -0
  126. imap_processing/swe/utils/swe_utils.py +85 -28
  127. imap_processing/tests/ccsds/test_data/expected_output.xml +40 -1
  128. imap_processing/tests/ccsds/test_excel_to_xtce.py +24 -21
  129. imap_processing/tests/cdf/test_data/imap_instrument2_global_cdf_attrs.yaml +0 -2
  130. imap_processing/tests/cdf/test_utils.py +14 -16
  131. imap_processing/tests/codice/conftest.py +44 -33
  132. imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-counters-aggregated_20241110193700_v0.0.0.cdf +0 -0
  133. imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-counters-singles_20241110193700_v0.0.0.cdf +0 -0
  134. imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-ialirt_20241110193700_v0.0.0.cdf +0 -0
  135. imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-omni_20241110193700_v0.0.0.cdf +0 -0
  136. imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-pha_20241110193700_v0.0.0.cdf +0 -0
  137. imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-priorities_20241110193700_v0.0.0.cdf +0 -0
  138. imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-sectored_20241110193700_v0.0.0.cdf +0 -0
  139. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-counters-aggregated_20241110193700_v0.0.0.cdf +0 -0
  140. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-counters-singles_20241110193700_v0.0.0.cdf +0 -0
  141. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-ialirt_20241110193700_v0.0.0.cdf +0 -0
  142. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-nsw-angular_20241110193700_v0.0.0.cdf +0 -0
  143. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-nsw-priority_20241110193700_v0.0.0.cdf +0 -0
  144. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-nsw-species_20241110193700_v0.0.0.cdf +0 -0
  145. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-pha_20241110193700_v0.0.0.cdf +0 -0
  146. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-sw-angular_20241110193700_v0.0.0.cdf +0 -0
  147. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-sw-priority_20241110193700_v0.0.0.cdf +0 -0
  148. imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-sw-species_20241110193700_v0.0.0.cdf +0 -0
  149. imap_processing/tests/codice/test_codice_l1a.py +126 -53
  150. imap_processing/tests/codice/test_codice_l1b.py +6 -7
  151. imap_processing/tests/codice/test_decompress.py +4 -4
  152. imap_processing/tests/conftest.py +239 -27
  153. imap_processing/tests/ena_maps/conftest.py +51 -0
  154. imap_processing/tests/ena_maps/test_ena_maps.py +1068 -110
  155. imap_processing/tests/ena_maps/test_map_utils.py +66 -43
  156. imap_processing/tests/ena_maps/test_spatial_utils.py +17 -21
  157. imap_processing/tests/glows/conftest.py +10 -14
  158. imap_processing/tests/glows/test_glows_decom.py +4 -4
  159. imap_processing/tests/glows/test_glows_l1a_cdf.py +6 -27
  160. imap_processing/tests/glows/test_glows_l1a_data.py +6 -8
  161. imap_processing/tests/glows/test_glows_l1b.py +11 -11
  162. imap_processing/tests/glows/test_glows_l1b_data.py +5 -5
  163. imap_processing/tests/glows/test_glows_l2.py +2 -8
  164. imap_processing/tests/hi/conftest.py +1 -1
  165. imap_processing/tests/hi/data/l0/H45_diag_fee_20250208.bin +0 -0
  166. imap_processing/tests/hi/data/l0/H45_diag_fee_20250208_verify.csv +205 -0
  167. imap_processing/tests/hi/test_hi_l1b.py +22 -27
  168. imap_processing/tests/hi/test_hi_l1c.py +249 -18
  169. imap_processing/tests/hi/test_l1a.py +35 -7
  170. imap_processing/tests/hi/test_science_direct_event.py +3 -3
  171. imap_processing/tests/hi/test_utils.py +24 -2
  172. imap_processing/tests/hit/helpers/l1_validation.py +74 -73
  173. imap_processing/tests/hit/test_data/hskp_sample.ccsds +0 -0
  174. imap_processing/tests/hit/test_data/imap_hit_l0_raw_20100105_v001.pkts +0 -0
  175. imap_processing/tests/hit/test_decom_hit.py +5 -1
  176. imap_processing/tests/hit/test_hit_l1a.py +32 -36
  177. imap_processing/tests/hit/test_hit_l1b.py +300 -81
  178. imap_processing/tests/hit/test_hit_l2.py +716 -0
  179. imap_processing/tests/hit/test_hit_utils.py +184 -7
  180. imap_processing/tests/hit/validation_data/hit_l1b_standard_sample2_nsrl_v4_3decimals.csv +62 -62
  181. imap_processing/tests/hit/validation_data/hskp_sample_eu_3_6_2025.csv +89 -0
  182. imap_processing/tests/hit/validation_data/hskp_sample_raw.csv +89 -88
  183. imap_processing/tests/hit/validation_data/sci_sample_raw.csv +1 -1
  184. imap_processing/tests/ialirt/data/l0/461971383-404.bin +0 -0
  185. imap_processing/tests/ialirt/data/l0/461971384-405.bin +0 -0
  186. imap_processing/tests/ialirt/data/l0/461971385-406.bin +0 -0
  187. imap_processing/tests/ialirt/data/l0/461971386-407.bin +0 -0
  188. imap_processing/tests/ialirt/data/l0/461971387-408.bin +0 -0
  189. imap_processing/tests/ialirt/data/l0/461971388-409.bin +0 -0
  190. imap_processing/tests/ialirt/data/l0/461971389-410.bin +0 -0
  191. imap_processing/tests/ialirt/data/l0/461971390-411.bin +0 -0
  192. imap_processing/tests/ialirt/data/l0/461971391-412.bin +0 -0
  193. imap_processing/tests/ialirt/data/l0/sample_decoded_i-alirt_data.csv +383 -0
  194. imap_processing/tests/ialirt/unit/test_decom_ialirt.py +16 -81
  195. imap_processing/tests/ialirt/unit/test_grouping.py +81 -0
  196. imap_processing/tests/ialirt/unit/test_parse_mag.py +223 -0
  197. imap_processing/tests/ialirt/unit/test_process_codicehi.py +3 -3
  198. imap_processing/tests/ialirt/unit/test_process_codicelo.py +3 -10
  199. imap_processing/tests/ialirt/unit/test_process_ephemeris.py +4 -4
  200. imap_processing/tests/ialirt/unit/test_process_hit.py +3 -3
  201. imap_processing/tests/ialirt/unit/test_process_swapi.py +24 -16
  202. imap_processing/tests/ialirt/unit/test_process_swe.py +319 -6
  203. imap_processing/tests/ialirt/unit/test_time.py +16 -0
  204. imap_processing/tests/idex/conftest.py +127 -6
  205. imap_processing/tests/idex/test_data/imap_idex_l0_raw_20231218_v001.pkts +0 -0
  206. imap_processing/tests/idex/test_data/imap_idex_l0_raw_20241206_v001.pkts +0 -0
  207. imap_processing/tests/idex/test_data/imap_idex_l0_raw_20250108_v001.pkts +0 -0
  208. imap_processing/tests/idex/test_data/impact_14_tof_high_data.txt +4508 -4508
  209. imap_processing/tests/idex/test_idex_l0.py +33 -11
  210. imap_processing/tests/idex/test_idex_l1a.py +92 -21
  211. imap_processing/tests/idex/test_idex_l1b.py +106 -27
  212. imap_processing/tests/idex/test_idex_l2a.py +399 -0
  213. imap_processing/tests/idex/test_idex_l2b.py +93 -0
  214. imap_processing/tests/lo/test_cdfs/imap_lo_l1a_de_20241022_v002.cdf +0 -0
  215. imap_processing/tests/lo/test_cdfs/imap_lo_l1a_spin_20241022_v002.cdf +0 -0
  216. imap_processing/tests/lo/test_lo_l1a.py +3 -3
  217. imap_processing/tests/lo/test_lo_l1b.py +515 -6
  218. imap_processing/tests/lo/test_lo_l1c.py +1 -1
  219. imap_processing/tests/lo/test_lo_science.py +7 -7
  220. imap_processing/tests/lo/test_star_sensor.py +1 -1
  221. imap_processing/tests/mag/conftest.py +120 -2
  222. imap_processing/tests/mag/test_mag_decom.py +5 -4
  223. imap_processing/tests/mag/test_mag_l1a.py +51 -7
  224. imap_processing/tests/mag/test_mag_l1b.py +40 -59
  225. imap_processing/tests/mag/test_mag_l1c.py +354 -19
  226. imap_processing/tests/mag/test_mag_l2.py +130 -0
  227. imap_processing/tests/mag/test_mag_validation.py +247 -26
  228. imap_processing/tests/mag/validation/L1b/T009/MAGScience-normal-(2,2)-8s-20250204-16h39.csv +17 -0
  229. imap_processing/tests/mag/validation/L1b/T009/mag-l1a-l1b-t009-magi-out.csv +16 -16
  230. imap_processing/tests/mag/validation/L1b/T009/mag-l1a-l1b-t009-mago-out.csv +16 -16
  231. imap_processing/tests/mag/validation/L1b/T010/MAGScience-normal-(2,2)-8s-20250206-12h05.csv +17 -0
  232. imap_processing/tests/mag/validation/L1b/T011/MAGScience-normal-(2,2)-8s-20250204-16h08.csv +17 -0
  233. imap_processing/tests/mag/validation/L1b/T011/mag-l1a-l1b-t011-magi-out.csv +16 -16
  234. imap_processing/tests/mag/validation/L1b/T011/mag-l1a-l1b-t011-mago-out.csv +16 -16
  235. imap_processing/tests/mag/validation/L1b/T012/MAGScience-normal-(2,2)-8s-20250204-16h08.csv +17 -0
  236. imap_processing/tests/mag/validation/L1b/T012/data.bin +0 -0
  237. imap_processing/tests/mag/validation/L1b/T012/field_like_all_ranges.txt +19200 -0
  238. imap_processing/tests/mag/validation/L1b/T012/mag-l1a-l1b-t012-cal.cdf +0 -0
  239. imap_processing/tests/mag/validation/L1b/T012/mag-l1a-l1b-t012-in.csv +17 -0
  240. imap_processing/tests/mag/validation/L1b/T012/mag-l1a-l1b-t012-magi-out.csv +17 -0
  241. imap_processing/tests/mag/validation/L1b/T012/mag-l1a-l1b-t012-mago-out.csv +17 -0
  242. imap_processing/tests/mag/validation/L1c/T013/mag-l1b-l1c-t013-magi-normal-in.csv +1217 -0
  243. imap_processing/tests/mag/validation/L1c/T013/mag-l1b-l1c-t013-magi-normal-out.csv +1857 -0
  244. imap_processing/tests/mag/validation/L1c/T013/mag-l1b-l1c-t013-mago-normal-in.csv +1217 -0
  245. imap_processing/tests/mag/validation/L1c/T013/mag-l1b-l1c-t013-mago-normal-out.csv +1857 -0
  246. imap_processing/tests/mag/validation/L1c/T014/mag-l1b-l1c-t014-magi-normal-in.csv +1217 -0
  247. imap_processing/tests/mag/validation/L1c/T014/mag-l1b-l1c-t014-magi-normal-out.csv +1793 -0
  248. imap_processing/tests/mag/validation/L1c/T014/mag-l1b-l1c-t014-mago-normal-in.csv +1217 -0
  249. imap_processing/tests/mag/validation/L1c/T014/mag-l1b-l1c-t014-mago-normal-out.csv +1793 -0
  250. imap_processing/tests/mag/validation/L1c/T015/mag-l1b-l1c-t015-magi-burst-in.csv +2561 -0
  251. imap_processing/tests/mag/validation/L1c/T015/mag-l1b-l1c-t015-magi-normal-in.csv +961 -0
  252. imap_processing/tests/mag/validation/L1c/T015/mag-l1b-l1c-t015-magi-normal-out.csv +1539 -0
  253. imap_processing/tests/mag/validation/L1c/T015/mag-l1b-l1c-t015-mago-normal-in.csv +1921 -0
  254. imap_processing/tests/mag/validation/L1c/T015/mag-l1b-l1c-t015-mago-normal-out.csv +2499 -0
  255. imap_processing/tests/mag/validation/L1c/T016/mag-l1b-l1c-t016-magi-normal-in.csv +865 -0
  256. imap_processing/tests/mag/validation/L1c/T016/mag-l1b-l1c-t016-magi-normal-out.csv +1196 -0
  257. imap_processing/tests/mag/validation/L1c/T016/mag-l1b-l1c-t016-mago-normal-in.csv +1729 -0
  258. imap_processing/tests/mag/validation/L1c/T016/mag-l1b-l1c-t016-mago-normal-out.csv +3053 -0
  259. imap_processing/tests/mag/validation/L2/imap_mag_l1b_norm-mago_20251017_v002.cdf +0 -0
  260. imap_processing/tests/mag/validation/calibration/imap_mag_l1b-calibration_20240229_v001.cdf +0 -0
  261. imap_processing/tests/mag/validation/calibration/imap_mag_l2-calibration-matrices_20251017_v004.cdf +0 -0
  262. imap_processing/tests/mag/validation/calibration/imap_mag_l2-offsets-norm_20251017_20251017_v001.cdf +0 -0
  263. imap_processing/tests/spacecraft/data/SSR_2024_190_20_08_12_0483851794_2_DA_apid0594_1packet.pkts +0 -0
  264. imap_processing/tests/spacecraft/test_quaternions.py +71 -0
  265. imap_processing/tests/spice/test_data/fake_repoint_data.csv +5 -0
  266. imap_processing/tests/spice/test_data/fake_spin_data.csv +11 -11
  267. imap_processing/tests/spice/test_geometry.py +9 -12
  268. imap_processing/tests/spice/test_kernels.py +1 -200
  269. imap_processing/tests/spice/test_pointing_frame.py +185 -0
  270. imap_processing/tests/spice/test_repoint.py +121 -0
  271. imap_processing/tests/spice/test_spin.py +50 -9
  272. imap_processing/tests/spice/test_time.py +14 -0
  273. imap_processing/tests/swapi/lut/imap_swapi_esa-unit-conversion_20250211_v000.csv +73 -0
  274. imap_processing/tests/swapi/lut/imap_swapi_lut-notes_20250211_v000.csv +1025 -0
  275. imap_processing/tests/swapi/test_swapi_l1.py +13 -11
  276. imap_processing/tests/swapi/test_swapi_l2.py +180 -8
  277. imap_processing/tests/swe/l0_data/2024051010_SWE_HK_packet.bin +0 -0
  278. imap_processing/tests/swe/l0_data/2024051011_SWE_CEM_RAW_packet.bin +0 -0
  279. imap_processing/tests/swe/l0_validation_data/idle_export_eu.SWE_APP_HK_20240510_092742.csv +49 -0
  280. imap_processing/tests/swe/l0_validation_data/idle_export_eu.SWE_CEM_RAW_20240510_092742.csv +593 -0
  281. imap_processing/tests/swe/lut/checker-board-indices.csv +24 -0
  282. imap_processing/tests/swe/lut/imap_swe_esa-lut_20250301_v000.csv +385 -0
  283. imap_processing/tests/swe/lut/imap_swe_l1b-in-flight-cal_20240510_20260716_v000.csv +3 -0
  284. imap_processing/tests/swe/test_swe_l1a.py +20 -2
  285. imap_processing/tests/swe/test_swe_l1a_cem_raw.py +52 -0
  286. imap_processing/tests/swe/test_swe_l1a_hk.py +68 -0
  287. imap_processing/tests/swe/test_swe_l1a_science.py +3 -3
  288. imap_processing/tests/swe/test_swe_l1b.py +162 -24
  289. imap_processing/tests/swe/test_swe_l2.py +153 -91
  290. imap_processing/tests/test_cli.py +171 -88
  291. imap_processing/tests/test_utils.py +140 -17
  292. imap_processing/tests/ultra/data/l0/FM45_UltraFM45_Functional_2024-01-22T0105_20240122T010548.CCSDS +0 -0
  293. imap_processing/tests/ultra/data/l0/ultra45_raw_sc_ultraimgrates_20220530_00.csv +164 -0
  294. imap_processing/tests/ultra/{test_data → data}/l0/ultra45_raw_sc_ultrarawimg_withFSWcalcs_FM45_40P_Phi28p5_BeamCal_LinearScan_phi2850_theta-000_20240207T102740.csv +3243 -3243
  295. imap_processing/tests/ultra/data/mock_data.py +369 -0
  296. imap_processing/tests/ultra/unit/conftest.py +115 -89
  297. imap_processing/tests/ultra/unit/test_badtimes.py +4 -4
  298. imap_processing/tests/ultra/unit/test_cullingmask.py +8 -6
  299. imap_processing/tests/ultra/unit/test_de.py +14 -13
  300. imap_processing/tests/ultra/unit/test_decom_apid_880.py +27 -76
  301. imap_processing/tests/ultra/unit/test_decom_apid_881.py +54 -11
  302. imap_processing/tests/ultra/unit/test_decom_apid_883.py +12 -10
  303. imap_processing/tests/ultra/unit/test_decom_apid_896.py +202 -55
  304. imap_processing/tests/ultra/unit/test_lookup_utils.py +23 -1
  305. imap_processing/tests/ultra/unit/test_spacecraft_pset.py +77 -0
  306. imap_processing/tests/ultra/unit/test_ultra_l1a.py +98 -305
  307. imap_processing/tests/ultra/unit/test_ultra_l1b.py +60 -14
  308. imap_processing/tests/ultra/unit/test_ultra_l1b_annotated.py +2 -2
  309. imap_processing/tests/ultra/unit/test_ultra_l1b_culling.py +26 -27
  310. imap_processing/tests/ultra/unit/test_ultra_l1b_extended.py +239 -70
  311. imap_processing/tests/ultra/unit/test_ultra_l1c.py +5 -5
  312. imap_processing/tests/ultra/unit/test_ultra_l1c_pset_bins.py +114 -83
  313. imap_processing/tests/ultra/unit/test_ultra_l2.py +230 -0
  314. imap_processing/ultra/constants.py +1 -1
  315. imap_processing/ultra/l0/decom_tools.py +27 -39
  316. imap_processing/ultra/l0/decom_ultra.py +168 -204
  317. imap_processing/ultra/l0/ultra_utils.py +152 -136
  318. imap_processing/ultra/l1a/ultra_l1a.py +55 -271
  319. imap_processing/ultra/l1b/badtimes.py +1 -4
  320. imap_processing/ultra/l1b/cullingmask.py +2 -6
  321. imap_processing/ultra/l1b/de.py +116 -57
  322. imap_processing/ultra/l1b/extendedspin.py +20 -18
  323. imap_processing/ultra/l1b/lookup_utils.py +72 -9
  324. imap_processing/ultra/l1b/ultra_l1b.py +36 -16
  325. imap_processing/ultra/l1b/ultra_l1b_culling.py +66 -30
  326. imap_processing/ultra/l1b/ultra_l1b_extended.py +297 -94
  327. imap_processing/ultra/l1c/histogram.py +2 -6
  328. imap_processing/ultra/l1c/spacecraft_pset.py +84 -0
  329. imap_processing/ultra/l1c/ultra_l1c.py +8 -9
  330. imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +206 -108
  331. imap_processing/ultra/l2/ultra_l2.py +299 -0
  332. imap_processing/ultra/lookup_tables/Angular_Profiles_FM45_LeftSlit.csv +526 -0
  333. imap_processing/ultra/lookup_tables/Angular_Profiles_FM45_RightSlit.csv +526 -0
  334. imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_LeftSlit.csv +526 -0
  335. imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_RightSlit.csv +526 -0
  336. imap_processing/ultra/lookup_tables/FM45_Startup1_ULTRA_IMGPARAMS_20240719.csv +2 -2
  337. imap_processing/ultra/lookup_tables/FM90_Startup1_ULTRA_IMGPARAMS_20240719.csv +2 -0
  338. imap_processing/ultra/packet_definitions/README.md +38 -0
  339. imap_processing/ultra/packet_definitions/ULTRA_SCI_COMBINED.xml +15302 -482
  340. imap_processing/ultra/utils/ultra_l1_utils.py +31 -12
  341. imap_processing/utils.py +69 -29
  342. {imap_processing-0.11.0.dist-info → imap_processing-0.13.0.dist-info}/METADATA +10 -6
  343. imap_processing-0.13.0.dist-info/RECORD +578 -0
  344. imap_processing/cdf/config/imap_mag_l1_variable_attrs.yaml +0 -237
  345. imap_processing/hi/l1a/housekeeping.py +0 -27
  346. imap_processing/hi/l1b/hi_eng_unit_convert_table.csv +0 -154
  347. imap_processing/swe/l1b/swe_esa_lookup_table.csv +0 -1441
  348. imap_processing/swe/l1b/swe_l1b_science.py +0 -652
  349. imap_processing/tests/codice/data/imap_codice_l1a_hi-counters-aggregated_20240429_v001.cdf +0 -0
  350. imap_processing/tests/codice/data/imap_codice_l1a_hi-counters-singles_20240429_v001.cdf +0 -0
  351. imap_processing/tests/codice/data/imap_codice_l1a_hi-omni_20240429_v001.cdf +0 -0
  352. imap_processing/tests/codice/data/imap_codice_l1a_hi-sectored_20240429_v001.cdf +0 -0
  353. imap_processing/tests/codice/data/imap_codice_l1a_hskp_20100101_v001.cdf +0 -0
  354. imap_processing/tests/codice/data/imap_codice_l1a_lo-counters-aggregated_20240429_v001.cdf +0 -0
  355. imap_processing/tests/codice/data/imap_codice_l1a_lo-counters-singles_20240429_v001.cdf +0 -0
  356. imap_processing/tests/codice/data/imap_codice_l1a_lo-nsw-angular_20240429_v001.cdf +0 -0
  357. imap_processing/tests/codice/data/imap_codice_l1a_lo-nsw-priority_20240429_v001.cdf +0 -0
  358. imap_processing/tests/codice/data/imap_codice_l1a_lo-nsw-species_20240429_v001.cdf +0 -0
  359. imap_processing/tests/codice/data/imap_codice_l1a_lo-sw-angular_20240429_v001.cdf +0 -0
  360. imap_processing/tests/codice/data/imap_codice_l1a_lo-sw-priority_20240429_v001.cdf +0 -0
  361. imap_processing/tests/codice/data/imap_codice_l1a_lo-sw-species_20240429_v001.cdf +0 -0
  362. imap_processing/tests/codice/data/imap_codice_l1b_hi-counters-aggregated_20240429_v001.cdf +0 -0
  363. imap_processing/tests/codice/data/imap_codice_l1b_hi-counters-singles_20240429_v001.cdf +0 -0
  364. imap_processing/tests/codice/data/imap_codice_l1b_hi-omni_20240429_v001.cdf +0 -0
  365. imap_processing/tests/codice/data/imap_codice_l1b_hi-sectored_20240429_v001.cdf +0 -0
  366. imap_processing/tests/codice/data/imap_codice_l1b_hskp_20100101_v001.cdf +0 -0
  367. imap_processing/tests/codice/data/imap_codice_l1b_lo-counters-aggregated_20240429_v001.cdf +0 -0
  368. imap_processing/tests/codice/data/imap_codice_l1b_lo-counters-singles_20240429_v001.cdf +0 -0
  369. imap_processing/tests/codice/data/imap_codice_l1b_lo-nsw-angular_20240429_v001.cdf +0 -0
  370. imap_processing/tests/codice/data/imap_codice_l1b_lo-nsw-priority_20240429_v001.cdf +0 -0
  371. imap_processing/tests/codice/data/imap_codice_l1b_lo-nsw-species_20240429_v001.cdf +0 -0
  372. imap_processing/tests/codice/data/imap_codice_l1b_lo-sw-angular_20240429_v001.cdf +0 -0
  373. imap_processing/tests/codice/data/imap_codice_l1b_lo-sw-priority_20240429_v001.cdf +0 -0
  374. imap_processing/tests/codice/data/imap_codice_l1b_lo-sw-species_20240429_v001.cdf +0 -0
  375. imap_processing/tests/hi/data/l1/imap_hi_l1b_45sensor-de_20250415_v999.cdf +0 -0
  376. imap_processing/tests/hit/PREFLIGHT_raw_record_2023_256_15_59_04_apid1251.pkts +0 -0
  377. imap_processing/tests/hit/PREFLIGHT_raw_record_2023_256_15_59_04_apid1252.pkts +0 -0
  378. imap_processing/tests/hit/validation_data/hskp_sample_eu.csv +0 -89
  379. imap_processing/tests/hit/validation_data/sci_sample_raw1.csv +0 -29
  380. imap_processing/tests/idex/test_data/imap_idex_l0_raw_20231214_v001.pkts +0 -0
  381. imap_processing/tests/lo/test_cdfs/imap_lo_l1a_de_20100101_v001.cdf +0 -0
  382. imap_processing/tests/lo/test_cdfs/imap_lo_l1a_spin_20100101_v001.cdf +0 -0
  383. imap_processing/tests/swe/test_swe_l1b_science.py +0 -84
  384. imap_processing/tests/ultra/test_data/mock_data.py +0 -161
  385. imap_processing/ultra/l1c/pset.py +0 -40
  386. imap_processing/ultra/lookup_tables/dps_sensitivity45.cdf +0 -0
  387. imap_processing-0.11.0.dist-info/RECORD +0 -488
  388. /imap_processing/idex/packet_definitions/{idex_packet_definition.xml → idex_science_packet_definition.xml} +0 -0
  389. /imap_processing/tests/ialirt/{test_data → data}/l0/20240827095047_SWE_IALIRT_packet.bin +0 -0
  390. /imap_processing/tests/ialirt/{test_data → data}/l0/BinLog CCSDS_FRAG_TLM_20240826_152323Z_IALIRT_data_for_SDC.bin +0 -0
  391. /imap_processing/tests/ialirt/{test_data → data}/l0/IALiRT Raw Packet Telemetry.txt +0 -0
  392. /imap_processing/tests/ialirt/{test_data → data}/l0/apid01152.tlm +0 -0
  393. /imap_processing/tests/ialirt/{test_data → data}/l0/eu_SWP_IAL_20240826_152033.csv +0 -0
  394. /imap_processing/tests/ialirt/{test_data → data}/l0/hi_fsw_view_1_ccsds.bin +0 -0
  395. /imap_processing/tests/ialirt/{test_data → data}/l0/hit_ialirt_sample.ccsds +0 -0
  396. /imap_processing/tests/ialirt/{test_data → data}/l0/hit_ialirt_sample.csv +0 -0
  397. /imap_processing/tests/ialirt/{test_data → data}/l0/idle_export_eu.SWE_IALIRT_20240827_093852.csv +0 -0
  398. /imap_processing/tests/ialirt/{test_data → data}/l0/imap_codice_l1a_hi-ialirt_20240523200000_v0.0.0.cdf +0 -0
  399. /imap_processing/tests/ialirt/{test_data → data}/l0/imap_codice_l1a_lo-ialirt_20241110193700_v0.0.0.cdf +0 -0
  400. /imap_processing/{mag/l1b → tests/spacecraft}/__init__.py +0 -0
  401. /imap_processing/{swe/l1b/engineering_unit_convert_table.csv → tests/swe/lut/imap_swe_eu-conversion_20240510_v000.csv} +0 -0
  402. /imap_processing/tests/ultra/{test_data → data}/l0/FM45_40P_Phi28p5_BeamCal_LinearScan_phi28.50_theta-0.00_20240207T102740.CCSDS +0 -0
  403. /imap_processing/tests/ultra/{test_data → data}/l0/FM45_7P_Phi0.0_BeamCal_LinearScan_phi0.04_theta-0.01_20230821T121304.CCSDS +0 -0
  404. /imap_processing/tests/ultra/{test_data → data}/l0/FM45_TV_Cycle6_Hot_Ops_Front212_20240124T063837.CCSDS +0 -0
  405. /imap_processing/tests/ultra/{test_data → data}/l0/Ultra45_EM_SwRI_Cal_Run7_ThetaScan_20220530T225054.CCSDS +0 -0
  406. /imap_processing/tests/ultra/{test_data → data}/l0/ultra45_raw_sc_auxdata_Ultra45_EM_SwRI_Cal_Run7_ThetaScan_20220530T225054.csv +0 -0
  407. /imap_processing/tests/ultra/{test_data → data}/l0/ultra45_raw_sc_enaphxtofhangimg_FM45_TV_Cycle6_Hot_Ops_Front212_20240124T063837.csv +0 -0
  408. /imap_processing/tests/ultra/{test_data → data}/l0/ultra45_raw_sc_ultraimgrates_Ultra45_EM_SwRI_Cal_Run7_ThetaScan_20220530T225054.csv +0 -0
  409. /imap_processing/tests/ultra/{test_data → data}/l0/ultra45_raw_sc_ultrarawimgevent_FM45_7P_Phi00_BeamCal_LinearScan_phi004_theta-001_20230821T121304.csv +0 -0
  410. /imap_processing/tests/ultra/{test_data → data}/l1/dps_exposure_helio_45_E1.cdf +0 -0
  411. /imap_processing/tests/ultra/{test_data → data}/l1/dps_exposure_helio_45_E12.cdf +0 -0
  412. /imap_processing/tests/ultra/{test_data → data}/l1/dps_exposure_helio_45_E24.cdf +0 -0
  413. {imap_processing-0.11.0.dist-info → imap_processing-0.13.0.dist-info}/LICENSE +0 -0
  414. {imap_processing-0.11.0.dist-info → imap_processing-0.13.0.dist-info}/WHEEL +0 -0
  415. {imap_processing-0.11.0.dist-info → imap_processing-0.13.0.dist-info}/entry_points.txt +0 -0
imap_processing/cli.py CHANGED
@@ -1,5 +1,4 @@
  #!/usr/bin/env python3
- # ruff: noqa: PLR0913
  """
  Run the processing for a specific instrument & data level.

@@ -11,17 +10,21 @@ Examples
  imap_cli --instrument <instrument> --level <data_level>
  """

+ from __future__ import annotations
+
  import argparse
  import logging
+ import re
  import sys
  from abc import ABC, abstractmethod
- from json import loads
  from pathlib import Path
  from typing import final
- from urllib.error import HTTPError

  import imap_data_access
  import xarray as xr
+ from imap_data_access.processing_input import (
+ ProcessingInputCollection,
+ )

  import imap_processing
  from imap_processing._version import __version__, __version_tuple__ # noqa: F401
@@ -44,6 +47,7 @@ from imap_processing.hi.l1b import hi_l1b
  from imap_processing.hi.l1c import hi_l1c
  from imap_processing.hit.l1a.hit_l1a import hit_l1a
  from imap_processing.hit.l1b.hit_l1b import hit_l1b
+ from imap_processing.hit.l2.hit_l2 import hit_l2
  from imap_processing.idex.idex_l1a import PacketParser
  from imap_processing.idex.idex_l1b import idex_l1b
  from imap_processing.lo.l1a import lo_l1a
@@ -52,8 +56,11 @@ from imap_processing.lo.l1c import lo_l1c
  from imap_processing.mag.l1a.mag_l1a import mag_l1a
  from imap_processing.mag.l1b.mag_l1b import mag_l1b
  from imap_processing.mag.l1c.mag_l1c import mag_l1c
+ from imap_processing.mag.l2.mag_l2 import mag_l2
+ from imap_processing.spacecraft import quaternions
  from imap_processing.swapi.l1.swapi_l1 import swapi_l1
  from imap_processing.swapi.l2.swapi_l2 import swapi_l2
+ from imap_processing.swapi.swapi_utils import read_swapi_lut_table
  from imap_processing.swe.l1a.swe_l1a import swe_l1a
  from imap_processing.swe.l1b.swe_l1b import swe_l1b
  from imap_processing.ultra.l1a import ultra_l1a
@@ -73,14 +80,22 @@ def _parse_args() -> argparse.Namespace:
  --descriptor "all"
  --start-date "20231212"
  --version "v001"
- --dependency "[
- {
- 'instrument': 'mag',
- 'data_level': 'l0',
- 'descriptor': 'sci',
- 'version': 'v001',
- 'start_date': '20231212'
- }]"
+ --dependency '[
+ {
+ "type": "ancillary",
+ "files": [
+ "imap_mag_l1b-cal_20250101_v001.cdf",
+ "imap_mag_l1b-cal_20250103_20250104_v002.cdf"
+ ]
+ },
+ {
+ "type": "science",
+ "files": [
+ "imap_idex_l2_sci_20240312_v000.cdf",
+ "imap_idex_l2_sci_20240312_v001.cdf"
+ ]
+ }
+ ]'
  --upload-to-sdc

  Returns
@@ -92,17 +107,28 @@ def _parse_args() -> argparse.Namespace:
  "This command line program invokes the processing pipeline "
  "for a specific instrument and data level. Example usage: "
  '"imap_cli --instrument "mag" '
- '--data-level "l1a"'
- '--descriptor "all"'
- ' --start-date "20231212"'
- '--version "v001"'
+ '--data-level "l1a" '
+ '--descriptor "all" '
+ ' --start-date "20231212" '
+ '--repointing "repoint12345" '
+ '--version "v001" '
  '--dependency "['
- ' {"instrument": "mag",'
- ' "data_level": "l0",'
- ' "descriptor": "sci",'
- ' "version": "v001",'
- ' "start_date": "20231212"'
- '}]" --upload-to-sdc"'
+ " {"
+ ' "type": "ancillary",'
+ ' "files": ['
+ ' "imap_mag_l1b-cal_20250101_v001.cdf",'
+ ' "imap_mag_l1b-cal_20250103_20250104_v002.cdf"'
+ " ]"
+ " },"
+ " {"
+ ' "type": "science",'
+ ' "files": ['
+ ' "imap_idex_l2_sci_20240312_v000.cdf",'
+ ' "imap_idex_l2_sci_20240312_v001.cdf"'
+ " ]"
+ " }"
+ "]"
+ ' --upload-to-sdc"'
  )
  instrument_help = (
  "The instrument to process. Acceptable values are: "
@@ -118,11 +144,23 @@ def _parse_args() -> argparse.Namespace:
  )
  dependency_help = (
  "Dependency information in str format."
- "Example: '[{'instrument': 'mag',"
- "'data_level': 'l0',"
- "'descriptor': 'sci',"
- "'version': 'v001',"
- "'start_date': '20231212'}]"
+ "Example:"
+ "'["
+ " {"
+ ' "type": "ancillary",'
+ ' "files": ['
+ ' "imap_mag_l1b-cal_20250101_v001.cdf",'
+ ' "imap_mag_l1b-cal_20250103_20250104_v002.cdf"'
+ " ]"
+ " },"
+ " {"
+ ' "type": "science",'
+ ' "files": ['
+ ' "imap_idex_l2_sci_20240312_v000.cdf",'
+ ' "imap_idex_l2_sci_20240312_v001.cdf"'
+ " ]"
+ " }"
+ "]'"
  )

  parser = argparse.ArgumentParser(prog="imap_cli", description=description)
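Note on the new dependency format (illustrative, not part of the diff): the hunks above replace the old per-file dependency dictionaries with a JSON list of typed file groups that ProcessingInputCollection understands. A minimal sketch of building and consuming such a string, using only the calls that appear elsewhere in this diff (deserialize, download_all_files, get_file_paths); exact signatures in the released imap_data_access package may differ:

    import json

    from imap_data_access.processing_input import ProcessingInputCollection

    # Assumption: deserialize() accepts the same JSON text that serialize() emits.
    dependency_str = json.dumps(
        [
            {"type": "ancillary", "files": ["imap_mag_l1b-cal_20250101_v001.cdf"]},
            {"type": "science", "files": ["imap_mag_l1a_norm-magi_20240312_v000.cdf"]},
        ]
    )

    # Mirrors ProcessInstrument.pre_processing() further down: parse the
    # --dependency string, fetch the files, then filter them by instrument.
    collection = ProcessingInputCollection()
    collection.deserialize(dependency_str)
    collection.download_all_files()
    mag_files = collection.get_file_paths(source="mag")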
@@ -160,7 +198,7 @@ def _parse_args() -> argparse.Namespace:
  parser.add_argument(
  "--start-date",
  type=str,
- required=True,
+ required=False,
  help="Start time for the output data. Format: YYYYMMDD",
  )

@@ -168,10 +206,17 @@ def _parse_args() -> argparse.Namespace:
  "--end-date",
  type=str,
  required=False,
- help="End time for the output data. If not provided, start_time will be used "
+ help="DEPRECATED: Do not use this."
+ "End time for the output data. If not provided, start_time will be used "
  "for end_time. Format: YYYYMMDD",
  )
- # TODO: Will need to add some way of including pointing numbers
+ parser.add_argument(
+ "--repointing",
+ type=str,
+ required=False,
+ help="Repointing time for output data. Replaces start_time if both are "
+ "provided. Format: repoint#####",
+ )

  parser.add_argument(
  "--version",
@@ -217,6 +262,30 @@ def _validate_args(args: argparse.Namespace) -> None:
  " instrument, valid levels are: "
  f"{imap_processing.PROCESSING_LEVELS[args.instrument]}"
  )
+ if args.start_date is None and args.repointing is None:
+ raise ValueError(
+ "Either start_date or repointing must be provided. "
+ "Run 'imap_cli -h' for more information."
+ )
+
+ if (
+ args.start_date is not None
+ and not imap_data_access.ScienceFilePath.is_valid_date(args.start_date)
+ ):
+ raise ValueError(f"{args.start_date} is not a valid date, use format YYYYMMDD.")
+
+ if (
+ args.repointing is not None
+ and not imap_data_access.ScienceFilePath.is_valid_repointing(args.repointing)
+ ):
+ raise ValueError(
+ f"{args.repointing} is not a valid repointing, use format repoint#####."
+ )
+
+ if getattr(args, "end_date", None) is not None:
+ logger.warning(
+ "The end_date argument is deprecated and will be ignored. Do not use."
+ )


  class ProcessInstrument(ABC):
@@ -231,17 +300,41 @@ class ProcessInstrument(ABC):
  The descriptor of the data to process (e.g. ``sci``).
  dependency_str : str
  A string representation of the dependencies for the instrument in the
- format: "[{
- 'instrument': 'mag',
- 'data_level': 'l0',
- 'descriptor': 'sci',
- 'version': 'v00-01',
- 'start_date': '20231212'
- }]".
+ format:
+ '[
+ {
+ "type": "ancillary",
+ "files": [
+ "imap_mag_l1b-cal_20250101_v001.cdf",
+ "imap_mag_l1b-cal_20250103_20250104_v002.cdf"
+ ]
+ },
+ {
+ "type": "ancillary",
+ "files": [
+ "imap_mag_l1b-lut_20250101_v001.cdf",
+ ]
+ },
+ {
+ "type": "science",
+ "files": [
+ "imap_mag_l1a_norm-magi_20240312_v000.cdf",
+ "imap_mag_l1a_norm-magi_20240312_v001.cdf"
+ ]
+ },
+ {
+ "type": "science",
+ "files": [
+ "imap_idex_l2_sci_20240312_v000.cdf",
+ "imap_idex_l2_sci_20240312_v001.cdf"
+ ]
+ }
+ ]'
+ This is what ProcessingInputCollection.serialize() outputs.
  start_date : str
  The start date for the output data in YYYYMMDD format.
- end_date : str
- The end date for the output data in YYYYMMDD format.
+ repointing : str
+ The repointing for the output data in the format 'repoint#####'.
  version : str
  The version of the data in vXXX format.
  upload_to_sdc : bool
@@ -253,66 +346,22 @@ class ProcessInstrument(ABC):
  data_level: str,
  data_descriptor: str,
  dependency_str: str,
- start_date: str,
- end_date: str,
+ start_date: str | None,
+ repointing: str | None,
  version: str,
  upload_to_sdc: bool,
  ) -> None:
  self.data_level = data_level
  self.descriptor = data_descriptor

- # Convert string into a dictionary
- self.dependencies = loads(dependency_str.replace("'", '"'))
- self._dependency_list: list = []
+ self.dependency_str = dependency_str

  self.start_date = start_date
- self.end_date = end_date
- if not end_date:
- self.end_date = start_date
+ self.repointing = repointing

  self.version = version
  self.upload_to_sdc = upload_to_sdc

- def download_dependencies(self) -> list[Path]:
- """
- Download the dependencies for the instrument.
-
- Returns
- -------
- file_list : list[Path]
- A list of file paths to the downloaded dependencies.
- """
- file_list = []
- for dependency in self.dependencies:
- try:
- # TODO: Validate dep dict
- # TODO: determine what dependency information is optional
- return_query = imap_data_access.query(
- start_date=dependency["start_date"],
- end_date=dependency.get("end_date", None),
- instrument=dependency["instrument"],
- data_level=dependency["data_level"],
- version=dependency["version"],
- descriptor=dependency["descriptor"],
- )
- except HTTPError as e:
- raise ValueError(f"Unable to download files from {dependency}") from e
-
- if not return_query:
- raise FileNotFoundError(
- f"File not found for required dependency "
- f"{dependency} while attempting to create file."
- f"This should never occur "
- f"in normal processing."
- )
- file_list.extend(
- [
- imap_data_access.download(query_return["file_path"])
- for query_return in return_query
- ]
- )
- return file_list
-
  def upload_products(self, products: list[Path]) -> None:
  """
  Upload data products to the IMAP SDC.
@@ -347,10 +396,10 @@ class ProcessInstrument(ABC):
  logger.info("Beginning actual processing")
  products = self.do_processing(dependencies)
  logger.info("Beginning postprocessing (uploading data products)")
- self.post_processing(products)
+ self.post_processing(products, dependencies)
  logger.info("Processing complete")

- def pre_processing(self) -> list[Path]:
+ def pre_processing(self) -> ProcessingInputCollection:
  """
  Complete pre-processing.

@@ -360,24 +409,29 @@ class ProcessInstrument(ABC):

  Returns
  -------
- list[Path]
- List of dependencies downloaded from the IMAP SDC.
+ dependencies : ProcessingInputCollection
+ Object containing dependencies to process.
  """
- self._dependency_list = self.download_dependencies()
- return self._dependency_list
+ dependencies = ProcessingInputCollection()
+ dependencies.deserialize(self.dependency_str)
+ dependencies.download_all_files()
+ return dependencies

  @abstractmethod
- def do_processing(self, dependencies: list) -> list[xr.Dataset]:
+ def do_processing(
+ self, dependencies: ProcessingInputCollection
+ ) -> list[xr.Dataset]:
  """
  Abstract method that processes the IMAP processing steps.

- All child classes must implement this method. Input and outputs are
- typically lists of file paths but are free to any list.
+ All child classes must implement this method. Input is
+ object containing dependencies and output is
+ list of xr.Dataset containing processed data(s).

  Parameters
  ----------
- dependencies : list
- List of dependencies to process.
+ dependencies : ProcessingInputCollection
+ Object containing dependencies to process.

  Returns
  -------
@@ -386,7 +440,9 @@ class ProcessInstrument(ABC):
  """
  raise NotImplementedError

- def post_processing(self, datasets: list[xr.Dataset]) -> None:
+ def post_processing(
+ self, datasets: list[xr.Dataset], dependencies: ProcessingInputCollection
+ ) -> None:
  """
  Complete post-processing.

@@ -395,36 +451,64 @@ class ProcessInstrument(ABC):
  Child classes can override this method to customize the
  post-processing actions.

+ The values from start_date and/or repointing are used to generate the output
+ file name if supplied. All other filename fields are derived from the
+ dataset attributes.
+
  Parameters
  ----------
  datasets : list[xarray.Dataset]
  A list of datasets (products) produced by do_processing method.
+ dependencies : ProcessingInputCollection
+ Object containing dependencies to process.
  """
  if len(datasets) == 0:
  logger.info("No products to write to CDF file.")
  return

  logger.info("Writing products to local storage")
- logger.info("Parent files: %s", self._dependency_list)

- products = [
- write_cdf(dataset, parent_files=self._dependency_list)
- for dataset in datasets
- ]
+ logger.info("Dataset version: %s", self.version)
+ # Parent files used to create these datasets
+ # https://spdf.gsfc.nasa.gov/istp_guide/gattributes.html.
+ parent_files = [p.name for p in dependencies.get_file_paths()]
+ logger.info("Parent files: %s", parent_files)
+
+ # Format version to vXXX if not already in that format. Eg.
+ # If version is passed in as 1 or 001, it will be converted to v001.
+ r = re.compile(r"v\d{3}")
+ if not isinstance(self.version, str) or r.match(self.version) is None:
+ self.version = f"v{int(self.version):03d}" # vXXX
+
+ # Start date is either the start date or the repointing.
+ # if it is the repointing, default to using the first epoch in the file as
+ # start_date.
+ # If it is start_date, skip repointing in the output filename.
+
+ products = []
+ for ds in datasets:
+ ds.attrs["Data_version"] = self.version
+ ds.attrs["Repointing"] = self.repointing
+ ds.attrs["Start_date"] = self.start_date
+ ds.attrs["Parents"] = parent_files
+ products.append(write_cdf(ds))
+
  self.upload_products(products)


  class Codice(ProcessInstrument):
  """Process CoDICE."""

- def do_processing(self, dependencies: list) -> list[xr.Dataset]:
+ def do_processing(
+ self, dependencies: ProcessingInputCollection
+ ) -> list[xr.Dataset]:
  """
  Perform CoDICE specific processing.

  Parameters
  ----------
- dependencies : list
- List of dependencies to process.
+ dependencies : ProcessingInputCollection
+ Object containing dependencies to process.

  Returns
  -------
@@ -434,24 +518,27 @@ class Codice(ProcessInstrument):
  print(f"Processing CoDICE {self.data_level}")
  datasets: list[xr.Dataset] = []

+ dependency_list = dependencies.processing_input
  if self.data_level == "l1a":
- if len(dependencies) > 1:
+ if len(dependency_list) > 1:
  raise ValueError(
  f"Unexpected dependencies found for CoDICE L1a:"
- f"{dependencies}. Expected only one dependency."
+ f"{dependency_list}. Expected only one dependency."
  )
  # process data
- datasets = [codice_l1a.process_codice_l1a(dependencies[0], self.version)]
+ science_files = dependencies.get_file_paths(source="codice")
+ datasets = codice_l1a.process_codice_l1a(science_files[0])

  if self.data_level == "l1b":
- if len(dependencies) > 1:
+ if len(dependency_list) > 1:
  raise ValueError(
  f"Unexpected dependencies found for CoDICE L1b:"
- f"{dependencies}. Expected only one dependency."
+ f"{dependency_list}. Expected only one dependency."
  )
  # process data
- dependency = load_cdf(dependencies[0])
- datasets = [codice_l1b.process_codice_l1b(dependency, self.version)]
+ science_files = dependencies.get_file_paths(source="codice")
+ dependency = load_cdf(science_files[0])
+ datasets = [codice_l1b.process_codice_l1b(dependency)]

  return datasets

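Aside (not part of the diff): the post_processing hunk above normalizes whatever --version value the CLI received into the vXXX form used in output file names. A small standalone sketch of that logic; the regex and formatting are copied from the hunk, and the helper name normalize_version is hypothetical:

    import re

    def normalize_version(version):
        # Accept 1, "001", or "v001" and always return the vXXX form.
        if not isinstance(version, str) or re.match(r"v\d{3}", version) is None:
            version = f"v{int(version):03d}"
        return version

    assert normalize_version(1) == "v001"
    assert normalize_version("013") == "v013"
    assert normalize_version("v002") == "v002"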
@@ -459,14 +546,16 @@
  class Glows(ProcessInstrument):
  """Process GLOWS."""

- def do_processing(self, dependencies: list) -> list[xr.Dataset]:
+ def do_processing(
+ self, dependencies: ProcessingInputCollection
+ ) -> list[xr.Dataset]:
  """
  Perform GLOWS specific processing.

  Parameters
  ----------
- dependencies : list
- List of dependencies to process.
+ dependencies : ProcessingInputCollection
+ Object containing dependencies to process.

  Returns
  -------
@@ -476,31 +565,35 @@ class Glows(ProcessInstrument):
  print(f"Processing GLOWS {self.data_level}")
  datasets: list[xr.Dataset] = []

+ dependency_list = dependencies.processing_input
  if self.data_level == "l1a":
- if len(dependencies) > 1:
+ if len(dependency_list) > 1:
  raise ValueError(
  f"Unexpected dependencies found for GLOWS L1A:"
- f"{dependencies}. Expected only one input dependency."
+ f"{dependency_list}. Expected only one input dependency."
  )
- datasets = glows_l1a(dependencies[0], self.version)
+ science_files = dependencies.get_file_paths(source="glows")
+ datasets = glows_l1a(science_files[0])

  if self.data_level == "l1b":
- if len(dependencies) > 1:
+ if len(dependency_list) > 1:
  raise ValueError(
  f"Unexpected dependencies found for GLOWS L1B:"
- f"{dependencies}. Expected at least one input dependency."
+ f"{dependency_list}. Expected at least one input dependency."
  )
- input_dataset = load_cdf(dependencies[0])
- datasets = [glows_l1b(input_dataset, self.version)]
+ science_files = dependencies.get_file_paths(source="glows")
+ input_dataset = load_cdf(science_files[0])
+ datasets = [glows_l1b(input_dataset)]

  if self.data_level == "l2":
- if len(dependencies) > 1:
+ if len(dependency_list) > 1:
  raise ValueError(
  f"Unexpected dependencies found for GLOWS L2:"
- f"{dependencies}. Expected only one input dependency."
+ f"{dependency_list}. Expected only one input dependency."
  )
- input_dataset = load_cdf(dependencies[0])
- datasets = glows_l2(input_dataset, self.version)
+ science_files = dependencies.get_file_paths(source="glows")
+ input_dataset = load_cdf(science_files[0])
+ datasets = glows_l2(input_dataset)

  return datasets

@@ -508,14 +601,16 @@
  class Hi(ProcessInstrument):
  """Process IMAP-Hi."""

- def do_processing(self, dependencies: list) -> list[xr.Dataset]:
+ def do_processing(
+ self, dependencies: ProcessingInputCollection
+ ) -> list[xr.Dataset]:
  """
  Perform IMAP-Hi specific processing.

  Parameters
  ----------
- dependencies : list
- List of dependencies to process.
+ dependencies : ProcessingInputCollection
+ Object containing dependencies to process.

  Returns
  -------
@@ -525,27 +620,34 @@ class Hi(ProcessInstrument):
525
620
  print(f"Processing IMAP-Hi {self.data_level}")
526
621
  datasets: list[xr.Dataset] = []
527
622
 
623
+ dependency_list = dependencies.processing_input
528
624
  if self.data_level == "l1a":
529
625
  # File path is expected output file path
530
- if len(dependencies) > 1:
626
+ if len(dependency_list) > 1:
531
627
  raise ValueError(
532
628
  f"Unexpected dependencies found for Hi L1A:"
533
- f"{dependencies}. Expected only one dependency."
629
+ f"{dependency_list}. Expected only one dependency."
534
630
  )
535
- datasets = hi_l1a.hi_l1a(dependencies[0], self.version)
631
+ science_files = dependencies.get_file_paths(source="hi")
632
+ datasets = hi_l1a.hi_l1a(science_files[0])
536
633
  elif self.data_level == "l1b":
537
- dependencies = [load_cdf(dependency) for dependency in dependencies]
538
- datasets = [hi_l1b.hi_l1b(dependencies[0], self.version)]
634
+ l0_files = dependencies.get_file_paths(source="hi", descriptor="raw")
635
+ if l0_files:
636
+ datasets = hi_l1b.hi_l1b(l0_files[0])
637
+ else:
638
+ l1a_files = dependencies.get_file_paths(source="hi")
639
+ datasets = hi_l1b.hi_l1b(load_cdf(l1a_files[0]))
539
640
  elif self.data_level == "l1c":
540
641
  # TODO: Add PSET calibration product config file dependency and remove
541
642
  # below injected dependency
542
- dependencies.append(
643
+ hi_dependencies = dependencies.get_file_paths(source="hi")
644
+ hi_dependencies.append(
543
645
  Path(__file__).parent
544
646
  / "tests/hi/test_data/l1"
545
647
  / "imap_his_pset-calibration-prod-config_20240101_v001.csv"
546
648
  )
547
- dependencies[0] = load_cdf(dependencies[0])
548
- datasets = [hi_l1c.hi_l1c(dependencies, self.version)]
649
+ hi_dependencies[0] = load_cdf(hi_dependencies[0])
650
+ datasets = hi_l1c.hi_l1c(hi_dependencies)
549
651
  else:
550
652
  raise NotImplementedError(
551
653
  f"Hi processing not implemented for level {self.data_level}"
@@ -556,14 +658,16 @@ class Hi(ProcessInstrument):
556
658
  class Hit(ProcessInstrument):
557
659
  """Process HIT."""
558
660
 
559
- def do_processing(self, dependencies: list) -> list[xr.Dataset]:
661
+ def do_processing(
662
+ self, dependencies: ProcessingInputCollection
663
+ ) -> list[xr.Dataset]:
560
664
  """
561
665
  Perform HIT specific processing.
562
666
 
563
667
  Parameters
564
668
  ----------
565
- dependencies : list
566
- List of dependencies to process.
669
+ dependencies : ProcessingInputCollection
670
+ Object containing dependencies to process.
567
671
 
568
672
  Returns
569
673
  -------
@@ -573,27 +677,47 @@ class Hit(ProcessInstrument):
  print(f"Processing HIT {self.data_level}")
  datasets: list[xr.Dataset] = []
 
+ dependency_list = dependencies.processing_input
  if self.data_level == "l1a":
- if len(dependencies) > 1:
+ if len(dependency_list) > 1:
  raise ValueError(
  f"Unexpected dependencies found for HIT L1A:"
- f"{dependencies}. Expected only one dependency."
+ f"{dependency_list}. Expected only one dependency."
  )
  # process data to L1A products
- datasets = hit_l1a(dependencies[0], self.version)
+ science_files = dependencies.get_file_paths(source="hit")
+ datasets = hit_l1a(science_files[0])
 
  elif self.data_level == "l1b":
+ if len(dependency_list) > 1:
+ raise ValueError(
+ f"Unexpected dependencies found for HIT L1B:"
+ f"{dependency_list}. Expected only one dependency."
+ )
  data_dict = {}
- for i, dependency in enumerate(dependencies):
- if self.dependencies[i]["data_level"] == "l0":
- # Add path to CCSDS file to process housekeeping
- data_dict["imap_hit_l0_raw"] = dependency
- else:
- # Add L1A datasets to process science data
- dataset = load_cdf(dependency)
- data_dict[dataset.attrs["Logical_source"]] = dataset
+ # TODO: Check this and update with new features as needed.
+ l0_files = dependencies.get_file_paths(source="hit", descriptor="raw")
+ l1a_files = dependencies.get_file_paths(source="hit")
+ if len(l0_files) > 0:
+ # Add path to CCSDS file to process housekeeping
+ data_dict["imap_hit_l0_raw"] = l0_files[0]
+ else:
+ # Add L1A dataset to process science data
+ l1a_dataset = load_cdf(l1a_files[0])
+ data_dict[l1a_dataset.attrs["Logical_source"]] = l1a_dataset
  # process data to L1B products
- datasets = hit_l1b(data_dict, self.version)
+ datasets = hit_l1b(data_dict)
+ elif self.data_level == "l2":
+ if len(dependency_list) > 1:
+ raise ValueError(
+ f"Unexpected dependencies found for HIT L2:"
+ f"{dependency_list}. Expected only one dependency."
+ )
+ # Add L1B dataset to process science data
+ science_files = dependencies.get_file_paths(source="hit")
+ l1b_dataset = load_cdf(science_files[0])
+ # process data to L2 products
+ datasets = hit_l2(l1b_dataset)
 
  return datasets
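The HIT hunk above shows the pattern this release applies across instruments: do_processing now receives a ProcessingInputCollection instead of a plain list, validates the input count via its processing_input attribute, and selects concrete file paths with get_file_paths(source=..., descriptor=...). A minimal sketch of the L1B branch in isolation, using only the calls visible in this diff (the helper name build_hit_l1b_inputs is illustrative, not part of the package):

    def build_hit_l1b_inputs(dependencies):
        # Illustrative sketch only: mirrors the HIT L1B branch above.
        data_dict = {}
        # Raw CCSDS packets are selected by descriptor; L1A CDFs by source alone.
        l0_files = dependencies.get_file_paths(source="hit", descriptor="raw")
        if l0_files:
            data_dict["imap_hit_l0_raw"] = l0_files[0]
        else:
            l1a_files = dependencies.get_file_paths(source="hit")
            l1a_dataset = load_cdf(l1a_files[0])
            data_dict[l1a_dataset.attrs["Logical_source"]] = l1a_dataset
        return data_dict

Note also that the hit_l1a/hit_l1b/hit_l2 calls no longer receive self.version; version handling appears to have moved out of the per-instrument science functions in this release.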
 
@@ -601,14 +725,16 @@ class Hit(ProcessInstrument):
  class Idex(ProcessInstrument):
  """Process IDEX."""
 
- def do_processing(self, dependencies: list) -> list[xr.Dataset]:
+ def do_processing(
+ self, dependencies: ProcessingInputCollection
+ ) -> list[xr.Dataset]:
  """
  Perform IDEX specific processing.
 
  Parameters
  ----------
- dependencies : list
- List of dependencies to process.
+ dependencies : ProcessingInputCollection
+ Object containing dependencies to process.
 
  Returns
  -------
@@ -618,37 +744,43 @@ class Idex(ProcessInstrument):
  print(f"Processing IDEX {self.data_level}")
  datasets: list[xr.Dataset] = []
 
+ dependency_list = dependencies.processing_input
  if self.data_level == "l1a":
- if len(dependencies) > 1:
+ if len(dependency_list) > 1:
  raise ValueError(
- f"Unexpected dependencies found for IDEX L1a:"
- f"{dependencies}. Expected only one dependency."
+ f"Unexpected dependencies found for IDEX L1A:"
+ f"{dependency_list}. Expected only one dependency."
  )
- # read CDF file
- datasets = [PacketParser(dependencies[0], self.version).data]
+ # get l0 file
+ science_files = dependencies.get_file_paths(source="idex")
+ datasets = PacketParser(science_files[0]).data
  elif self.data_level == "l1b":
- if len(dependencies) > 1:
+ if len(dependency_list) > 1:
  raise ValueError(
- f"Unexpected dependencies found for IDEX L1b:"
- f"{dependencies}. Expected only one dependency."
+ f"Unexpected dependencies found for IDEX L1B:"
+ f"{dependency_list}. Expected only one science dependency."
  )
+ # get CDF file
+ science_files = dependencies.get_file_paths(source="idex")
  # process data
- dependency = load_cdf(dependencies[0])
- datasets = [idex_l1b(dependency, self.version)]
+ dependency = load_cdf(science_files[0])
+ datasets = [idex_l1b(dependency)]
  return datasets
 
 
  class Lo(ProcessInstrument):
  """Process IMAP-Lo."""
 
- def do_processing(self, dependencies: list) -> list[xr.Dataset]:
+ def do_processing(
+ self, dependencies: ProcessingInputCollection
+ ) -> list[xr.Dataset]:
  """
  Perform IMAP-Lo specific processing.
 
  Parameters
  ----------
- dependencies : list
- List of dependencies to process.
+ dependencies : ProcessingInputCollection
+ Object containing dependencies to process.
 
  Returns
  -------
@@ -657,30 +789,39 @@ class Lo(ProcessInstrument):
  """
  print(f"Processing IMAP-Lo {self.data_level}")
  datasets: list[xr.Dataset] = []
+ dependency_list = dependencies.processing_input
  if self.data_level == "l1a":
  # L1A packet / products are 1 to 1. Should only have
  # one dependency file
- if len(dependencies) > 1:
+ if len(dependency_list) > 1:
  raise ValueError(
  f"Unexpected dependencies found for IMAP-Lo L1A:"
- f"{dependencies}. Expected only one dependency."
+ f"{dependency_list}. Expected only one dependency."
  )
- datasets = lo_l1a.lo_l1a(dependencies[0], self.version)
+ science_files = dependencies.get_file_paths(source="lo")
+ datasets = lo_l1a.lo_l1a(science_files[0])
 
  elif self.data_level == "l1b":
  data_dict = {}
- for dependency in dependencies:
- dataset = load_cdf(dependency)
+ # TODO: Check this and update with new features as needed.
+ for input_type in dependencies.processing_input:
+ science_files = dependencies.get_file_paths(
+ source="lo", descriptor=input_type.descriptor
+ )
+ dataset = load_cdf(science_files[0])
  data_dict[dataset.attrs["Logical_source"]] = dataset
- datasets = lo_l1b.lo_l1b(data_dict, self.version)
+ datasets = lo_l1b.lo_l1b(data_dict)
 
  elif self.data_level == "l1c":
  data_dict = {}
- for dependency in dependencies:
- dataset = load_cdf(dependency)
+ for input_type in dependencies.processing_input:
+ science_files = dependencies.get_file_paths(
+ source="lo", descriptor=input_type.descriptor
+ )
+ dataset = load_cdf(science_files[0])
  data_dict[dataset.attrs["Logical_source"]] = dataset
  # TODO: This is returning the wrong type
- datasets = lo_l1c.lo_l1c(data_dict, self.version)
+ datasets = lo_l1c.lo_l1c(data_dict)
 
  return datasets
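For IMAP-Lo L1B and L1C, each loaded CDF is keyed by its Logical_source attribute so the downstream science functions can look inputs up by name. A sketch of that loop, again restricted to the calls shown in this diff (the helper name collect_lo_datasets is illustrative):

    def collect_lo_datasets(dependencies):
        # Illustrative sketch only: one CDF per input descriptor, keyed by Logical_source.
        data_dict = {}
        for input_type in dependencies.processing_input:
            files = dependencies.get_file_paths(
                source="lo", descriptor=input_type.descriptor
            )
            dataset = load_cdf(files[0])
            data_dict[dataset.attrs["Logical_source"]] = dataset
        return data_dict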
 
@@ -688,14 +829,16 @@ class Lo(ProcessInstrument):
  class Mag(ProcessInstrument):
  """Process MAG."""
 
- def do_processing(self, dependencies: list[Path]) -> list[xr.Dataset]:
+ def do_processing(
+ self, dependencies: ProcessingInputCollection
+ ) -> list[xr.Dataset]:
  """
  Perform MAG specific processing.
 
  Parameters
  ----------
- dependencies : list[Path]
- List of dependencies to process.
+ dependencies : ProcessingInputCollection
+ Object containing dependencies to process.
 
  Returns
  -------
@@ -705,52 +848,119 @@ class Mag(ProcessInstrument):
  print(f"Processing MAG {self.data_level}")
  datasets: list[xr.Dataset] = []
 
+ dependency_list = dependencies.processing_input
+ science_files = dependencies.get_file_paths(source="mag")
  if self.data_level == "l1a":
  # File path is expected output file path
- if len(dependencies) > 1:
+ if len(dependency_list) > 1:
  raise ValueError(
  f"Unexpected dependencies found for MAG L1A:"
- f"{dependencies}. Expected only one dependency."
+ f"{dependency_list}. Expected only one dependency."
  )
  # TODO: Update this type
- datasets = mag_l1a(dependencies[0], data_version=self.version)
+
+ datasets = mag_l1a(science_files[0])
 
  if self.data_level == "l1b":
- if len(dependencies) > 1:
+ if len(dependency_list) > 1:
  raise ValueError(
  f"Unexpected dependencies found for MAG L1B:"
- f"{dependencies}. Expected only one dependency."
+ f"{dependency_list}. Expected only one dependency."
  )
- input_data = load_cdf(dependencies[0])
- datasets = [mag_l1b(input_data, self.version)]
+ input_data = load_cdf(science_files[0])
+ datasets = [mag_l1b(input_data)]
 
  if self.data_level == "l1c":
- # L1C depends on matching norm/burst files: eg burst-magi and norm-magi or
- # burst-mago and norm-mago
- if len(dependencies) != 2:
+ input_data = [load_cdf(dep) for dep in science_files]
+ # Input datasets can be in any order, and are validated within mag_l1c
+ if len(input_data) == 1:
+ datasets = [mag_l1c(input_data[0])]
+ elif len(input_data) == 2:
+ datasets = [mag_l1c(input_data[0], input_data[1])]
+ else:
  raise ValueError(
  f"Invalid dependencies found for MAG L1C:"
- f"{dependencies}. Expected two dependencies."
+ f"{dependencies}. Expected one or two dependencies."
  )
 
- input_data = [load_cdf(dep) for dep in dependencies]
- # Input datasets can be in any order
- datasets = [mag_l1c(input_data[0], input_data[1], self.version)]
+ if self.data_level == "l2":
+ # TODO: Overwrite dependencies with versions from offsets file
+ # TODO: Ensure that parent_files attribute works with that
+ input_data = load_cdf(science_files[0])
+ # TODO: use ancillary from input
+ calibration_dataset = load_cdf(
+ Path(__file__).parent
+ / "tests"
+ / "mag"
+ / "validation"
+ / "calibration"
+ / "imap_mag_l2-calibration-matrices_20251017_v004.cdf"
+ )
+
+ offset_dataset = load_cdf(
+ Path(__file__).parent
+ / "tests"
+ / "mag"
+ / "validation"
+ / "calibration"
+ / "imap_mag_l2-offsets-norm_20251017_20251017_v001.cdf"
+ )
+ # TODO: Test data missing
+ datasets = mag_l2(calibration_dataset, offset_dataset, input_data)
+
+ return datasets
+
+
+ class Spacecraft(ProcessInstrument):
+ """Process Spacecraft data."""
+
+ def do_processing(
+ self, dependencies: ProcessingInputCollection
+ ) -> list[xr.Dataset]:
+ """
+ Perform Spacecraft specific processing.
+
+ Parameters
+ ----------
+ dependencies : ProcessingInputCollection
+ Object containing dependencies to process.
+
+ Returns
+ -------
+ datasets : xr.Dataset
+ Xr.Dataset of products.
+ """
+ print(f"Processing Spacecraft {self.data_level}")
+
+ if self.data_level != "l1a":
+ raise NotImplementedError(
+ f"Spacecraft processing not implemented for level {self.data_level}"
+ )
 
+ # File path is expected output file path
+ input_files = dependencies.get_file_paths(source="spacecraft")
+ if len(input_files) > 1:
+ raise ValueError(
+ f"Unexpected dependencies found for Spacecraft L1A: "
+ f"{input_files}. Expected only one dependency."
+ )
+ datasets = list(quaternions.process_quaternions(input_files[0]))
  return datasets
 
 
  class Swapi(ProcessInstrument):
  """Process SWAPI."""
 
- def do_processing(self, dependencies: list) -> list[xr.Dataset]:
+ def do_processing(
+ self, dependencies: ProcessingInputCollection
+ ) -> list[xr.Dataset]:
  """
  Perform SWAPI specific processing.
 
  Parameters
  ----------
- dependencies : list
- List of dependencies to process.
+ dependencies : ProcessingInputCollection
+ Object containing dependencies to process.
 
  Returns
  -------
@@ -760,30 +970,53 @@ class Swapi(ProcessInstrument):
  print(f"Processing SWAPI {self.data_level}")
  datasets: list[xr.Dataset] = []
 
+ dependency_list = dependencies.processing_input
  if self.data_level == "l1":
  # For science, we expect l0 raw file and L1 housekeeping file
- if self.descriptor == "sci" and len(dependencies) != 2:
+ if self.descriptor == "sci" and len(dependency_list) != 2:
  raise ValueError(
  f"Unexpected dependencies found for SWAPI L1 science:"
- f"{dependencies}. Expected only two dependencies."
+ f"{dependency_list}. Expected only two dependencies."
  )
  # For housekeeping, we expect only L0 raw file
- if self.descriptor == "hk" and len(dependencies) != 1:
+ if self.descriptor == "hk" and len(dependency_list) != 1:
  raise ValueError(
  f"Unexpected dependencies found for SWAPI L1 housekeeping:"
- f"{dependencies}. Expected only one dependency."
+ f"{dependency_list}. Expected only one dependency."
  )
+
+ dependent_files = []
+ l0_files = dependencies.get_file_paths(descriptor="raw")
+ # TODO: handle multiples files as needed in the future
+ dependent_files.append(l0_files[0])
+
+ if self.descriptor == "sci":
+ # TODO: handle multiples files as needed in the future
+ hk_files = dependencies.get_file_paths(descriptor="hk")
+ dependent_files.append(hk_files[0])
+
  # process science or housekeeping data
- datasets = swapi_l1(dependencies, self.version)
+ datasets = swapi_l1(dependent_files)
  elif self.data_level == "l2":
- if len(dependencies) > 1:
+ if len(dependency_list) != 3:
  raise ValueError(
  f"Unexpected dependencies found for SWAPI L2:"
- f"{dependencies}. Expected only one dependency."
+ f"{dependency_list}. Expected 3 dependencies."
  )
  # process data
- l1_dataset = load_cdf(dependencies[0])
- datasets = [swapi_l2(l1_dataset, self.version)]
+ science_files = dependencies.get_file_paths(
+ source="swapi", descriptor="sci"
+ )
+ esa_table_files = dependencies.get_file_paths(
+ source="swapi", descriptor="esa-unit-conversion"
+ )
+ lut_notes_files = dependencies.get_file_paths(
+ source="swapi", descriptor="lut-notes"
+ )
+ esa_table_df = read_swapi_lut_table(esa_table_files[0])
+ lut_notes_df = read_swapi_lut_table(lut_notes_files[0])
+ l1_dataset = load_cdf(science_files[0])
+ datasets = [swapi_l2(l1_dataset, esa_table_df, lut_notes_df)]
 
  return datasets
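SWAPI L2 now expects three inputs rather than one: the L1 science CDF plus two lookup tables selected by descriptor and parsed with read_swapi_lut_table. Condensed, the new L2 branch amounts to roughly the following (a sketch, assuming all three descriptors are present in the collection):

    # Illustrative sketch only: assemble SWAPI L2 inputs by descriptor.
    sci_files = dependencies.get_file_paths(source="swapi", descriptor="sci")
    esa_files = dependencies.get_file_paths(source="swapi", descriptor="esa-unit-conversion")
    lut_files = dependencies.get_file_paths(source="swapi", descriptor="lut-notes")
    datasets = [
        swapi_l2(
            load_cdf(sci_files[0]),
            read_swapi_lut_table(esa_files[0]),
            read_swapi_lut_table(lut_files[0]),
        )
    ]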
 
@@ -791,14 +1024,16 @@ class Swapi(ProcessInstrument):
  class Swe(ProcessInstrument):
  """Process SWE."""
 
- def do_processing(self, dependencies: list) -> list[xr.Dataset]:
+ def do_processing(
+ self, dependencies: ProcessingInputCollection
+ ) -> list[xr.Dataset]:
  """
  Perform SWE specific processing.
 
  Parameters
  ----------
- dependencies : list
- List of dependencies to process.
+ dependencies : ProcessingInputCollection
+ Object containing dependencies to process.
 
  Returns
  -------
@@ -808,26 +1043,32 @@ class Swe(ProcessInstrument):
  print(f"Processing SWE {self.data_level}")
  datasets: list[xr.Dataset] = []
 
+ dependency_list = dependencies.processing_input
  if self.data_level == "l1a":
- if len(dependencies) > 1:
+ if len(dependency_list) > 1:
  raise ValueError(
  f"Unexpected dependencies found for SWE L1A:"
- f"{dependencies}. Expected only one dependency."
+ f"{dependency_list}. Expected only one dependency."
  )
- datasets = swe_l1a(str(dependencies[0]), data_version=self.version)
+ science_files = dependencies.get_file_paths(source="swe")
+ datasets = swe_l1a(str(science_files[0]))
  # Right now, we only process science data. Therefore,
  # we expect only one dataset to be returned.
 
  elif self.data_level == "l1b":
- if len(dependencies) > 1:
+ if len(dependency_list) != 4:
  raise ValueError(
  f"Unexpected dependencies found for SWE L1B:"
- f"{dependencies}. Expected only one dependency."
+ f"{dependency_list}. Expected exactly four dependencies."
  )
- # read CDF file
- l1a_dataset = load_cdf(dependencies[0])
- # TODO: read lookup table and in-flight calibration data here.
- datasets = swe_l1b(l1a_dataset, data_version=self.version)
+
+ science_files = dependencies.get_file_paths("swe", "sci")
+ if len(science_files) > 1:
+ raise ValueError(
+ "Multiple science files processing is not supported for SWE L1B."
+ )
+
+ datasets = swe_l1b(dependencies)
  else:
  print("Did not recognize data level. No processing done.")
 
@@ -837,14 +1078,16 @@ class Swe(ProcessInstrument):
  class Ultra(ProcessInstrument):
  """Process IMAP-Ultra."""
 
- def do_processing(self, dependencies: list) -> list[xr.Dataset]:
+ def do_processing(
+ self, dependencies: ProcessingInputCollection
+ ) -> list[xr.Dataset]:
  """
  Perform IMAP-Ultra specific processing.
 
  Parameters
  ----------
- dependencies : list
- List of dependencies to process.
+ dependencies : ProcessingInputCollection
+ Object containing dependencies to process.
 
  Returns
  -------
@@ -854,29 +1097,30 @@ class Ultra(ProcessInstrument):
  print(f"Processing IMAP-Ultra {self.data_level}")
  datasets: list[xr.Dataset] = []
 
+ dependency_list = dependencies.processing_input
  if self.data_level == "l1a":
  # File path is expected output file path
- if len(dependencies) > 1:
+ if len(dependency_list) > 1:
  raise ValueError(
  f"Unexpected dependencies found for ULTRA L1A:"
- f"{dependencies}. Expected only one dependency."
+ f"{dependency_list}. Expected only one dependency."
  )
-
- datasets = ultra_l1a.ultra_l1a(dependencies[0], self.version)
+ science_files = dependencies.get_file_paths(source="ultra")
+ datasets = ultra_l1a.ultra_l1a(science_files[0])
 
  elif self.data_level == "l1b":
  data_dict = {}
- for dependency in dependencies:
- dataset = load_cdf(dependency)
+ for dep in dependency_list:
+ dataset = load_cdf(dep.imap_file_paths[0])
  data_dict[dataset.attrs["Logical_source"]] = dataset
- datasets = ultra_l1b.ultra_l1b(data_dict, self.version)
+ datasets = ultra_l1b.ultra_l1b(data_dict)
 
  elif self.data_level == "l1c":
  data_dict = {}
- for dependency in dependencies:
- dataset = load_cdf(dependency)
+ for dep in dependency_list:
+ dataset = load_cdf(dep.imap_file_paths[0])
  data_dict[dataset.attrs["Logical_source"]] = dataset
- datasets = ultra_l1c.ultra_l1c(data_dict, self.version)
+ datasets = ultra_l1c.ultra_l1c(data_dict)
 
  return datasets
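IMAP-Ultra L1B/L1C take a slightly different route than Lo: instead of calling get_file_paths, each ProcessingInput's own imap_file_paths list is read directly. A sketch of that variant, based only on the calls shown above (the helper name collect_ultra_datasets is illustrative):

    def collect_ultra_datasets(dependencies):
        # Illustrative sketch only: first file of each dependency, keyed by Logical_source.
        data_dict = {}
        for dep in dependencies.processing_input:
            dataset = load_cdf(dep.imap_file_paths[0])
            data_dict[dataset.attrs["Logical_source"]] = dataset
        return data_dict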
 
@@ -899,7 +1143,7 @@ def main() -> None:
  args.descriptor,
  args.dependency,
  args.start_date,
- args.end_date,
+ args.repointing,
  args.version,
  args.upload_to_sdc,
  )