imap-processing 0.7.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (172)
  1. imap_processing/__init__.py +1 -1
  2. imap_processing/_version.py +2 -2
  3. imap_processing/ccsds/excel_to_xtce.py +36 -2
  4. imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +1 -1
  5. imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +145 -30
  6. imap_processing/cdf/config/imap_glows_l1b_variable_attrs.yaml +36 -36
  7. imap_processing/cdf/config/imap_hi_variable_attrs.yaml +136 -9
  8. imap_processing/cdf/config/imap_hit_global_cdf_attrs.yaml +14 -0
  9. imap_processing/cdf/config/imap_hit_l1a_variable_attrs.yaml +63 -1
  10. imap_processing/cdf/config/imap_hit_l1b_variable_attrs.yaml +9 -0
  11. imap_processing/cdf/config/imap_idex_global_cdf_attrs.yaml +14 -7
  12. imap_processing/cdf/config/imap_idex_l1a_variable_attrs.yaml +577 -235
  13. imap_processing/cdf/config/imap_idex_l1b_variable_attrs.yaml +326 -0
  14. imap_processing/cdf/config/imap_lo_l1a_variable_attrs.yaml +33 -23
  15. imap_processing/cdf/config/imap_mag_l1_variable_attrs.yaml +24 -28
  16. imap_processing/cdf/config/imap_ultra_l1a_variable_attrs.yaml +1 -0
  17. imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +137 -79
  18. imap_processing/cdf/config/imap_variable_schema.yaml +13 -0
  19. imap_processing/cdf/imap_cdf_manager.py +31 -27
  20. imap_processing/cdf/utils.py +3 -5
  21. imap_processing/cli.py +25 -14
  22. imap_processing/codice/codice_l1a.py +153 -63
  23. imap_processing/codice/constants.py +10 -10
  24. imap_processing/codice/decompress.py +10 -11
  25. imap_processing/codice/utils.py +1 -0
  26. imap_processing/glows/l1a/glows_l1a.py +1 -2
  27. imap_processing/glows/l1b/glows_l1b.py +3 -3
  28. imap_processing/glows/l1b/glows_l1b_data.py +59 -37
  29. imap_processing/glows/l2/glows_l2_data.py +123 -0
  30. imap_processing/hi/l1a/hi_l1a.py +4 -4
  31. imap_processing/hi/l1a/histogram.py +107 -109
  32. imap_processing/hi/l1a/science_direct_event.py +92 -225
  33. imap_processing/hi/l1b/hi_l1b.py +85 -11
  34. imap_processing/hi/l1c/hi_l1c.py +23 -1
  35. imap_processing/hi/packet_definitions/TLM_HI_COMBINED_SCI.xml +3994 -0
  36. imap_processing/hi/utils.py +1 -1
  37. imap_processing/hit/hit_utils.py +221 -0
  38. imap_processing/hit/l0/constants.py +118 -0
  39. imap_processing/hit/l0/decom_hit.py +100 -156
  40. imap_processing/hit/l1a/hit_l1a.py +170 -184
  41. imap_processing/hit/l1b/hit_l1b.py +33 -153
  42. imap_processing/ialirt/l0/process_codicelo.py +153 -0
  43. imap_processing/ialirt/l0/process_hit.py +5 -5
  44. imap_processing/ialirt/packet_definitions/ialirt_codicelo.xml +281 -0
  45. imap_processing/ialirt/process_ephemeris.py +212 -0
  46. imap_processing/idex/idex_l1a.py +65 -84
  47. imap_processing/idex/idex_l1b.py +192 -0
  48. imap_processing/idex/idex_variable_unpacking_and_eu_conversion.csv +33 -0
  49. imap_processing/idex/packet_definitions/idex_packet_definition.xml +97 -595
  50. imap_processing/lo/l0/decompression_tables/decompression_tables.py +17 -1
  51. imap_processing/lo/l0/lo_science.py +45 -13
  52. imap_processing/lo/l1a/lo_l1a.py +76 -8
  53. imap_processing/lo/packet_definitions/lo_xtce.xml +8344 -1849
  54. imap_processing/mag/l0/decom_mag.py +4 -3
  55. imap_processing/mag/l1a/mag_l1a.py +12 -13
  56. imap_processing/mag/l1a/mag_l1a_data.py +1 -2
  57. imap_processing/mag/l1b/mag_l1b.py +90 -7
  58. imap_processing/spice/geometry.py +156 -16
  59. imap_processing/spice/time.py +144 -2
  60. imap_processing/swapi/l1/swapi_l1.py +4 -4
  61. imap_processing/swapi/l2/swapi_l2.py +1 -1
  62. imap_processing/swapi/packet_definitions/swapi_packet_definition.xml +1535 -446
  63. imap_processing/swe/l1b/swe_l1b_science.py +8 -8
  64. imap_processing/swe/l2/swe_l2.py +134 -17
  65. imap_processing/tests/ccsds/test_data/expected_output.xml +2 -1
  66. imap_processing/tests/ccsds/test_excel_to_xtce.py +4 -4
  67. imap_processing/tests/cdf/test_imap_cdf_manager.py +0 -10
  68. imap_processing/tests/codice/conftest.py +1 -17
  69. imap_processing/tests/codice/data/imap_codice_l0_raw_20241110_v001.pkts +0 -0
  70. imap_processing/tests/codice/test_codice_l0.py +8 -2
  71. imap_processing/tests/codice/test_codice_l1a.py +127 -107
  72. imap_processing/tests/codice/test_codice_l1b.py +1 -0
  73. imap_processing/tests/codice/test_decompress.py +7 -7
  74. imap_processing/tests/conftest.py +100 -58
  75. imap_processing/tests/glows/conftest.py +6 -0
  76. imap_processing/tests/glows/test_glows_l1b.py +9 -9
  77. imap_processing/tests/glows/test_glows_l1b_data.py +9 -9
  78. imap_processing/tests/hi/test_data/l0/H90_NHK_20241104.bin +0 -0
  79. imap_processing/tests/hi/test_data/l0/H90_sci_cnt_20241104.bin +0 -0
  80. imap_processing/tests/hi/test_data/l0/H90_sci_de_20241104.bin +0 -0
  81. imap_processing/tests/hi/test_data/l1a/imap_hi_l1a_45sensor-de_20250415_v000.cdf +0 -0
  82. imap_processing/tests/hi/test_hi_l1b.py +73 -3
  83. imap_processing/tests/hi/test_hi_l1c.py +10 -2
  84. imap_processing/tests/hi/test_l1a.py +31 -58
  85. imap_processing/tests/hi/test_science_direct_event.py +58 -0
  86. imap_processing/tests/hi/test_utils.py +4 -3
  87. imap_processing/tests/hit/test_data/sci_sample1.ccsds +0 -0
  88. imap_processing/tests/hit/{test_hit_decom.py → test_decom_hit.py} +95 -36
  89. imap_processing/tests/hit/test_hit_l1a.py +299 -179
  90. imap_processing/tests/hit/test_hit_l1b.py +231 -24
  91. imap_processing/tests/hit/test_hit_utils.py +218 -0
  92. imap_processing/tests/hit/validation_data/hskp_sample_eu.csv +89 -0
  93. imap_processing/tests/hit/validation_data/sci_sample_raw1.csv +29 -0
  94. imap_processing/tests/ialirt/test_data/l0/apid01152.tlm +0 -0
  95. imap_processing/tests/ialirt/test_data/l0/imap_codice_l1a_lo-ialirt_20241110193700_v0.0.0.cdf +0 -0
  96. imap_processing/tests/ialirt/unit/test_process_codicelo.py +106 -0
  97. imap_processing/tests/ialirt/unit/test_process_ephemeris.py +109 -0
  98. imap_processing/tests/ialirt/unit/test_process_hit.py +9 -6
  99. imap_processing/tests/idex/conftest.py +2 -2
  100. imap_processing/tests/idex/imap_idex_l0_raw_20231214_v001.pkts +0 -0
  101. imap_processing/tests/idex/impact_14_tof_high_data.txt +4444 -4444
  102. imap_processing/tests/idex/test_idex_l0.py +4 -4
  103. imap_processing/tests/idex/test_idex_l1a.py +8 -2
  104. imap_processing/tests/idex/test_idex_l1b.py +126 -0
  105. imap_processing/tests/lo/test_lo_l1a.py +7 -16
  106. imap_processing/tests/lo/test_lo_science.py +69 -5
  107. imap_processing/tests/lo/test_pkts/imap_lo_l0_raw_20240803_v002.pkts +0 -0
  108. imap_processing/tests/lo/validation_data/Instrument_FM1_T104_R129_20240803_ILO_SCI_DE_dec_DN_with_fills.csv +1999 -0
  109. imap_processing/tests/mag/imap_mag_l1a_norm-magi_20251017_v001.cdf +0 -0
  110. imap_processing/tests/mag/test_mag_l1b.py +97 -7
  111. imap_processing/tests/spice/test_data/imap_ena_sim_metakernel.template +3 -1
  112. imap_processing/tests/spice/test_geometry.py +115 -9
  113. imap_processing/tests/spice/test_time.py +135 -6
  114. imap_processing/tests/swapi/test_swapi_decom.py +75 -69
  115. imap_processing/tests/swapi/test_swapi_l1.py +4 -4
  116. imap_processing/tests/swe/conftest.py +33 -0
  117. imap_processing/tests/swe/l1_validation/swe_l0_unpacked-data_20240510_v001_VALIDATION_L1B_v3.dat +4332 -0
  118. imap_processing/tests/swe/test_swe_l1b.py +29 -8
  119. imap_processing/tests/swe/test_swe_l2.py +64 -8
  120. imap_processing/tests/test_utils.py +2 -2
  121. imap_processing/tests/ultra/test_data/l0/ultra45_raw_sc_ultrarawimg_withFSWcalcs_FM45_40P_Phi28p5_BeamCal_LinearScan_phi2850_theta-000_20240207T102740.csv +3314 -3314
  122. imap_processing/tests/ultra/test_data/l1/dps_exposure_helio_45_E12.cdf +0 -0
  123. imap_processing/tests/ultra/test_data/l1/dps_exposure_helio_45_E24.cdf +0 -0
  124. imap_processing/tests/ultra/unit/test_de.py +113 -0
  125. imap_processing/tests/ultra/unit/test_spatial_utils.py +125 -0
  126. imap_processing/tests/ultra/unit/test_ultra_l1b.py +27 -3
  127. imap_processing/tests/ultra/unit/test_ultra_l1b_annotated.py +31 -10
  128. imap_processing/tests/ultra/unit/test_ultra_l1b_extended.py +55 -35
  129. imap_processing/tests/ultra/unit/test_ultra_l1c_pset_bins.py +10 -68
  130. imap_processing/ultra/constants.py +12 -3
  131. imap_processing/ultra/l1b/de.py +168 -30
  132. imap_processing/ultra/l1b/ultra_l1b_annotated.py +24 -10
  133. imap_processing/ultra/l1b/ultra_l1b_extended.py +46 -80
  134. imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +60 -144
  135. imap_processing/ultra/utils/spatial_utils.py +221 -0
  136. {imap_processing-0.7.0.dist-info → imap_processing-0.9.0.dist-info}/METADATA +15 -14
  137. {imap_processing-0.7.0.dist-info → imap_processing-0.9.0.dist-info}/RECORD +142 -139
  138. imap_processing/cdf/cdf_attribute_manager.py +0 -322
  139. imap_processing/cdf/config/shared/default_global_cdf_attrs_schema.yaml +0 -246
  140. imap_processing/cdf/config/shared/default_variable_cdf_attrs_schema.yaml +0 -466
  141. imap_processing/hi/l0/decom_hi.py +0 -24
  142. imap_processing/hi/packet_definitions/hi_packet_definition.xml +0 -482
  143. imap_processing/hit/l0/data_classes/housekeeping.py +0 -240
  144. imap_processing/hit/l0/data_classes/science_packet.py +0 -259
  145. imap_processing/hit/l0/utils/hit_base.py +0 -57
  146. imap_processing/tests/cdf/shared/default_global_cdf_attrs_schema.yaml +0 -246
  147. imap_processing/tests/cdf/shared/default_variable_cdf_attrs_schema.yaml +0 -466
  148. imap_processing/tests/cdf/test_cdf_attribute_manager.py +0 -353
  149. imap_processing/tests/codice/data/imap_codice_l0_hi-counters-aggregated_20240429_v001.pkts +0 -0
  150. imap_processing/tests/codice/data/imap_codice_l0_hi-counters-singles_20240429_v001.pkts +0 -0
  151. imap_processing/tests/codice/data/imap_codice_l0_hi-omni_20240429_v001.pkts +0 -0
  152. imap_processing/tests/codice/data/imap_codice_l0_hi-pha_20240429_v001.pkts +0 -0
  153. imap_processing/tests/codice/data/imap_codice_l0_hi-sectored_20240429_v001.pkts +0 -0
  154. imap_processing/tests/codice/data/imap_codice_l0_hskp_20100101_v001.pkts +0 -0
  155. imap_processing/tests/codice/data/imap_codice_l0_lo-counters-aggregated_20240429_v001.pkts +0 -0
  156. imap_processing/tests/codice/data/imap_codice_l0_lo-counters-singles_20240429_v001.pkts +0 -0
  157. imap_processing/tests/codice/data/imap_codice_l0_lo-nsw-angular_20240429_v001.pkts +0 -0
  158. imap_processing/tests/codice/data/imap_codice_l0_lo-nsw-priority_20240429_v001.pkts +0 -0
  159. imap_processing/tests/codice/data/imap_codice_l0_lo-nsw-species_20240429_v001.pkts +0 -0
  160. imap_processing/tests/codice/data/imap_codice_l0_lo-pha_20240429_v001.pkts +0 -0
  161. imap_processing/tests/codice/data/imap_codice_l0_lo-sw-angular_20240429_v001.pkts +0 -0
  162. imap_processing/tests/codice/data/imap_codice_l0_lo-sw-priority_20240429_v001.pkts +0 -0
  163. imap_processing/tests/codice/data/imap_codice_l0_lo-sw-species_20240429_v001.pkts +0 -0
  164. imap_processing/tests/hi/test_decom.py +0 -55
  165. imap_processing/tests/hi/test_l1a_sci_de.py +0 -72
  166. imap_processing/tests/idex/imap_idex_l0_raw_20230725_v001.pkts +0 -0
  167. imap_processing/tests/mag/imap_mag_l1a_burst-magi_20231025_v001.cdf +0 -0
  168. /imap_processing/{hi/l0/__init__.py → tests/glows/test_glows_l2_data.py} +0 -0
  169. /imap_processing/tests/hit/test_data/{imap_hit_l0_hk_20100105_v001.pkts → imap_hit_l0_raw_20100105_v001.pkts} +0 -0
  170. {imap_processing-0.7.0.dist-info → imap_processing-0.9.0.dist-info}/LICENSE +0 -0
  171. {imap_processing-0.7.0.dist-info → imap_processing-0.9.0.dist-info}/WHEEL +0 -0
  172. {imap_processing-0.7.0.dist-info → imap_processing-0.9.0.dist-info}/entry_points.txt +0 -0
@@ -129,7 +129,9 @@ def generate_dataset(
         direction.astype(str),
         name="direction_label",
         dims=["direction_label"],
-        attrs=attribute_manager.get_variable_attributes("direction_label"),
+        attrs=attribute_manager.get_variable_attributes(
+            "direction_label", check_schema=False
+        ),
     )
 
     # TODO: Epoch here refers to the start of the sample. Confirm that this is
@@ -154,11 +156,10 @@ def generate_dataset(
         coords={
             "epoch": epoch_time,
             "direction": direction,
-            "direction_label": direction_label,
         },
         attrs=attribute_manager.get_global_attributes(logical_id),
     )
-
+    output["direction_label"] = direction_label
     output["raw_vectors"] = raw_vectors
 
     for key, value in support_data.items():
@@ -8,7 +8,6 @@ import numpy as np
 import xarray as xr
 
 from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
-from imap_processing.cdf.utils import J2000_EPOCH
 from imap_processing.mag.constants import DataMode, PrimarySensor
 from imap_processing.mag.l0 import decom_mag
 from imap_processing.mag.l0.mag_l0_data import MagL0
@@ -17,7 +16,7 @@ from imap_processing.mag.l1a.mag_l1a_data import (
     MagL1aPacketProperties,
     TimeTuple,
 )
-from imap_processing.spice.time import met_to_j2000ns
+from imap_processing.spice.time import J2000_EPOCH, met_to_j2000ns
 
 logger = logging.getLogger(__name__)
 
@@ -158,7 +157,6 @@ def process_packets(
                 "timedelta64[ns]"
             )
         ).astype("datetime64[D]")
-
         primary_packet_properties = MagL1aPacketProperties(
             mag_l0.SHCOARSE,
             primary_start_time,
@@ -284,9 +282,7 @@ def generate_dataset(
 
     # TODO: Just leave time in datetime64 type with vector as dtype object to avoid this
     # Get the timestamp from the end of the vector
-    time_data = single_file_l1a.vectors[:, 4].astype(
-        np.dtype("datetime64[ns]"), copy=False
-    )
+    time_data = single_file_l1a.vectors[:, 4]
 
     compression = xr.DataArray(
         np.arange(2),
@@ -326,17 +322,21 @@ def generate_dataset(
     )
 
     direction_label = xr.DataArray(
-        direction.astype(str),
+        direction.values.astype(str),
         name="direction_label",
         dims=["direction_label"],
-        attrs=attribute_manager.get_variable_attributes("direction_label"),
+        attrs=attribute_manager.get_variable_attributes(
+            "direction_label", check_schema=False
+        ),
     )
 
     compression_label = xr.DataArray(
-        compression.astype(str),
+        compression.values.astype(str),
        name="compression_label",
         dims=["compression_label"],
-        attrs=attribute_manager.get_variable_attributes("compression_label"),
+        attrs=attribute_manager.get_variable_attributes(
+            "compression_label", check_schema=False
+        ),
     )
 
     output = xr.Dataset(
@@ -344,12 +344,11 @@ def generate_dataset(
             "epoch": epoch_time,
             "direction": direction,
             "compression": compression,
-            "direction_label": direction_label,
-            "compression_label": compression_label,
         },
         attrs=attribute_manager.get_global_attributes(logical_file_id),
     )
-
+    output["direction_label"] = direction_label
+    output["compression_label"] = compression_label
     output["vectors"] = vectors
     output["compression_flags"] = compression_flags

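These label changes move direction_label and compression_label out of the Dataset coordinates and attach them afterward as ordinary data variables, built from the coordinate values and given attributes fetched with check_schema=False. A minimal, hypothetical xarray sketch of the same pattern (variable names are illustrative, not the package's exact code):

    import numpy as np
    import xarray as xr

    # Stand-in for the MAG direction coordinate (x, y, z, range).
    direction = xr.DataArray(np.arange(4), name="direction", dims=["direction"])

    # Build the label variable from the coordinate's values, as the new code does.
    direction_label = xr.DataArray(
        direction.values.astype(str),
        name="direction_label",
        dims=["direction_label"],
    )

    # The label is assigned after construction instead of being passed in coords.
    output = xr.Dataset(coords={"direction": direction})
    output["direction_label"] = direction_label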
@@ -9,7 +9,6 @@ from math import floor
 import numpy as np
 import numpy.typing as npt
 
-from imap_processing.cdf.utils import J2000_EPOCH
 from imap_processing.mag.constants import (
     AXIS_COUNT,
     FIBONACCI_SEQUENCE,
@@ -17,7 +16,7 @@ from imap_processing.mag.constants import (
     MAX_FINE_TIME,
     RANGE_BIT_WIDTH,
 )
-from imap_processing.spice.time import met_to_j2000ns
+from imap_processing.spice.time import J2000_EPOCH, met_to_j2000ns
 
 
 @dataclass
@@ -64,9 +64,11 @@ def mag_l1b_processing(input_dataset: xr.Dataset) -> xr.Dataset:
     """
     # TODO: There is a time alignment step that will add a lot of complexity.
     # This needs to be done once we have some SPICE time data.
+    mag_attributes = ImapCdfAttributes()
+    mag_attributes.add_instrument_variable_attrs("mag", "l1")
 
-    dims = [["direction"]]
-    new_dims = [["direction"]]
+    dims = [["direction"], ["compression"]]
+    new_dims = [["direction"], ["compression"]]
     # TODO: This should definitely be loaded from AWS
     calibration_dataset = load_cdf(
         Path(__file__).parent / "imap_calibration_mag_20240229_v01.cdf"
@@ -78,8 +80,9 @@
     calibration_matrix = calibration_dataset["MFITOURFI"]
 
     l1b_fields = xr.apply_ufunc(
-        calibrate,
+        update_vector,
         input_dataset["vectors"],
+        input_dataset["compression_flags"],
         input_core_dims=dims,
         output_core_dims=new_dims,
         vectorize=True,
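With compression flags added as a second input, dims and new_dims become lists of two core-dimension lists and the wrapped function returns two outputs, which is why the caller later takes l1b_fields[0]. A small self-contained sketch of that xr.apply_ufunc pattern with toy data (names and values are illustrative only):

    import numpy as np
    import xarray as xr

    vectors = xr.DataArray(np.ones((5, 4)), dims=["epoch", "direction"])
    flags = xr.DataArray(np.tile([1, 14], (5, 1)), dims=["epoch", "compression"])

    def toy_update(vec, comp):
        # Receives one (direction,) slice and one (compression,) slice per epoch.
        return vec * 2, comp

    out_vectors, out_flags = xr.apply_ufunc(
        toy_update,
        vectors,
        flags,
        input_core_dims=[["direction"], ["compression"]],
        output_core_dims=[["direction"], ["compression"]],
        vectorize=True,
    )
    # out_vectors plays the role of l1b_fields[0] in the L1B code above.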
@@ -88,13 +91,93 @@ def mag_l1b_processing(input_dataset: xr.Dataset) -> xr.Dataset:
     )
 
     output_dataset = input_dataset.copy()
-    output_dataset["vectors"] = l1b_fields
+    output_dataset["vectors"].data = l1b_fields[0].data
+
+    output_dataset["epoch"].attrs = mag_attributes.get_variable_attributes("epoch")
+    output_dataset["direction"].attrs = mag_attributes.get_variable_attributes(
+        "direction_attrs"
+    )
+    output_dataset["compression"].attrs = mag_attributes.get_variable_attributes(
+        "compression_attrs"
+    )
+    output_dataset["direction_label"].attrs = mag_attributes.get_variable_attributes(
+        "direction_label", check_schema=False
+    )
+    output_dataset["compression_label"].attrs = mag_attributes.get_variable_attributes(
+        "compression_label", check_schema=False
+    )
 
-    # TODO add/update attributes
     return output_dataset
 
 
-def calibrate(
+def update_vector(
+    input_vector: np.ndarray,
+    input_compression: np.ndarray,
+    calibration_matrix: xr.DataArray,
+) -> tuple[np.ndarray, np.ndarray]:
+    """
+    Apply calibration and compression scaling to vector.
+
+    This calls, in sequence, calibrate_vector and rescale_vector to apply L1B processing
+    to the input vector.
+
+    Parameters
+    ----------
+    input_vector : numpy.ndarray
+        One input vector to update, looking like (x, y, z, range).
+    input_compression : numpy.ndarray
+        Compression flags corresponding to the vector, looking like (is_compressed,
+        compression_width).
+    calibration_matrix : xr.DataArray
+        DataArray containing the full set of calibration matrices, for each range.
+        Size is ((3, 3, 4)).
+
+    Returns
+    -------
+    tuple[numpy.ndarray, numpy.ndarray]
+        Updated vector and the same compression flags.
+    """
+    vector = calibrate_vector(input_vector, calibration_matrix)
+    return rescale_vector(vector, input_compression), input_compression
+
+
+def rescale_vector(
+    input_vector: np.ndarray, compression_flags: np.ndarray
+) -> np.ndarray:
+    """
+    Rescale vector based on compression flags.
+
+    If the first value of compression_flags is zero, this just returns the input_vector
+    unchanged. Otherwise, the vector is scaled using the compression width, which is
+    the second part of compression_flags.
+
+    The vector is scaled using the following equation:
+    M = 2 ^ (16-width)
+    output_vector = input_vector * M
+
+    Therefore, for a 16 bit width, the same vector is returned.
+
+    Parameters
+    ----------
+    input_vector : numpy.ndarray
+        One input vector to update, looking like (x, y, z, range).
+    compression_flags : numpy.ndarray
+        Compression flags corresponding to the vector, looking like (is_compressed,
+        compression_width).
+
+    Returns
+    -------
+    output_vector : numpy.ndarray
+        Updated vector.
+    """
+    if not compression_flags[0]:
+        return input_vector
+    else:
+        factor = np.float_power(2, (16 - compression_flags[1]))
+        return input_vector * factor  # type: ignore
+
+
+def calibrate_vector(
     input_vector: np.ndarray, calibration_matrix: xr.DataArray = None
 ) -> np.ndarray:
     """
@@ -119,7 +202,7 @@ def calibrate(
     updated_vector = input_vector.copy()
 
     updated_vector[:3] = np.matmul(
-        input_vector[:3], calibration_matrix.values[:, :, int(input_vector[3])]
+        calibration_matrix.values[:, :, int(input_vector[3])], input_vector[:3]
     )
 
     return updated_vector
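The new L1B step first multiplies the (x, y, z) components by the calibration matrix selected for the vector's range (note the matrix is now applied on the left), then rescales compressed vectors by M = 2^(16 - width); a 14-bit width gives M = 4, and a 16-bit width leaves the vector unchanged. A standalone sketch of that arithmetic with a hypothetical identity calibration (not the package's stored matrices):

    import numpy as np

    # Hypothetical calibration: one 3x3 matrix per range, shape (3, 3, 4).
    calibration = np.stack([np.eye(3)] * 4, axis=-1)

    vector = np.array([1.0, 2.0, 3.0, 2.0])   # (x, y, z, range)
    compression_flags = np.array([1, 14])     # (is_compressed, compression_width)

    # Calibration: select the matrix for this range and apply it on the left.
    calibrated = vector.copy()
    calibrated[:3] = np.matmul(calibration[:, :, int(vector[3])], vector[:3])

    # Compression rescaling: M = 2 ** (16 - width) = 4 for a 14-bit width,
    # applied here to the field components only for clarity.
    if compression_flags[0]:
        calibrated[:3] *= np.float_power(2, 16 - compression_flags[1])

    print(calibrated)  # [ 4.  8. 12.  2.]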
@@ -19,6 +19,7 @@ import numpy as np
 import numpy.typing as npt
 import pandas as pd
 import spiceypy as spice
+from numpy.typing import NDArray
 
 from imap_processing.spice.kernels import ensure_spice
 
@@ -63,8 +64,6 @@ class SpiceFrame(IntEnum):
     IMAP_GLOWS = -43750
 
 
-# TODO: Update boresight for in-situ instruments
-# TODO: Confirm ENA boresight vectors
 BORESIGHT_LOOKUP = {
     SpiceFrame.IMAP_LO: np.array([0, -1, 0]),
     SpiceFrame.IMAP_HI_45: np.array([0, 1, 0]),
@@ -72,12 +71,12 @@ BORESIGHT_LOOKUP = {
     SpiceFrame.IMAP_ULTRA_45: np.array([0, 0, 1]),
     SpiceFrame.IMAP_ULTRA_90: np.array([0, 0, 1]),
     SpiceFrame.IMAP_MAG: np.array([0, 0, 1]),
-    SpiceFrame.IMAP_SWE: np.array([0, 0, 1]),
-    SpiceFrame.IMAP_SWAPI: np.array([0, 0, 1]),
+    SpiceFrame.IMAP_SWE: np.array([-1, 0, 0]),
+    SpiceFrame.IMAP_SWAPI: np.array([0, 1, 0]),
     SpiceFrame.IMAP_CODICE: np.array([0, 0, 1]),
-    SpiceFrame.IMAP_HIT: np.array([0, 0, 1]),
-    SpiceFrame.IMAP_IDEX: np.array([0, 0, 1]),
-    SpiceFrame.IMAP_GLOWS: np.array([0, 0, 1]),
+    SpiceFrame.IMAP_HIT: np.array([0, 1, 0]),
+    SpiceFrame.IMAP_IDEX: np.array([0, 1, 0]),
+    SpiceFrame.IMAP_GLOWS: np.array([0, 0, -1]),
 }
 
 
@@ -303,8 +302,6 @@ def get_spacecraft_to_instrument_spin_phase_offset(instrument: SpiceFrame) -> fl
     return offset_lookup[instrument]
 
 
-@typing.no_type_check
-@ensure_spice
 def frame_transform(
     et: Union[float, npt.NDArray],
     position: npt.NDArray,
@@ -327,8 +324,11 @@
         Ephemeris time(s) corresponding to position(s).
     position : np.ndarray
         <x, y, z> vector or array of vectors in reference frame `from_frame`.
-        A single position vector may be provided for multiple `et` query times
-        but only a single position vector can be provided for a single `et`.
+        There are several possible shapes for the input position and et:
+        1. A single position vector may be provided for multiple `et` query times
+        2. A single `et` may be provided for multiple position vectors,
+        3. The same number of `et` and position vectors may be provided.
+        But it is not allowed to have n position vectors and m `et`, where n != m.
     from_frame : SpiceFrame
         Reference frame of input vector(s).
     to_frame : SpiceFrame
@@ -350,11 +350,14 @@
                 f"Invalid position shape: {position.shape}. "
                 f"Each input position vector must have 3 elements."
             )
-        if not len(position) == len(et):
-            raise ValueError(
-                "Mismatch in number of position vectors and Ephemeris times provided."
-                f"Position has {len(position)} elements and et has {len(et)} elements."
-            )
+        if not len(position) == np.asarray(et).size:
+            if np.asarray(et).size != 1:
+                raise ValueError(
+                    "Mismatch in number of position vectors and "
+                    "Ephemeris times provided."
+                    f"Position has {len(position)} elements and et has "
+                    f"{np.asarray(et).size} elements."
+                )
 
     # rotate will have shape = (3, 3) or (n, 3, 3)
     # position will have shape = (3,) or (n, 3)
@@ -369,6 +372,8 @@
     return result
 
 
+@typing.no_type_check
+@ensure_spice
 def get_rotation_matrix(
     et: Union[float, npt.NDArray],
     from_frame: SpiceFrame,
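The relaxed length check implements the shape rules spelled out in the updated docstring: one et with many positions, one position with many et, or matched lengths are all accepted. A hedged, self-contained sketch of that validation logic (a simplified stand-in, not the package's frame_transform):

    import numpy as np

    def validate_et_position(et, position):
        """Allow (1 et, n positions), (m et, 1 position), or matched lengths."""
        position = np.asarray(position)
        n_et = np.asarray(et).size
        if position.ndim == 2 and len(position) != n_et and n_et != 1:
            raise ValueError(
                f"Position has {len(position)} elements and et has {n_et} elements."
            )

    validate_et_position(0.0, np.ones((5, 3)))             # one et, five positions
    validate_et_position(np.zeros(5), np.ones(3))          # five et, one position
    validate_et_position(np.zeros(5), np.ones((5, 3)))     # matched lengths
    # validate_et_position(np.zeros(4), np.ones((5, 3)))   # 5 vs 4 -> raises ValueError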
@@ -443,3 +448,138 @@ def instrument_pointing(
     if isinstance(et, typing.Collection):
         return np.rad2deg([spice.reclat(vec)[1:] for vec in pointing])
     return np.rad2deg(spice.reclat(pointing)[1:])
+
+
+def basis_vectors(
+    et: Union[float, npt.NDArray],
+    from_frame: SpiceFrame,
+    to_frame: SpiceFrame,
+) -> npt.NDArray:
+    """
+    Get the basis vectors of the `from_frame` expressed in the `to_frame`.
+
+    The rotation matrix defining a frame transform are the basis vectors of
+    the `from_frame` expressed in the `to_frame`. This function just transposes
+    each rotation matrix retrieved from the `get_rotation_matrix` function so
+    that basis vectors can be indexed 0 for x, 1 for y, and 2 for z.
+
+    Parameters
+    ----------
+    et : float or np.ndarray
+        Ephemeris time(s) for which to get the rotation matrices.
+    from_frame : SpiceFrame
+        Reference frame to transform from.
+    to_frame : SpiceFrame
+        Reference frame to transform to.
+
+    Returns
+    -------
+    basis_vectors : np.ndarray
+        If `et` is a float, the returned basis vector matrix is of shape `(3, 3)`. If
+        `et` is a np.ndarray, the returned basis vector matrix is of shape `(n, 3, 3)`
+        where `n` matches the number of elements in et and basis vectors are the rows
+        of the 3 by 3 matrices.
+
+    Examples
+    --------
+    >>> from imap_processing.spice.geometry import basis_vectors
+    ... from imap_processing.spice.time import j2000ns_to_j2000s
+    ... et = j2000ns_to_j2000s(dataset.epoch.values)
+    ... basis_vectors = basis_vectors(
+    ...     et, SpiceFrame.IMAP_SPACECRAFT, SpiceFrame.ECLIPJ2000
+    ... )
+    ... spacecraft_x = basis_vectors[:, 0]
+    ... spacecraft_y = basis_vectors[:, 1]
+    ... spacecraft_z = basis_vectors[:, 2]
+    """
+    return np.moveaxis(get_rotation_matrix(et, from_frame, to_frame), -1, -2)
+
+
+def cartesian_to_spherical(
+    v: NDArray,
+    degrees: bool = True,
+) -> NDArray:
+    """
+    Convert cartesian coordinates to spherical coordinates.
+
+    Parameters
+    ----------
+    v : np.ndarray
+        A NumPy array with shape (n, 3) where each
+        row represents a vector
+        with x, y, z-components.
+    degrees : bool
+        If True, the azimuth and elevation angles are returned in degrees.
+        Defaults to True.
+
+    Returns
+    -------
+    spherical_coords : np.ndarray
+        A NumPy array with shape (n, 3), where each row contains
+        the spherical coordinates (r, azimuth, elevation):
+
+        - r : Distance of the point from the origin.
+        - azimuth : angle in the xy-plane
+          In degrees if degrees parameter is True (by default):
+          output range=[0, 360],
+          otherwise in radians if degrees parameter is False:
+          output range=[0, 2*pi].
+        - elevation : angle from the z-axis
+          In degrees if degrees parameter is True (by default):
+          output range=[0, 180],
+          otherwise in radians if degrees parameter is False:
+          output range=[-pi/2, pi/2].
+    """
+    # Magnitude of the velocity vector
+    magnitude_v = np.linalg.norm(v, axis=-1, keepdims=True)
+
+    vhat = v / magnitude_v
+
+    # Elevation angle (angle from the z-axis, range: [-pi/2, pi/2])
+    el = np.arcsin(vhat[..., 2])
+
+    # Azimuth angle (angle in the xy-plane, range: [0, 2*pi])
+    az = np.arctan2(vhat[..., 1], vhat[..., 0])
+
+    # Ensure azimuth is from 0 to 2PI
+    az = az % (2 * np.pi)
+
+    if degrees:
+        az = np.degrees(az)
+        el = np.degrees(el)
+
+    spherical_coords = np.stack((np.squeeze(magnitude_v), az, el), axis=-1)
+
+    return spherical_coords
+
+
+def spherical_to_cartesian(spherical_coords: NDArray) -> NDArray:
+    """
+    Convert spherical coordinates to Cartesian coordinates.
+
+    Parameters
+    ----------
+    spherical_coords : np.ndarray
+        A NumPy array with shape (n, 3), where each row contains
+        the spherical coordinates (r, azimuth, elevation):
+
+        - r : Distance of the point from the origin.
+        - azimuth : angle in the xy-plane in radians [0, 2*pi].
+        - elevation : angle from the z-axis in radians [-pi/2, pi/2].
+
+    Returns
+    -------
+    cartesian_coords : np.ndarray
+        Cartesian coordinates.
+    """
+    r = spherical_coords[..., 0]
+    azimuth = spherical_coords[..., 1]
+    elevation = spherical_coords[..., 2]
+
+    x = r * np.cos(elevation) * np.cos(azimuth)
+    y = r * np.cos(elevation) * np.sin(azimuth)
+    z = r * np.sin(elevation)
+
+    cartesian_coords = np.stack((x, y, z), axis=-1)
+
+    return cartesian_coords
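cartesian_to_spherical and spherical_to_cartesian are pure NumPy helpers, so a quick round trip exercises them without any SPICE kernels; with degrees=False the output of one feeds directly into the other. The numeric values in the comments follow from the formulas above and are approximate:

    import numpy as np
    from imap_processing.spice.geometry import (
        cartesian_to_spherical,
        spherical_to_cartesian,
    )

    v = np.array([1.0, 1.0, np.sqrt(2.0)])             # |v| = 2

    spherical = cartesian_to_spherical(v, degrees=False)
    # approximately [2, pi/4, pi/4]: r = 2, azimuth = 45 deg, elevation = 45 deg

    round_trip = spherical_to_cartesian(spherical)
    print(np.allclose(round_trip, v))                  # True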
@@ -1,7 +1,7 @@
 """Time conversion functions that rely on SPICE."""
 
 import typing
-from collections.abc import Collection
+from collections.abc import Collection, Iterable
 from typing import Union
 
 import numpy as np
@@ -13,6 +13,32 @@ from imap_processing.spice.kernels import ensure_spice
 
 TICK_DURATION = 2e-5  # 20 microseconds as defined in imap_sclk_0000.tsc
 
+# Hard code the J2000 epoch. This allows for CDF epoch to be converted without
+# use of SPICE though it should be noted that this results in a 5-second error
+# due to the occurrence of 5 leap-seconds since the J2000 epoch.
+# TODO: Implement a function for converting CDF epoch to UTC correctly.
+# see github ticket #1208
+# The UTC string was generated by:
+# >>> spiceypy.et2utc(0, "ISOC", 9)
+J2000_EPOCH = np.datetime64("2000-01-01T11:58:55.816072737", "ns")
+
+
+def met_to_sclkticks(met: npt.ArrayLike) -> npt.NDArray[float]:
+    """
+    Convert Mission Elapsed Time (MET) to floating point spacecraft clock ticks.
+
+    Parameters
+    ----------
+    met : float, numpy.ndarray
+        Number of seconds since epoch according to the spacecraft clock.
+
+    Returns
+    -------
+    numpy.ndarray[float]
+        The mission elapsed time converted to nanoseconds since the J2000 epoch.
+    """
+    return np.asarray(met, dtype=float) / TICK_DURATION
+
 
 def met_to_j2000ns(
     met: npt.ArrayLike,
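Since TICK_DURATION is 2e-5 s (20 microseconds), the new met_to_sclkticks is a single division: one MET second corresponds to 50,000 spacecraft clock ticks, and the hard-coded J2000_EPOCH lets a CDF epoch be shifted to calendar time without SPICE (with the ~5 s leap-second error noted in the comment). The same arithmetic, standalone:

    import numpy as np

    TICK_DURATION = 2e-5  # seconds per spacecraft clock tick (20 microseconds)
    J2000_EPOCH = np.datetime64("2000-01-01T11:58:55.816072737", "ns")

    met = np.array([0.5, 1.0, 2.0])            # MET seconds
    sclk_ticks = met / TICK_DURATION           # [ 25000.  50000. 100000.]

    # Approximate calendar time for a CDF epoch given in ns since J2000 (no SPICE).
    epoch_ns = np.int64(1_000_000_000)
    approx_time = J2000_EPOCH + np.timedelta64(epoch_ns, "ns")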
@@ -40,7 +66,7 @@ def met_to_j2000ns(
     "tick" which is defined to be 20 microseconds, according to the sclk kernel,
     it is preferable to use the higher accuracy method.
     """
-    sclk_ticks = np.asarray(met, dtype=float) / TICK_DURATION
+    sclk_ticks = met_to_sclkticks(met)
     return np.asarray(_sct2e_wrapper(sclk_ticks) * 1e9, dtype=np.int64)
 
 
@@ -64,6 +90,53 @@ def j2000ns_to_j2000s(j2000ns: npt.ArrayLike) -> npt.NDArray[float]:
     return np.asarray(j2000ns, dtype=np.float64) / 1e9
 
 
+@typing.no_type_check
+@ensure_spice(time_kernels_only=True)
+def met_to_utc(met: npt.ArrayLike, precision: int = 9) -> npt.NDArray[str]:
+    """
+    Convert mission elapsed time (MET) to UTC.
+
+    Parameters
+    ----------
+    met : float, numpy.ndarray
+        Number of seconds since epoch according to the spacecraft clock.
+    precision : int
+        The number of digits of precision to which fractional seconds
+        are to be computed.
+
+    Returns
+    -------
+    numpy.ndarray[str]
+        The mission elapsed time converted to UTC string. The UTC string(s)
+        returned will be of the form '1987-04-12T16:31:12.814' with the
+        fractional seconds precision as specified by the precision keyword.
+    """
+    sclk_ticks = met_to_sclkticks(met)
+    et = _sct2e_wrapper(sclk_ticks)
+    return spice.et2utc(et, "ISOC", prec=precision)
+
+
+def met_to_datetime64(
+    met: npt.ArrayLike,
+) -> Union[np.datetime64, npt.NDArray[np.datetime64]]:
+    """
+    Convert mission elapsed time (MET) to datetime.datetime.
+
+    Parameters
+    ----------
+    met : float, numpy.ndarray
+        Number of seconds since epoch according to the spacecraft clock.
+
+    Returns
+    -------
+    numpy.ndarray[str]
+        The mission elapsed time converted to UTC string.
+    """
+    if isinstance(met, typing.Iterable):
+        return np.asarray([np.datetime64(utc) for utc in met_to_utc(met)])
+    return np.datetime64(met_to_utc(met))
+
+
 @typing.no_type_check
 @ensure_spice
 def _sct2e_wrapper(
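Both new helpers route MET through met_to_sclkticks and _sct2e_wrapper, so they need SPICE time kernels, which @ensure_spice furnishes when available. A hedged usage sketch; the MET values are placeholders and no concrete timestamps are shown because the results depend on the loaded kernels:

    import numpy as np
    from imap_processing.spice.time import met_to_datetime64, met_to_utc

    met = np.array([0.0, 1000.0])          # MET seconds (illustrative values)

    utc_strings = met_to_utc(met)          # ISOC strings with 9 fractional digits
    timestamps = met_to_datetime64(met)    # one numpy.datetime64 per MET value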
@@ -90,3 +163,72 @@
         return np.array([spice.sct2e(IMAP_SC_ID, s) for s in sclk_ticks])
     else:
         return spice.sct2e(IMAP_SC_ID, sclk_ticks)
+
+
+@typing.no_type_check
+@ensure_spice
+def str_to_et(
+    time_str: Union[str, Iterable[str]],
+) -> Union[float, np.ndarray]:
+    """
+    Convert string to ephemeris time.
+
+    Decorated wrapper for spiceypy.str2et that vectorizes the function in addition
+    to wrapping with the @ensure_spice automatic kernel furnishing functionality.
+    https://spiceypy.readthedocs.io/en/main/documentation.html#spiceypy.spiceypy.str2et
+
+    Parameters
+    ----------
+    time_str : str or Iterable[str]
+        Input string(s) to be converted to ephemeris time.
+
+    Returns
+    -------
+    ephemeris_time : np.ndarray
+        Ephemeris time, seconds past J2000.
+    """
+    if isinstance(time_str, str):
+        return spice.str2et(time_str)
+    else:
+        return np.array([spice.str2et(t) for t in time_str])
+
+
+@typing.no_type_check
+@ensure_spice
+def et_to_utc(
+    et: Union[float, Iterable[float]],
+    format_str: str = "ISOC",
+    precision: int = 3,
+    utclen: int = 24,
+) -> Union[str, np.ndarray]:
+    """
+    Convert ephemeris time to UTC.
+
+    Decorated wrapper for spiceypy.et2utc that vectorizes the function in addition
+    to wrapping with the @ensure_spice automatic kernel furnishing functionality.
+    https://spiceypy.readthedocs.io/en/main/documentation.html#spiceypy.spiceypy.et2utc
+
+    Parameters
+    ----------
+    et : float or Iterable[float]
+        Input ephemeris time(s) to be converted to UTC.
+    format_str : str
+        Format of the output time string. Default is "ISOC". All options:
+        "C" Calendar format, UTC.
+        "D" Day-of-Year format, UTC.
+        "J" Julian Date format, UTC.
+        "ISOC" ISO Calendar format, UTC.
+        "ISOD" ISO Day-of-Year format, UTC.
+    precision : int
+        Digits of precision in fractional seconds or days. Default is 3.
+    utclen : int
+        The length of the output string. Default is 24 (to accommodate the
+        "YYYY-MM-DDT00:00:00.000" format + 1). From the NAIF docs: if the output string
+        is expected to have `x` characters, `utclen` must be x + 1.
+
+    Returns
+    -------
+    utc_time : str or np.ndarray
+        UTC time(s).
+    """
+    return spice.et2utc(et, format_str, precision, utclen)
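str_to_et and et_to_utc are thin vectorized wrappers around spiceypy.str2et and spiceypy.et2utc, so with a leapseconds kernel furnished a UTC string can be converted to ephemeris time and formatted back in any of the listed formats. A usage sketch (inputs are illustrative; results depend on the kernels loaded):

    from imap_processing.spice.time import et_to_utc, str_to_et

    et = str_to_et("2025-01-01T00:00:00")                  # single string -> float
    et_array = str_to_et(["2025-01-01T00:00:00",           # iterable -> np.ndarray
                          "2025-01-02T00:00:00"])

    # Day-of-year calendar string with 6 fractional digits; utclen must be at least
    # one more than the expected output length.
    doy_string = et_to_utc(et, format_str="ISOD", precision=6, utclen=30)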
@@ -39,7 +39,7 @@ def filter_good_data(full_sweep_sci: xr.Dataset) -> npt.NDArray:
     """
     # PLAN_ID for current sweep should all be one value and
     # SWEEP_TABLE should all be one value.
-    plan_id = full_sweep_sci["plan_id_science"].data.reshape(-1, 12)
+    plan_id = full_sweep_sci["plan_id"].data.reshape(-1, 12)
     sweep_table = full_sweep_sci["sweep_table"].data.reshape(-1, 12)
 
     mode = full_sweep_sci["mode"].data.reshape(-1, 12)
@@ -72,7 +72,7 @@ def filter_good_data(full_sweep_sci: xr.Dataset) -> npt.NDArray:
     )
     logger.debug(
         "Plan ID should be same: "
-        f"{full_sweep_sci['plan_id_science'].data[bad_cycle_indices]}"
+        f"{full_sweep_sci['plan_id'].data[bad_cycle_indices]}"
     )
     logger.debug(
         f"Mode Id should be 3(HVSCI): {full_sweep_sci['mode'].data[bad_cycle_indices]}"
@@ -480,7 +480,7 @@ def process_swapi_science(
     # ====================================================
     cdf_manager = ImapCdfAttributes()
     cdf_manager.add_instrument_global_attrs("swapi")
-    cdf_manager.load_variable_attributes("imap_swapi_variable_attrs.yaml")
+    cdf_manager.add_instrument_variable_attrs(instrument="swapi", level=None)
 
     # ===================================================================
     # Quality flags
@@ -615,7 +615,7 @@ def process_swapi_science(
         attrs=cdf_manager.get_variable_attributes("sweep_table"),
     )
     dataset["plan_id"] = xr.DataArray(
-        good_sweep_sci["plan_id_science"].data.reshape(total_full_sweeps, 12)[:, 0],
+        good_sweep_sci["plan_id"].data.reshape(total_full_sweeps, 12)[:, 0],
         name="plan_id",
         dims=["epoch"],
         attrs=cdf_manager.get_variable_attributes("plan_id"),
@@ -43,7 +43,7 @@ def swapi_l2(l1_dataset: xr.Dataset, data_version: str) -> xr.Dataset:
     # Load the CDF attributes
     cdf_manager = ImapCdfAttributes()
     cdf_manager.add_instrument_global_attrs("swapi")
-    cdf_manager.load_variable_attributes("imap_swapi_variable_attrs.yaml")
+    cdf_manager.add_instrument_variable_attrs(instrument="swapi", level=None)
 
     # Copy over only certain variables from L1 to L2 dataset
     l1_data_keys = [