imap-processing 0.14.0__py3-none-any.whl → 0.16.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (81)
  1. imap_processing/_version.py +2 -2
  2. imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +60 -35
  3. imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +765 -287
  4. imap_processing/cdf/config/imap_codice_l1b_variable_attrs.yaml +1577 -288
  5. imap_processing/cdf/config/imap_codice_l2_variable_attrs.yaml +1004 -0
  6. imap_processing/cdf/config/imap_enamaps_l2-common_variable_attrs.yaml +28 -0
  7. imap_processing/cdf/config/imap_enamaps_l2-healpix_variable_attrs.yaml +1 -1
  8. imap_processing/cdf/config/imap_enamaps_l2-rectangular_variable_attrs.yaml +18 -0
  9. imap_processing/cdf/config/imap_glows_l2_variable_attrs.yaml +39 -3
  10. imap_processing/cdf/config/imap_ialirt_global_cdf_attrs.yaml +18 -0
  11. imap_processing/cdf/config/imap_ialirt_l1_variable_attrs.yaml +353 -0
  12. imap_processing/cdf/config/imap_idex_l1a_variable_attrs.yaml +7 -0
  13. imap_processing/cdf/config/imap_idex_l1b_variable_attrs.yaml +11 -0
  14. imap_processing/cdf/config/imap_idex_l2a_variable_attrs.yaml +4 -0
  15. imap_processing/cdf/config/imap_idex_l2c_variable_attrs.yaml +7 -3
  16. imap_processing/cdf/config/imap_lo_global_cdf_attrs.yaml +6 -0
  17. imap_processing/cdf/config/imap_mag_l2_variable_attrs.yaml +114 -0
  18. imap_processing/cdf/config/imap_swe_global_cdf_attrs.yaml +11 -5
  19. imap_processing/cdf/config/imap_swe_l1b_variable_attrs.yaml +23 -1
  20. imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +4 -0
  21. imap_processing/cdf/config/imap_ultra_l1c_variable_attrs.yaml +2 -2
  22. imap_processing/cli.py +145 -80
  23. imap_processing/codice/codice_l1a.py +140 -84
  24. imap_processing/codice/codice_l1b.py +91 -18
  25. imap_processing/codice/codice_l2.py +81 -0
  26. imap_processing/codice/constants.py +68 -0
  27. imap_processing/ena_maps/ena_maps.py +43 -1
  28. imap_processing/glows/l2/glows_l2_data.py +3 -6
  29. imap_processing/hi/hi_l1a.py +447 -0
  30. imap_processing/hi/{l1b/hi_l1b.py → hi_l1b.py} +1 -1
  31. imap_processing/hi/{l1c/hi_l1c.py → hi_l1c.py} +21 -21
  32. imap_processing/hi/{l2/hi_l2.py → hi_l2.py} +13 -13
  33. imap_processing/hi/utils.py +6 -6
  34. imap_processing/hit/l1b/hit_l1b.py +30 -11
  35. imap_processing/ialirt/constants.py +38 -0
  36. imap_processing/ialirt/l0/parse_mag.py +1 -1
  37. imap_processing/ialirt/l0/process_codice.py +91 -0
  38. imap_processing/ialirt/l0/process_hit.py +12 -21
  39. imap_processing/ialirt/l0/process_swapi.py +172 -23
  40. imap_processing/ialirt/l0/process_swe.py +3 -10
  41. imap_processing/ialirt/utils/constants.py +62 -0
  42. imap_processing/ialirt/utils/create_xarray.py +135 -0
  43. imap_processing/idex/idex_l2c.py +9 -9
  44. imap_processing/lo/l1b/lo_l1b.py +6 -1
  45. imap_processing/lo/l1c/lo_l1c.py +22 -13
  46. imap_processing/lo/l2/lo_l2.py +213 -0
  47. imap_processing/mag/l1c/mag_l1c.py +8 -1
  48. imap_processing/mag/l2/mag_l2.py +6 -2
  49. imap_processing/mag/l2/mag_l2_data.py +7 -5
  50. imap_processing/swe/l1a/swe_l1a.py +6 -6
  51. imap_processing/swe/l1b/swe_l1b.py +70 -11
  52. imap_processing/ultra/l0/decom_ultra.py +1 -1
  53. imap_processing/ultra/l0/ultra_utils.py +0 -4
  54. imap_processing/ultra/l1b/badtimes.py +7 -3
  55. imap_processing/ultra/l1b/cullingmask.py +7 -2
  56. imap_processing/ultra/l1b/de.py +26 -12
  57. imap_processing/ultra/l1b/lookup_utils.py +8 -7
  58. imap_processing/ultra/l1b/ultra_l1b.py +59 -48
  59. imap_processing/ultra/l1b/ultra_l1b_culling.py +50 -18
  60. imap_processing/ultra/l1b/ultra_l1b_extended.py +4 -4
  61. imap_processing/ultra/l1c/helio_pset.py +53 -0
  62. imap_processing/ultra/l1c/spacecraft_pset.py +20 -12
  63. imap_processing/ultra/l1c/ultra_l1c.py +49 -26
  64. imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +40 -2
  65. imap_processing/ultra/l2/ultra_l2.py +47 -2
  66. imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_RightSlit.csv +524 -526
  67. imap_processing/ultra/utils/ultra_l1_utils.py +51 -10
  68. {imap_processing-0.14.0.dist-info → imap_processing-0.16.0.dist-info}/METADATA +2 -2
  69. {imap_processing-0.14.0.dist-info → imap_processing-0.16.0.dist-info}/RECORD +72 -69
  70. imap_processing/hi/l1a/__init__.py +0 -0
  71. imap_processing/hi/l1a/hi_l1a.py +0 -98
  72. imap_processing/hi/l1a/histogram.py +0 -152
  73. imap_processing/hi/l1a/science_direct_event.py +0 -214
  74. imap_processing/hi/l1b/__init__.py +0 -0
  75. imap_processing/hi/l1c/__init__.py +0 -0
  76. imap_processing/hi/l2/__init__.py +0 -0
  77. imap_processing/ialirt/l0/process_codicehi.py +0 -156
  78. imap_processing/ialirt/l0/process_codicelo.py +0 -41
  79. {imap_processing-0.14.0.dist-info → imap_processing-0.16.0.dist-info}/LICENSE +0 -0
  80. {imap_processing-0.14.0.dist-info → imap_processing-0.16.0.dist-info}/WHEEL +0 -0
  81. {imap_processing-0.14.0.dist-info → imap_processing-0.16.0.dist-info}/entry_points.txt +0 -0
@@ -620,6 +620,42 @@ class HiPointingSet(PointingSet):
  self.spatial_coords = ("spin_angle_bin",)


+ class LoPointingSet(PointingSet):
+ """
+ PointingSet object specific to Lo L1C PSet data.
+
+ Parameters
+ ----------
+ dataset : xarray.Dataset
+ Lo L1C pointing set data loaded in an xarray.Dataset.
+ """
+
+ def __init__(self, dataset: xr.Dataset):
+ super().__init__(dataset, spice_reference_frame=geometry.SpiceFrame.IMAP_DPS)
+ # TODO: Use spatial_utils.az_el_grid instead of
+ # manually creating the lon/lat values
+ inferred_spacing_deg = 360 / dataset.longitude.size
+ longitude_bin_centers = np.arange(
+ 0 + inferred_spacing_deg / 2, 360, inferred_spacing_deg
+ )
+ latitude_bin_centers = np.arange(
+ -2 + inferred_spacing_deg / 2, 2, inferred_spacing_deg
+ )
+
+ # Could be wrong about the order here
+ longitude_grid, latitude_grid = np.meshgrid(
+ longitude_bin_centers,
+ latitude_bin_centers,
+ indexing="ij",
+ )
+
+ longitude = longitude_grid.ravel()
+ latitude = latitude_grid.ravel()
+
+ self.az_el_points = np.column_stack((longitude, latitude))
+ self.spatial_coords = ("longitude", "latitude")
+
+
  # Define the Map classes
  class AbstractSkyMap(ABC):
  """
@@ -1119,7 +1155,7 @@ class RectangularSkyMap(AbstractSkyMap):
  )
  # Add the solid angle variable to the data_1d Dataset
  self.data_1d["solid_angle"] = xr.DataArray(
- self.solid_angle_points[np.newaxis, :],
+ self.solid_angle_points[np.newaxis, :].astype(np.float32),
  name="solid_angle",
  dims=[CoordNames.TIME.value, CoordNames.GENERIC_PIXEL.value],
  )
@@ -1423,6 +1459,12 @@ class HealpixSkyMap(AbstractSkyMap):
  {},
  coords={**self.spatial_coords},
  )
+ # Add the solid angle variable to the data_1d Dataset
+ self.data_1d["solid_angle"] = xr.DataArray(
+ self.solid_angle_points[np.newaxis, :].astype(np.float32),
+ name="solid_angle",
+ dims=[CoordNames.TIME.value, CoordNames.GENERIC_PIXEL.value],
+ )
  # return the data_1d as is, but with the pixel coordinate
  # renamed to CoordNames.HEALPIX_INDEX.value
  return self.data_1d.rename(
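
Both solid-angle hunks above store the per-pixel solid angles as float32 with a leading time axis added via np.newaxis. A minimal sketch of the HEALPix case, using the fact that every HEALPix pixel covers 4*pi/Npix steradians; the nside value and dimension names are hypothetical stand-ins for the real CoordNames values:

```python
import numpy as np
import xarray as xr

nside = 16                     # hypothetical map resolution
n_pix = 12 * nside**2          # 3072 pixels; each covers 4*pi / n_pix steradians
solid_angle_points = np.full(n_pix, 4 * np.pi / n_pix)

solid_angle = xr.DataArray(
    solid_angle_points[np.newaxis, :].astype(np.float32),  # add time axis, downcast
    name="solid_angle",
    dims=["epoch", "pixel_index"],  # stand-ins for the CoordNames values
)
print(solid_angle.shape, float(solid_angle.sum()))  # (1, 3072) and ~12.566 (= 4*pi)
```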
@@ -34,8 +34,6 @@ class DailyLightcurve:
  ecliptic latitude of bin centers [deg]
  number_of_bins : int
  number of bins in lightcurve
- raw_uncertainties : numpy.ndarray
- statistical uncertainties for raw histograms (sqrt of self.raw_histograms)
  l1b_data : xarray.Dataset
  L1B data filtered by good times, good angles, and good bins.
  """
@@ -52,7 +50,6 @@ class DailyLightcurve:
  ecliptic_lon: np.ndarray = field(init=False)
  ecliptic_lat: np.ndarray = field(init=False)
  number_of_bins: int = field(init=False)
- raw_uncertainties: np.ndarray = field(init=False)
  l1b_data: InitVar[xr.Dataset]

  def __post_init__(self, l1b_data: xr.Dataset) -> None:
@@ -76,14 +73,14 @@ class DailyLightcurve:
  self.exposure_times = self.calculate_exposure_times(
  l1b_data, exposure_times_per_timestamp
  )
- self.raw_uncertainties = np.sqrt(self.raw_histograms)
+ raw_uncertainties = np.sqrt(self.raw_histograms)
  self.photon_flux = np.zeros(len(self.raw_histograms))
  self.flux_uncertainties = np.zeros(len(self.raw_histograms))

  # TODO: Only where exposure counts != 0
  if len(self.exposure_times) != 0:
  self.photon_flux = self.raw_histograms / self.exposure_times
- self.flux_uncertainties = self.raw_uncertainties / self.exposure_times
+ self.flux_uncertainties = raw_uncertainties / self.exposure_times

  # TODO: Average this, or should they all be the same?
  self.spin_angle = np.average(l1b_data["imap_spin_angle_bin_cntr"].data, axis=0)
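
The raw uncertainties become a local intermediate rather than a stored dataclass field, but the flux math is unchanged: photon flux is counts over exposure and its Poisson uncertainty is sqrt(counts) over exposure. A worked example with toy numbers (not real GLOWS values):

```python
import numpy as np

raw_histograms = np.array([400, 100, 0])        # summed counts per bin (toy values)
exposure_times = np.array([10.0, 10.0, 10.0])   # seconds (toy values)

raw_uncertainties = np.sqrt(raw_histograms)              # Poisson sigma = sqrt(N)
photon_flux = raw_histograms / exposure_times            # [40. 10.  0.] counts/s
flux_uncertainties = raw_uncertainties / exposure_times  # [ 2.  1.  0.] counts/s
```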
@@ -135,7 +132,7 @@ class DailyLightcurve:
  Sum of valid histograms across all timestamps.
  """
  histograms[histograms == -1] = 0
- return np.sum(histograms, axis=0)
+ return np.sum(histograms, axis=0, dtype=np.int64)


  @dataclass
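
The added dtype=np.int64 pins the accumulator width when summing histograms over many timestamps; without it NumPy falls back to the platform default integer, which is only 32-bit on some platforms and older builds. A small sketch with hypothetical values large enough to overflow a 32-bit sum:

```python
import numpy as np

# 100,000 timestamps of 90-bin histograms; -1 marks invalid bins.
histograms = np.full((100_000, 90), 50_000, dtype=np.int32)
histograms[0, :] = -1

histograms[histograms == -1] = 0                     # zero out invalid bins
summed = np.sum(histograms, axis=0, dtype=np.int64)  # 64-bit accumulator
print(summed.dtype, summed[0])                       # int64 4999950000 (> 2**31 - 1)
```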
@@ -0,0 +1,447 @@
+ """IMAP-HI L1A processing module."""
+
+ import logging
+ from collections import defaultdict
+ from pathlib import Path
+ from typing import Union
+
+ import numpy as np
+ import xarray as xr
+ from numpy import _typing as npt
+ from numpy._typing import NDArray
+
+ from imap_processing import imap_module_directory
+ from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
+ from imap_processing.hi.utils import HIAPID
+ from imap_processing.spice.time import met_to_ttj2000ns
+ from imap_processing.utils import packet_file_to_datasets
+
+ # TODO: read DE_CLOCK_TICK_US from
+ # instrument status summary later. This value
+ # rarely changes, but we want to be able to change
+ # it if needed. It stores how fast the DE clock
+ # ticks, in microseconds.
+ DE_CLOCK_TICK_US = 1999
+ DE_CLOCK_TICK_S = DE_CLOCK_TICK_US / 1e6
+ HALF_CLOCK_TICK_S = DE_CLOCK_TICK_S / 2
+
+ MILLISECOND_TO_S = 1e-3
+
+ # define the names of the 24 counter arrays
+ # contained in the histogram packet
+ QUALIFIED_COUNTERS = (
+ "ab_qualified",
+ "c1c2_qualified",
+ "ac1_qualified",
+ "bc1_qualified",
+ "abc1_qualified",
+ "ac1c2_qualified",
+ "bc1c2_qualified",
+ "abc1c2_qualified",
+ )
+ LONG_COUNTERS = (
+ "a_first_only",
+ "b_first_only",
+ "c_first_only",
+ "ab_long",
+ "c1c2_long",
+ "ac1_long",
+ "bc1_long",
+ "abc1_long",
+ "ac1c2_long",
+ "bc1c2_long",
+ "abc1c2_long",
+ )
+ TOTAL_COUNTERS = ("a_total", "b_total", "c_total", "fee_de_recd", "fee_de_sent")
+
+ logger = logging.getLogger(__name__)
+
+
+ def hi_l1a(packet_file_path: Union[str, Path]) -> list[xr.Dataset]:
+ """
+ Will process IMAP raw data to l1a.
+
+ Parameters
+ ----------
+ packet_file_path : str or pathlib.Path
+ Data packet file path.
+
+ Returns
+ -------
+ processed_data : list[xarray.Dataset]
+ List of processed xarray datasets.
+ """
+ datasets_by_apid = hi_packet_file_to_datasets(packet_file_path)
+
+ # Process science to l1a.
+ processed_data = []
+ for apid in datasets_by_apid:
+ try:
+ apid_enum = HIAPID(apid)
+ except ValueError as err:
+ raise RuntimeError(f"Encountered unexpected APID [{apid}]") from err
+
+ logger.info(f"Processing IMAP-Hi data for {apid_enum.name} packets")
+
+ if apid_enum in [HIAPID.H45_SCI_CNT, HIAPID.H90_SCI_CNT]:
+ data = finish_hist_dataset(datasets_by_apid[apid])
+ gattr_key = "imap_hi_l1a_hist_attrs"
+ elif apid_enum in [HIAPID.H45_SCI_DE, HIAPID.H90_SCI_DE]:
+ data = finish_de_dataset(datasets_by_apid[apid])
+ gattr_key = "imap_hi_l1a_de_attrs"
+ elif apid_enum in [HIAPID.H45_APP_NHK, HIAPID.H90_APP_NHK]:
+ data = datasets_by_apid[apid]
+ gattr_key = "imap_hi_l1a_hk_attrs"
+ elif apid_enum in [HIAPID.H45_DIAG_FEE, HIAPID.H90_DIAG_FEE]:
+ data = datasets_by_apid[apid]
+ gattr_key = "imap_hi_l1a_diagfee_attrs"
+
+ # Update dataset global attributes
+ attr_mgr = ImapCdfAttributes()
+ attr_mgr.add_instrument_global_attrs("hi")
+ data.attrs.update(attr_mgr.get_global_attributes(gattr_key))
+
+ # set the sensor string in Logical_source
+ sensor_str = apid_enum.sensor
+ data.attrs["Logical_source"] = data.attrs["Logical_source"].format(
+ sensor=sensor_str
+ )
+ processed_data.append(data)
+ return processed_data
+
+
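
A hypothetical invocation of the new hi_l1a() entry point; the packet file name below is made up, and only the function shown above is used:

```python
from imap_processing.hi.hi_l1a import hi_l1a

# Hypothetical L0 CCSDS packet file; any APID not in HIAPID raises RuntimeError.
datasets = hi_l1a("imap_hi_l0_raw_20250101_v001.pkts")
for ds in datasets:
    # Logical_source has the sensor (45 or 90) already substituted by hi_l1a()
    print(ds.attrs["Logical_source"], ds.sizes.get("epoch"))
```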
+ def hi_packet_file_to_datasets(
+ packet_file_path: Union[str, Path], use_derived_value: bool = False
+ ) -> dict[int, xr.Dataset]:
+ """
+ Extract Hi datasets from a packet file.
+
+ Parameters
+ ----------
+ packet_file_path : str or pathlib.Path
+ L0 packet file path.
+ use_derived_value : bool
+ Whether to use the derived value from the XTCE definition. Default is False.
+
+ Returns
+ -------
+ datasets : dict[int, xarray.Dataset]
+ Dictionary of xarray datasets keyed by APID.
+ """
+ packet_def_file = (
+ imap_module_directory / "hi/packet_definitions/TLM_HI_COMBINED_SCI.xml"
+ )
+ datasets_by_apid = packet_file_to_datasets(
+ packet_file=packet_file_path,
+ xtce_packet_definition=packet_def_file,
+ use_derived_value=use_derived_value,
+ )
+ return datasets_by_apid
+
+
+ def finish_de_dataset(packets_data: xr.Dataset) -> xr.Dataset:
+ """
+ Unpack IMAP-Hi direct event data.
+
+ Processing steps:
+
+ | 1. Break binary stream data into 48-bit units
+ | 2. Parse direct event data
+ | 3. Save the data into an xarray dataset.
+
+ Parameters
+ ----------
+ packets_data : xarray.Dataset
+ Packets extracted into a dataset.
+
+ Returns
+ -------
+ dataset : xarray.Dataset
+ Xarray dataset.
+ """
+ de_data_dict: dict[str, list] = defaultdict(list)
+
+ # Add packet data to the dictionary, renaming some fields
+ # This is done first so that these variables are first in the CDF
+ for from_key, to_key in {
+ "shcoarse": "ccsds_met",
+ "src_seq_ctr": "src_seq_ctr",
+ "pkt_len": "pkt_len",
+ "last_spin_num": "last_spin_num",
+ "spin_invalids": "spin_invalids",
+ "esa_step_num": "esa_step",
+ "meta_seconds": "meta_seconds",
+ "meta_subseconds": "meta_subseconds",
+ }.items():
+ de_data_dict[to_key] = packets_data[from_key].data
+
+ # For each packet, parse the DE data and add it to the pointing's
+ # list of DE data using `extend()`
+ for i, data in enumerate(packets_data["de_tof"].data):
+ parsed_de_data = parse_direct_events(data)
+ for key, new_data in parsed_de_data.items():
+ de_data_dict[key].extend(new_data)
+ # Record the ccsds packet index for each DE
+ de_data_dict["ccsds_index"].extend([i] * len(parsed_de_data["de_tag"]))
+
+ # create dataset
+ return create_de_dataset(de_data_dict)
+
+
+ def create_de_dataset(de_data_dict: dict[str, npt.ArrayLike]) -> xr.Dataset:
+ """
+ Create Hi L1A direct event xarray dataset.
+
+ Parameters
+ ----------
+ de_data_dict : dict[str, list]
+ Dictionary of packet telemetry and direct event data lists.
+
+ Returns
+ -------
+ dataset : xarray.Dataset
+ Xarray dataset.
+ """
+ # Load the CDF attributes
+ attr_mgr = ImapCdfAttributes()
+ attr_mgr.add_instrument_global_attrs("hi")
+ attr_mgr.add_instrument_variable_attrs(instrument="hi", level=None)
+
+ # check_schema=False keeps DEPEND_0 = '' from being auto added
+ epoch_attrs = attr_mgr.get_variable_attributes("epoch", check_schema=False)
+ epoch_attrs["CATDESC"] = (
+ "CCSDS creation time, number of nanoseconds since J2000 with leap "
+ "seconds included"
+ )
+ epoch = xr.DataArray(
+ met_to_ttj2000ns(de_data_dict["ccsds_met"]),
+ name="epoch",
+ dims=["epoch"],
+ attrs=epoch_attrs,
+ )
+
+ event_met_attrs = attr_mgr.get_variable_attributes(
+ "hi_de_event_met", check_schema=False
+ )
+ # For L1A DE, event_met is its own dimension, so we remove the DEPEND_0 attribute
+ _ = event_met_attrs.pop("DEPEND_0")
+
+ # Compute the meta-event MET in seconds
+ meta_event_met = (
+ np.array(de_data_dict["meta_seconds"]).astype(np.float64)
+ + np.array(de_data_dict["meta_subseconds"]) * MILLISECOND_TO_S
+ )
+ # Compute the MET of each event in seconds
+ # event MET = meta_event_met + de_clock
+ # See Hi Algorithm Document section 2.2.5
+ event_met_array = np.array(
+ meta_event_met[de_data_dict["ccsds_index"]]
+ + np.array(de_data_dict["de_tag"]) * DE_CLOCK_TICK_S,
+ dtype=event_met_attrs.pop("dtype"),
+ )
+ event_met = xr.DataArray(
+ event_met_array,
+ name="event_met",
+ dims=["event_met"],
+ attrs=event_met_attrs,
+ )
+
+ dataset = xr.Dataset(
+ coords={"epoch": epoch, "event_met": event_met},
+ )
+
+ for var_name, data in de_data_dict.items():
+ attrs = attr_mgr.get_variable_attributes(
+ f"hi_de_{var_name}", check_schema=False
+ ).copy()
+ dtype = attrs.pop("dtype")
+ dataset[var_name] = xr.DataArray(
+ np.array(data, dtype=np.dtype(dtype)),
+ dims=attrs["DEPEND_0"],
+ attrs=attrs,
+ )
+
+ return dataset
+
+
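
The event MET arithmetic in create_de_dataset (meta-event seconds plus millisecond subseconds, plus de_tag clock ticks of 1.999 ms) can be checked by hand. A worked example with made-up packet values:

```python
import numpy as np

DE_CLOCK_TICK_S = 1999 / 1e6   # 1.999 ms per de_tag tick
MILLISECOND_TO_S = 1e-3

# One packet with meta-event at MET 1000 s + 250 ms and three direct events
# tagged 0, 1 and 500 ticks after it (all values hypothetical).
meta_seconds = np.array([1000])
meta_subseconds = np.array([250])
ccsds_index = np.array([0, 0, 0])
de_tag = np.array([0, 1, 500])

meta_event_met = meta_seconds.astype(np.float64) + meta_subseconds * MILLISECOND_TO_S
event_met = meta_event_met[ccsds_index] + de_tag * DE_CLOCK_TICK_S
print(event_met)  # [1000.25     1000.251999 1001.2495  ]
```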
+ def parse_direct_events(de_data: bytes) -> dict[str, npt.ArrayLike]:
+ """
+ Parse event data from a binary blob.
+
+ Each IMAP-Hi direct event is stored in
+ 48 bits as follows:
+
+ | Read the 48 bits as fields of 16, 2, 10, 10, and 10 bits. These break
+ | down as:
+ |
+ | de_tag - 16 bits
+ | start_bitmask_data - 2 bits (tA=1, tB=2, tC1=3)
+ | tof_1 - 10 bit counter
+ | tof_2 - 10 bit counter
+ | tof_3 - 10 bit counter
+
+ There are at most 664 48-bit events in each data packet.
+ This data packet is of variable length. If there is one event, then
+ DE_TOF will contain 48 bits. If there are 664 events, then
+ DE_TOF will contain 664 x 48 bits. If there is no event, then
+ DE_TOF will contain 0 bits.
+
+ There should be two data packets per ESA. Each packet contains meta-event
+ data that is identical between the two packets for a common ESA.
+ If there is no event record for a certain ESA step, then both packets will
+ contain 0 bits in DE_TOF.
+
+ Parameters
+ ----------
+ de_data : bytes
+ Binary blob from the de_tof field of a SCI_DE packet. Must be an integer
+ multiple of 48 bits of data.
+
+ Returns
+ -------
+ dict[str, numpy.ndarray]
+ Parsed event data.
+ """
+ # The de_data is a binary blob with Nx6 bytes of data where N = number of
+ # direct events encoded into the binary blob. Interpreting the data as
+ # big-endian uint16 data and reshaping into a (3, -1) ndarray results
+ # in an array with shape (3, N). Indexing the first axis of that array
+ # (e.g. data_uint16[i]) gives the ith 2-bytes of data for each of the N
+ # direct events.
+ # Considering the 6-bytes of data for each DE as 3 2-byte words,
+ # each word contains the following:
+ # word_0: full 16-bits is the de_tag
+ # word_1: 2-bits of Trigger ID, 10-bits tof_1, upper 4-bits of tof_2
+ # word_2: lower 6-bits of tof_2, 10-bits of tof_3
+ data_uint16 = np.reshape(
+ np.frombuffer(de_data, dtype=">u2"), (3, -1), order="F"
+ ).astype(np.uint16)
+
+ de_dict = dict()
+ de_dict["de_tag"] = data_uint16[0]
+ de_dict["trigger_id"] = (data_uint16[1] >> 14).astype(np.uint8)
+ de_dict["tof_1"] = (data_uint16[1] & int(b"00111111_11110000", 2)) >> 4
+ de_dict["tof_2"] = ((data_uint16[1] & int(b"00000000_00001111", 2)) << 6) + (
+ data_uint16[2] >> 10
+ )
+ de_dict["tof_3"] = data_uint16[2] & int(b"00000011_11111111", 2)
+
+ return de_dict
+
+
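
A round-trip check of the 48-bit layout described in the docstring above: pack one synthetic event (all field values hypothetical) into three big-endian 16-bit words and confirm parse_direct_events recovers it:

```python
import struct

from imap_processing.hi.hi_l1a import parse_direct_events

de_tag, trigger_id, tof_1, tof_2, tof_3 = 0x1234, 2, 341, 682, 255  # hypothetical
word_0 = de_tag
word_1 = (trigger_id << 14) | (tof_1 << 4) | (tof_2 >> 6)
word_2 = ((tof_2 & 0x3F) << 10) | tof_3
de_data = struct.pack(">3H", word_0, word_1, word_2)  # one 48-bit event

parsed = parse_direct_events(de_data)
print(parsed["de_tag"], parsed["trigger_id"])             # [4660] [2]
print(parsed["tof_1"], parsed["tof_2"], parsed["tof_3"])  # [341] [682] [255]
```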
+ def finish_hist_dataset(input_ds: xr.Dataset) -> xr.Dataset:
+ """
+ Create dataset for a number of Hi Histogram packets.
+
+ Parameters
+ ----------
+ input_ds : xarray.Dataset
+ Dataset of packets generated using the
+ `imap_processing.utils.packet_file_to_datasets` function.
+
+ Returns
+ -------
+ dataset : xarray.Dataset
+ Dataset with all metadata field data in xr.DataArray.
+ """
+ attr_mgr = ImapCdfAttributes()
+ attr_mgr.add_instrument_global_attrs(instrument="hi")
+ attr_mgr.add_instrument_variable_attrs(instrument="hi", level=None)
+
+ # Rename shcoarse variable (do this first since it copies the input_ds)
+ dataset = input_ds.rename_vars({"shcoarse": "ccsds_met"})
+
+ dataset.epoch.attrs.update(
+ attr_mgr.get_variable_attributes("epoch"),
+ )
+ # Add the hist_angle coordinate
+ # Histogram data is binned in 90, 4-degree bins
+ attrs = attr_mgr.get_variable_attributes("hi_hist_angle")
+ dataset.coords.update(
+ {
+ "angle": xr.DataArray(
+ np.arange(2, 360, 4),
+ name="angle",
+ dims=["angle"],
+ attrs=attrs,
+ )
+ }
+ )
+ # Update existing variable attributes
+ for var_name in [
+ "version",
+ "type",
+ "sec_hdr_flg",
+ "pkt_apid",
+ "seq_flgs",
+ "src_seq_ctr",
+ "pkt_len",
+ "ccsds_met",
+ "esa_step",
+ "num_of_spins",
+ "cksum",
+ ]:
+ attrs = attr_mgr.get_variable_attributes(f"hi_hist_{var_name}")
+ dataset.data_vars[var_name].attrs.update(attrs)
+
+ new_vars = dict()
+ # Populate 90-element histogram counters
+ default_counter_attrs = attr_mgr.get_variable_attributes("hi_hist_counters")
+ for counter_name in (*QUALIFIED_COUNTERS, *LONG_COUNTERS, *TOTAL_COUNTERS):
+ # Inject counter name into generic counter attributes
+ counter_attrs = default_counter_attrs.copy()
+ for key, val in counter_attrs.items():
+ if isinstance(val, str) and "{counter_name}" in val:
+ counter_attrs[key] = val.format(counter_name=counter_name)
+ # Instantiate the counter DataArray
+ new_vars[counter_name] = xr.DataArray(
+ data=unpack_hist_counter(input_ds[counter_name].data.sum()),
+ dims=["epoch", "angle"],
+ attrs=counter_attrs,
+ )
+
+ # Generate label variable for angle coordinate
+ new_vars["angle_label"] = xr.DataArray(
+ dataset.coords["angle"].values.astype(str),
+ name="angle_label",
+ dims=["angle"],
+ attrs=attr_mgr.get_variable_attributes(
+ "hi_hist_angle_label", check_schema=False
+ ),
+ )
+
+ dataset.update(new_vars)
+
+ return dataset
+
+
+ def unpack_hist_counter(counter_bytes: bytes) -> NDArray[np.uint16]:
+ """
+ Unpack Hi SCI_CNT counter data for a single counter.
+
+ Parameters
+ ----------
+ counter_bytes : bytes
+ Concatenated bytes for all epochs of a Hi SCI_CNT counter.
+
+ Returns
+ -------
+ output_array : numpy.ndarray[numpy.uint16]
+ The unpacked 12-bit unsigned integers for the input bytes. The
+ output array has a shape of (n, 90) where n is the number of SCI_CNT
+ packets in the input dataset.
+ """
+ # Interpret bytes for all epochs of current counter as uint8 array
+ counter_uint8 = np.frombuffer(counter_bytes, dtype=np.uint8)
+ # Split into triplets of upper-byte, split-byte and lower-byte arrays
+ upper_uint8, split_unit8, lower_uint8 = np.reshape(
+ counter_uint8, (3, -1), order="F"
+ ).astype(np.uint16)
+ # Compute even indexed uint12 values from upper-byte and first 4-bits of
+ # split-byte
+ even_uint12 = (upper_uint8 << 4) + (split_unit8 >> 4)
+ # Compute odd indexed uint12 values from lower 4-bits of split-byte and
+ # lower-byte
+ odd_uint12 = ((split_unit8 & (2**4 - 1)) << 8) + lower_uint8
+ output_array = np.column_stack((even_uint12, odd_uint12)).reshape(-1, 90)
+ return output_array
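
Similarly, the 12-bit unpacking in unpack_hist_counter can be verified by packing 90 known values, two 12-bit counts per 3 bytes, and unpacking them back; the values below are arbitrary test data:

```python
import numpy as np

from imap_processing.hi.hi_l1a import unpack_hist_counter

values = np.arange(90, dtype=np.uint16)  # arbitrary 12-bit test counts
packed = bytearray()
for even, odd in values.reshape(-1, 2):
    packed += bytes([even >> 4, ((even & 0xF) << 4) | (odd >> 8), odd & 0xFF])

unpacked = unpack_hist_counter(bytes(packed))
print(unpacked.shape)                       # (1, 90)
print(np.array_equal(unpacked[0], values))  # True
```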
@@ -11,7 +11,7 @@ import xarray as xr
  from imap_processing import imap_module_directory
  from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
  from imap_processing.cdf.utils import parse_filename_like
- from imap_processing.hi.l1a.science_direct_event import HALF_CLOCK_TICK_S
+ from imap_processing.hi.hi_l1a import HALF_CLOCK_TICK_S
  from imap_processing.hi.utils import (
  HIAPID,
  CoincidenceBitmap,
@@ -14,7 +14,7 @@ from numpy._typing import NDArray

  from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
  from imap_processing.cdf.utils import parse_filename_like
- from imap_processing.hi.l1a.science_direct_event import (
+ from imap_processing.hi.hi_l1a import (
  DE_CLOCK_TICK_S,
  HALF_CLOCK_TICK_S,
  )
@@ -57,7 +57,7 @@ def hi_l1c(
  ----------
  de_dataset : xarray.Dataset
  IMAP-Hi l1b de product.
- calibration_prod_config_path : Path
+ calibration_prod_config_path : pathlib.Path
  Calibration product configuration file.

  Returns
@@ -84,7 +84,7 @@ def generate_pset_dataset(
  ----------
  de_dataset : xarray.Dataset
  IMAP-Hi l1b de product.
- calibration_prod_config_path : Path
+ calibration_prod_config_path : pathlib.Path
  Calibration product configuration file.

  Returns
@@ -306,16 +306,16 @@ def pset_counts(

  Parameters
  ----------
- pset_coords : dict[str, xr.DataArray]
- The PSET coordinates from the xr.Dataset.
- config_df : pd.DataFrame
+ pset_coords : dict[str, xarray.DataArray]
+ The PSET coordinates from the xarray.Dataset.
+ config_df : pandas.DataFrame
  The calibration product configuration dataframe.
- l1b_de_dataset : xr.Dataset
+ l1b_de_dataset : xarray.Dataset
  The L1B dataset for the pointing being processed.

  Returns
  -------
- dict[str, xr.DataArray]
+ dict[str, xarray.DataArray]
  Dictionary containing new exposure_times DataArray to be added to the PSET
  dataset.
  """
@@ -396,10 +396,10 @@ def get_tof_window_mask(

  Parameters
  ----------
- de_df : pd.DataFrame
+ de_df : pandas.DataFrame
  The Direct Event dataframe for the DEs to filter based on the TOF
  windows.
- prod_config_row : namedtuple
+ prod_config_row : NamedTuple
  A single row of the prod config dataframe represented as a named tuple.
  fill_vals : dict
  A dictionary containing the fill values used in the input DE TOF
@@ -438,12 +438,12 @@ def pset_backgrounds(pset_coords: dict[str, xr.DataArray]) -> dict[str, xr.DataA

  Parameters
  ----------
- pset_coords : dict[str, xr.DataArray]
- The PSET coordinates from the xr.Dataset.
+ pset_coords : dict[str, xarray.DataArray]
+ The PSET coordinates from the xarray.Dataset.

  Returns
  -------
- dict[str, xr.DataArray]
+ dict[str, xarray.DataArray]
  Dictionary containing background_rates and background_rates_unc DataArrays
  to be added to the PSET dataset.
  """
@@ -475,14 +475,14 @@ def pset_exposure(

  Parameters
  ----------
- pset_coords : dict[str, xr.DataArray]
- The PSET coordinates from the xr.Dataset.
- l1b_de_dataset : xr.Dataset
+ pset_coords : dict[str, xarray.DataArray]
+ The PSET coordinates from the xarray.Dataset.
+ l1b_de_dataset : xarray.Dataset
  The L1B dataset for the pointing being processed.

  Returns
  -------
- dict[str, xr.DataArray]
+ dict[str, xarray.DataArray]
  Dictionary containing new exposure_times DataArray to be added to the PSET
  dataset.
  """
@@ -552,12 +552,12 @@ def find_second_de_packet_data(l1b_dataset: xr.Dataset) -> xr.Dataset:

  Parameters
  ----------
- l1b_dataset : xr.Dataset
+ l1b_dataset : xarray.Dataset
  The L1B Direct Event Dataset for the current pointing.

  Returns
  -------
- reduced_dataset : xr.Dataset
+ reduced_dataset : xarray.Dataset
  A dataset containing only the entries for the second packet at an ESA step.
  """
  epoch_dataset = l1b_dataset.drop_dims("event_met")
@@ -606,7 +606,7 @@ def get_de_clock_ticks_for_esa_step(
  ----------
  ccsds_met : float
  The CCSDS MET of the second packet in a DE packet pair.
- spin_df : pd.DataFrame
+ spin_df : pandas.DataFrame
  Universal spin table dataframe.

  Returns
@@ -760,7 +760,7 @@ class CalibrationProductConfig:

  Parameters
  ----------
- path : Path
+ path : pathlib.Path
  Location of the Calibration Product configuration CSV file.

  Returns