imap-processing 0.7.0__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of imap-processing might be problematic. Click here for more details.

Files changed (124) hide show
  1. imap_processing/__init__.py +1 -1
  2. imap_processing/_version.py +2 -2
  3. imap_processing/ccsds/excel_to_xtce.py +34 -2
  4. imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +1 -1
  5. imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +145 -30
  6. imap_processing/cdf/config/imap_glows_l1b_variable_attrs.yaml +36 -36
  7. imap_processing/cdf/config/imap_hi_variable_attrs.yaml +36 -8
  8. imap_processing/cdf/config/imap_hit_l1b_variable_attrs.yaml +9 -0
  9. imap_processing/cdf/config/imap_idex_global_cdf_attrs.yaml +7 -7
  10. imap_processing/cdf/config/imap_idex_l1a_variable_attrs.yaml +32 -33
  11. imap_processing/cdf/config/imap_mag_l1_variable_attrs.yaml +24 -28
  12. imap_processing/cdf/config/imap_ultra_l1a_variable_attrs.yaml +1 -0
  13. imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +133 -78
  14. imap_processing/cdf/config/imap_variable_schema.yaml +13 -0
  15. imap_processing/cdf/imap_cdf_manager.py +31 -27
  16. imap_processing/cli.py +12 -10
  17. imap_processing/codice/codice_l1a.py +151 -61
  18. imap_processing/codice/constants.py +1 -1
  19. imap_processing/codice/decompress.py +4 -9
  20. imap_processing/codice/utils.py +1 -0
  21. imap_processing/glows/l1b/glows_l1b.py +3 -3
  22. imap_processing/glows/l1b/glows_l1b_data.py +59 -37
  23. imap_processing/glows/l2/glows_l2_data.py +123 -0
  24. imap_processing/hi/l1a/histogram.py +1 -1
  25. imap_processing/hi/l1a/science_direct_event.py +1 -1
  26. imap_processing/hi/l1b/hi_l1b.py +85 -11
  27. imap_processing/hi/l1c/hi_l1c.py +23 -1
  28. imap_processing/hi/utils.py +1 -1
  29. imap_processing/hit/hit_utils.py +221 -0
  30. imap_processing/hit/l0/constants.py +118 -0
  31. imap_processing/hit/l0/decom_hit.py +186 -153
  32. imap_processing/hit/l1a/hit_l1a.py +20 -175
  33. imap_processing/hit/l1b/hit_l1b.py +33 -153
  34. imap_processing/idex/idex_l1a.py +10 -9
  35. imap_processing/lo/l0/decompression_tables/decompression_tables.py +1 -1
  36. imap_processing/lo/l0/lo_science.py +1 -1
  37. imap_processing/lo/packet_definitions/lo_xtce.xml +1 -3296
  38. imap_processing/mag/l0/decom_mag.py +4 -3
  39. imap_processing/mag/l1a/mag_l1a.py +11 -11
  40. imap_processing/mag/l1b/mag_l1b.py +89 -7
  41. imap_processing/spice/geometry.py +126 -4
  42. imap_processing/swapi/l1/swapi_l1.py +1 -1
  43. imap_processing/swapi/l2/swapi_l2.py +1 -1
  44. imap_processing/swe/l1b/swe_l1b_science.py +8 -8
  45. imap_processing/tests/ccsds/test_data/expected_output.xml +1 -0
  46. imap_processing/tests/ccsds/test_excel_to_xtce.py +4 -4
  47. imap_processing/tests/cdf/test_imap_cdf_manager.py +0 -10
  48. imap_processing/tests/codice/conftest.py +1 -17
  49. imap_processing/tests/codice/data/imap_codice_l0_raw_20241110_v001.pkts +0 -0
  50. imap_processing/tests/codice/test_codice_l0.py +8 -2
  51. imap_processing/tests/codice/test_codice_l1a.py +127 -107
  52. imap_processing/tests/codice/test_codice_l1b.py +1 -0
  53. imap_processing/tests/codice/test_decompress.py +7 -7
  54. imap_processing/tests/conftest.py +54 -15
  55. imap_processing/tests/glows/conftest.py +6 -0
  56. imap_processing/tests/glows/test_glows_l1b.py +9 -9
  57. imap_processing/tests/glows/test_glows_l1b_data.py +9 -9
  58. imap_processing/tests/glows/test_glows_l2_data.py +0 -0
  59. imap_processing/tests/hi/test_data/l1a/imap_hi_l1a_45sensor-de_20250415_v000.cdf +0 -0
  60. imap_processing/tests/hi/test_hi_l1b.py +71 -1
  61. imap_processing/tests/hi/test_hi_l1c.py +10 -2
  62. imap_processing/tests/hi/test_utils.py +4 -3
  63. imap_processing/tests/hit/{test_hit_decom.py → test_decom_hit.py} +84 -35
  64. imap_processing/tests/hit/test_hit_l1a.py +2 -197
  65. imap_processing/tests/hit/test_hit_l1b.py +156 -25
  66. imap_processing/tests/hit/test_hit_utils.py +218 -0
  67. imap_processing/tests/idex/conftest.py +1 -1
  68. imap_processing/tests/idex/imap_idex_l0_raw_20231214_v001.pkts +0 -0
  69. imap_processing/tests/idex/impact_14_tof_high_data.txt +4444 -4444
  70. imap_processing/tests/idex/test_idex_l0.py +3 -3
  71. imap_processing/tests/idex/test_idex_l1a.py +1 -1
  72. imap_processing/tests/lo/test_lo_science.py +2 -2
  73. imap_processing/tests/mag/imap_mag_l1a_norm-magi_20251017_v001.cdf +0 -0
  74. imap_processing/tests/mag/test_mag_l1b.py +59 -3
  75. imap_processing/tests/spice/test_data/imap_ena_sim_metakernel.template +3 -1
  76. imap_processing/tests/spice/test_geometry.py +84 -4
  77. imap_processing/tests/swe/conftest.py +33 -0
  78. imap_processing/tests/swe/l1_validation/swe_l0_unpacked-data_20240510_v001_VALIDATION_L1B_v3.dat +4332 -0
  79. imap_processing/tests/swe/test_swe_l1b.py +29 -8
  80. imap_processing/tests/test_utils.py +1 -1
  81. imap_processing/tests/ultra/test_data/l1/dps_exposure_helio_45_E12.cdf +0 -0
  82. imap_processing/tests/ultra/test_data/l1/dps_exposure_helio_45_E24.cdf +0 -0
  83. imap_processing/tests/ultra/unit/test_de.py +108 -0
  84. imap_processing/tests/ultra/unit/test_ultra_l1b.py +27 -3
  85. imap_processing/tests/ultra/unit/test_ultra_l1b_annotated.py +31 -10
  86. imap_processing/tests/ultra/unit/test_ultra_l1b_extended.py +21 -11
  87. imap_processing/tests/ultra/unit/test_ultra_l1c_pset_bins.py +9 -44
  88. imap_processing/ultra/constants.py +8 -3
  89. imap_processing/ultra/l1b/de.py +174 -30
  90. imap_processing/ultra/l1b/ultra_l1b_annotated.py +24 -10
  91. imap_processing/ultra/l1b/ultra_l1b_extended.py +21 -14
  92. imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +70 -119
  93. {imap_processing-0.7.0.dist-info → imap_processing-0.8.0.dist-info}/METADATA +15 -14
  94. {imap_processing-0.7.0.dist-info → imap_processing-0.8.0.dist-info}/RECORD +98 -113
  95. imap_processing/cdf/cdf_attribute_manager.py +0 -322
  96. imap_processing/cdf/config/shared/default_global_cdf_attrs_schema.yaml +0 -246
  97. imap_processing/cdf/config/shared/default_variable_cdf_attrs_schema.yaml +0 -466
  98. imap_processing/hit/l0/data_classes/housekeeping.py +0 -240
  99. imap_processing/hit/l0/data_classes/science_packet.py +0 -259
  100. imap_processing/hit/l0/utils/hit_base.py +0 -57
  101. imap_processing/tests/cdf/shared/default_global_cdf_attrs_schema.yaml +0 -246
  102. imap_processing/tests/cdf/shared/default_variable_cdf_attrs_schema.yaml +0 -466
  103. imap_processing/tests/cdf/test_cdf_attribute_manager.py +0 -353
  104. imap_processing/tests/codice/data/imap_codice_l0_hi-counters-aggregated_20240429_v001.pkts +0 -0
  105. imap_processing/tests/codice/data/imap_codice_l0_hi-counters-singles_20240429_v001.pkts +0 -0
  106. imap_processing/tests/codice/data/imap_codice_l0_hi-omni_20240429_v001.pkts +0 -0
  107. imap_processing/tests/codice/data/imap_codice_l0_hi-pha_20240429_v001.pkts +0 -0
  108. imap_processing/tests/codice/data/imap_codice_l0_hi-sectored_20240429_v001.pkts +0 -0
  109. imap_processing/tests/codice/data/imap_codice_l0_hskp_20100101_v001.pkts +0 -0
  110. imap_processing/tests/codice/data/imap_codice_l0_lo-counters-aggregated_20240429_v001.pkts +0 -0
  111. imap_processing/tests/codice/data/imap_codice_l0_lo-counters-singles_20240429_v001.pkts +0 -0
  112. imap_processing/tests/codice/data/imap_codice_l0_lo-nsw-angular_20240429_v001.pkts +0 -0
  113. imap_processing/tests/codice/data/imap_codice_l0_lo-nsw-priority_20240429_v001.pkts +0 -0
  114. imap_processing/tests/codice/data/imap_codice_l0_lo-nsw-species_20240429_v001.pkts +0 -0
  115. imap_processing/tests/codice/data/imap_codice_l0_lo-pha_20240429_v001.pkts +0 -0
  116. imap_processing/tests/codice/data/imap_codice_l0_lo-sw-angular_20240429_v001.pkts +0 -0
  117. imap_processing/tests/codice/data/imap_codice_l0_lo-sw-priority_20240429_v001.pkts +0 -0
  118. imap_processing/tests/codice/data/imap_codice_l0_lo-sw-species_20240429_v001.pkts +0 -0
  119. imap_processing/tests/idex/imap_idex_l0_raw_20230725_v001.pkts +0 -0
  120. imap_processing/tests/mag/imap_mag_l1a_burst-magi_20231025_v001.cdf +0 -0
  121. /imap_processing/tests/hit/test_data/{imap_hit_l0_hk_20100105_v001.pkts → imap_hit_l0_raw_20100105_v001.pkts} +0 -0
  122. {imap_processing-0.7.0.dist-info → imap_processing-0.8.0.dist-info}/LICENSE +0 -0
  123. {imap_processing-0.7.0.dist-info → imap_processing-0.8.0.dist-info}/WHEEL +0 -0
  124. {imap_processing-0.7.0.dist-info → imap_processing-0.8.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,123 @@
1
"""Module containing the class definition for the HistogramL2 class."""

from dataclasses import InitVar, dataclass, field

import numpy as np


@dataclass
class HistogramL2:
    """
    Dataclass describing Histogram L2 data variables and methods.

    This class collects multiple HistogramL1B classes into one L2 per observational day.

    flight_software_version : str
    number_of_good_l1b_inputs : int
        number of good-time Level-1B times used for generation of Level-2 data
    total_l1b_inputs : int
        number of all Level-1B times for observational day
    start_time : numpy.double
        UTC start time of a given observational day
    end_time : numpy.double
        UTC end time of a given observational day
    daily_lightcurve : numpy.ndarray
        arrays for observational-day-accumulated lightcurve; not an init
        argument — computed in ``__post_init__`` from ``histogram``
    filter_temperature_average : numpy.ndarray
        observational-day-averaged filter temperature [Celsius deg]
    filter_temperature_std_dev : numpy.ndarray
        standard deviation for filter temperature [Celsius deg]
    hv_voltage_average : numpy.ndarray
        observational-day-averaged channeltron voltage [volt]
    hv_voltage_std_dev : numpy.ndarray
        standard deviation for channeltron voltage [volt]
    spin_period_average : numpy.ndarray
        observational-day-averaged spin period [s] (onboard value)
    spin_period_std_dev : numpy.ndarray
        standard deviation for spin period [s]
    pulse_length_average : numpy.ndarray
        observational-day-averaged pulse length [μs]
    pulse_length_std_dev : numpy.ndarray
        standard deviation for pulse length [μs]
    spin_period_ground_average : numpy.ndarray
        observational-day-averaged spin period [s] (ground value)
    spin_period_ground_std_dev : numpy.ndarray
        standard deviation for spin period [s]
    position_angle_offset_average : numpy.ndarray
        observational-day-averaged GLOWS angular offset [deg]
    position_angle_offset_std_dev : numpy.ndarray
        standard deviation for GLOWS angular offset [deg]
    spin_axis_orientation_std_dev : numpy.ndarray
        standard deviation for spin-axis longitude and latitude [deg]
    spacecraft_location_std_dev : numpy.ndarray
        standard deviation for ecliptic coordinates [km] of IMAP
    spacecraft_velocity_std_dev : numpy.ndarray
        standard deviation for IMAP velocity components [km/s]
    spin_axis_orientation_average : numpy.ndarray
        observational-day-averaged spin-axis ecliptic longitude and latitude [deg]
    spacecraft_location_average : numpy.ndarray
        observational-day-averaged Cartesian ecliptic coordinates ⟨X⟩, ⟨Y⟩, ⟨Z⟩ [km]
        of IMAP
    spacecraft_velocity_average : numpy.ndarray
        observational-day-averaged values ⟨VX⟩, ⟨VY⟩, ⟨VZ⟩ of IMAP velocity
        components [km/s] (Cartesian ecliptic frame)
    bad_time_flag_occurrences : numpy.ndarray
        numbers of occurrences of blocks for each bad-time flag during observational day
    histogram : numpy.ndarray
        init-only histogram input of shape (bins,), consumed by
        ``__post_init__`` to build ``daily_lightcurve``; not stored as a field
    """

    flight_software_version: str
    number_of_good_l1b_inputs: int
    total_l1b_inputs: int
    # TODO: unique Level-2 histogram identifier; comes from unique_block_identifier
    # identifier: int
    start_time: np.double
    end_time: np.double
    daily_lightcurve: np.ndarray = field(init=False)
    filter_temperature_average: np.ndarray[np.double]
    filter_temperature_std_dev: np.ndarray[np.double]
    hv_voltage_average: np.ndarray[np.double]
    hv_voltage_std_dev: np.ndarray[np.double]
    spin_period_average: np.ndarray[np.double]
    spin_period_std_dev: np.ndarray[np.double]
    pulse_length_average: np.ndarray[np.double]
    pulse_length_std_dev: np.ndarray[np.double]
    spin_period_ground_average: np.ndarray[np.double]
    spin_period_ground_std_dev: np.ndarray[np.double]
    position_angle_offset_average: np.ndarray[np.double]
    position_angle_offset_std_dev: np.ndarray[np.double]
    spin_axis_orientation_std_dev: np.ndarray[np.double]
    spacecraft_location_std_dev: np.ndarray[np.double]
    spacecraft_velocity_std_dev: np.ndarray[np.double]
    spin_axis_orientation_average: np.ndarray[np.double]
    spacecraft_location_average: np.ndarray[np.double]
    spacecraft_velocity_average: np.ndarray[np.double]
    bad_time_flag_occurrences: np.ndarray
    histogram: InitVar[np.ndarray]

    def __post_init__(self, histogram: np.ndarray) -> None:
        """
        Post-initialization method to generate the daily light curve from one histogram.

        Parameters
        ----------
        histogram : numpy.ndarray
            Histogram data from L1B, of shape (bins,) where bins is nominally 3600.
        """
        self.daily_lightcurve = self.generate_lightcurve(histogram)

    def generate_lightcurve(self, histogram: np.ndarray) -> np.ndarray:
        """
        Given one (bins,) histogram, generate one lightcurve of size (bins).

        TODO: placeholder implementation — currently returns an all-zero
        lightcurve of the nominal 3600-bin size regardless of the input.

        Parameters
        ----------
        histogram : numpy.ndarray
            Histogram data from L1B, of shape (bins,) where bins is nominally 3600.

        Returns
        -------
        numpy.ndarray
            Lightcurve of size (bins).
        """
        return np.zeros(3600)  # type: ignore[no-any-return]
@@ -94,7 +94,7 @@ def allocate_histogram_dataset(num_packets: int) -> xr.Dataset:
94
94
  """
95
95
  attr_mgr = ImapCdfAttributes()
96
96
  attr_mgr.add_instrument_global_attrs(instrument="hi")
97
- attr_mgr.load_variable_attributes("imap_hi_variable_attrs.yaml")
97
+ attr_mgr.add_instrument_variable_attrs(instrument="hi", level=None)
98
98
  # preallocate the xr.DataArrays for all CDF attributes based on number of packets
99
99
  coords = dict()
100
100
  coords["epoch"] = xr.DataArray(
@@ -263,7 +263,7 @@ def create_dataset(de_data_list: list, packet_met_time: list) -> xr.Dataset:
263
263
  # Load the CDF attributes
264
264
  attr_mgr = ImapCdfAttributes()
265
265
  attr_mgr.add_instrument_global_attrs("hi")
266
- attr_mgr.load_variable_attributes("imap_hi_variable_attrs.yaml")
266
+ attr_mgr.add_instrument_variable_attrs(instrument="hi", level=None)
267
267
  # uncomment this once Maxine's PR is merged
268
268
  # attr_mgr.add_global_attribute("Data_version", data_version)
269
269
 
@@ -15,7 +15,12 @@ from imap_processing.hi.utils import (
15
15
  create_dataset_variables,
16
16
  parse_sensor_number,
17
17
  )
18
- from imap_processing.spice.geometry import SpiceFrame, instrument_pointing
18
+ from imap_processing.spice.geometry import (
19
+ SpiceFrame,
20
+ get_instrument_spin_phase,
21
+ get_spacecraft_spin_phase,
22
+ instrument_pointing,
23
+ )
19
24
  from imap_processing.spice.time import j2000ns_to_j2000s
20
25
  from imap_processing.utils import convert_raw_to_eu
21
26
 
@@ -40,7 +45,7 @@ class CoincidenceBitmap(IntEnum):
40
45
  logger = logging.getLogger(__name__)
41
46
  ATTR_MGR = ImapCdfAttributes()
42
47
  ATTR_MGR.add_instrument_global_attrs("hi")
43
- ATTR_MGR.load_variable_attributes("imap_hi_variable_attrs.yaml")
48
+ ATTR_MGR.add_instrument_variable_attrs(instrument="hi", level=None)
44
49
 
45
50
 
46
51
  def hi_l1b(l1a_dataset: xr.Dataset, data_version: str) -> xr.Dataset:
@@ -116,17 +121,16 @@ def annotate_direct_events(l1a_dataset: xr.Dataset) -> xr.Dataset:
116
121
  """
117
122
  l1b_dataset = l1a_dataset.copy()
118
123
  l1b_dataset.update(compute_coincidence_type_and_time_deltas(l1b_dataset))
124
+ l1b_dataset.update(de_nominal_bin_and_spin_phase(l1b_dataset))
119
125
  l1b_dataset.update(compute_hae_coordinates(l1b_dataset))
120
- l1b_de_var_names = [
121
- "esa_energy_step",
122
- "spin_phase",
123
- "quality_flag",
124
- "nominal_bin",
125
- ]
126
- new_data_vars = create_dataset_variables(
127
- l1b_de_var_names, l1b_dataset["epoch"].size, att_manager_lookup_str="hi_de_{0}"
126
+ l1b_dataset.update(de_esa_energy_step(l1b_dataset))
127
+ l1b_dataset.update(
128
+ create_dataset_variables(
129
+ ["quality_flag"],
130
+ l1b_dataset["epoch"].size,
131
+ att_manager_lookup_str="hi_de_{0}",
132
+ )
128
133
  )
129
- l1b_dataset.update(new_data_vars)
130
134
  l1b_dataset = l1b_dataset.drop_vars(
131
135
  ["tof_1", "tof_2", "tof_3", "de_tag", "ccsds_met", "meta_event_met"]
132
136
  )
@@ -258,6 +262,47 @@ def compute_coincidence_type_and_time_deltas(
258
262
  return new_vars
259
263
 
260
264
 
265
def de_nominal_bin_and_spin_phase(dataset: xr.Dataset) -> dict[str, xr.DataArray]:
    """
    Compute nominal bin and instrument spin-phase for each direct event.

    Parameters
    ----------
    dataset : xarray.Dataset
        Direct event data to compute instrument spin-phase for.

    Returns
    -------
    new_vars : dict[str, xarray.DataArray]
        Dictionary containing new "spin_phase" and "nominal_bin" variables.
    """
    new_vars = create_dataset_variables(
        [
            "spin_phase",
            "nominal_bin",
        ],
        len(dataset.epoch),
        att_manager_lookup_str="hi_de_{0}",
    )

    # nominal_bin is the index number of the 90 4-degree bins that each DE would
    # be binned into in the histogram packet. The Hi histogram data is binned by
    # spacecraft spin-phase, not instrument spin-phase, so the same is done here.
    met_query_times = j2000ns_to_j2000s(dataset.event_met.values)
    imap_spin_phase = get_spacecraft_spin_phase(met_query_times)
    # Multiply by 360 deg and divide by the 4-degree bin width to get a bin
    # index.  NOTE(review): assumes spin phase is a unitless fraction in
    # [0, 1) so the index lands in [0, 90) — confirm against
    # get_spacecraft_spin_phase.
    new_vars["nominal_bin"].values = np.asarray(imap_spin_phase * 360 / 4).astype(
        np.uint8
    )

    # Instrument spin-phase is sensor specific; the sensor number (45 or 90)
    # is parsed from the dataset's Logical_source global attribute.
    sensor_number = parse_sensor_number(dataset.attrs["Logical_source"])
    new_vars["spin_phase"].values = np.asarray(
        get_instrument_spin_phase(
            met_query_times, SpiceFrame[f"IMAP_HI_{sensor_number}"]
        )
    ).astype(np.float32)
    return new_vars
304
+
305
+
261
306
  def compute_hae_coordinates(dataset: xr.Dataset) -> dict[str, xr.DataArray]:
262
307
  """
263
308
  Compute HAE latitude and longitude.
@@ -296,3 +341,32 @@ def compute_hae_coordinates(dataset: xr.Dataset) -> dict[str, xr.DataArray]:
296
341
  new_vars["hae_longitude"].values = pointing_coordinates[:, 1]
297
342
 
298
343
  return new_vars
344
+
345
+
346
def de_esa_energy_step(dataset: xr.Dataset) -> dict[str, xr.DataArray]:
    """
    Compute esa_energy_step for each direct event.

    TODO: For now this function just returns the esa_step from the input dataset.
    Eventually, it will take L1B housekeeping data and determine the esa
    energy steps from that data.

    Parameters
    ----------
    dataset : xarray.Dataset
        The partial L1B dataset.

    Returns
    -------
    new_vars : dict[str, xarray.DataArray]
        Keys are variable names and values are `xarray.DataArray`.
    """
    # Allocate the single output variable with the correct CDF attributes.
    energy_vars = create_dataset_variables(
        ["esa_energy_step"],
        len(dataset.epoch),
        att_manager_lookup_str="hi_de_{0}",
    )
    # TODO: Implement this algorithm
    energy_vars["esa_energy_step"].values = dataset.esa_step.values
    return energy_vars
@@ -94,7 +94,7 @@ def allocate_pset_dataset(n_esa_steps: int, sensor_str: str) -> xr.Dataset:
94
94
  """
95
95
  attr_mgr = ImapCdfAttributes()
96
96
  attr_mgr.add_instrument_global_attrs("hi")
97
- attr_mgr.load_variable_attributes("imap_hi_variable_attrs.yaml")
97
+ attr_mgr.add_instrument_variable_attrs(instrument="hi", level=None)
98
98
 
99
99
  # preallocate coordinates xr.DataArrays
100
100
  coords = dict()
@@ -115,6 +115,20 @@ def allocate_pset_dataset(n_esa_steps: int, sensor_str: str) -> xr.Dataset:
115
115
  dims=["esa_energy_step"],
116
116
  attrs=attrs,
117
117
  )
118
+ # TODO: define calibration product number to coincidence type mapping and
119
+ # use the number of calibration products here. I believe it will be 5
120
+ # 0 for any, 1-4, for the number of detector hits.
121
+ n_calibration_prod = 5
122
+ attrs = attr_mgr.get_variable_attributes(
123
+ "hi_pset_calibration_prod", check_schema=False
124
+ ).copy()
125
+ dtype = attrs.pop("dtype")
126
+ coords["calibration_prod"] = xr.DataArray(
127
+ np.arange(n_calibration_prod, dtype=dtype),
128
+ name="calibration_prod",
129
+ dims=["calibration_prod"],
130
+ attrs=attrs,
131
+ )
118
132
  # spin angle bins are 0.1 degree bins for full 360 degree spin
119
133
  attrs = attr_mgr.get_variable_attributes(
120
134
  "hi_pset_spin_angle_bin", check_schema=False
@@ -157,6 +171,14 @@ def allocate_pset_dataset(n_esa_steps: int, sensor_str: str) -> xr.Dataset:
157
171
  "hi_pset_esa_energy_step_label", check_schema=False
158
172
  ),
159
173
  )
174
+ data_vars["calibration_prod_label"] = xr.DataArray(
175
+ coords["calibration_prod"].values.astype(str),
176
+ name="calibration_prod_label",
177
+ dims=["calibration_prod"],
178
+ attrs=attr_mgr.get_variable_attributes(
179
+ "hi_pset_calibration_prod_label", check_schema=False
180
+ ),
181
+ )
160
182
  data_vars["spin_bin_label"] = xr.DataArray(
161
183
  coords["spin_angle_bin"].values.astype(str),
162
184
  name="spin_bin_label",
@@ -174,7 +174,7 @@ def create_dataset_variables(
174
174
  """
175
175
  attr_mgr = ImapCdfAttributes()
176
176
  attr_mgr.add_instrument_global_attrs("hi")
177
- attr_mgr.load_variable_attributes("imap_hi_variable_attrs.yaml")
177
+ attr_mgr.add_instrument_variable_attrs(instrument="hi", level=None)
178
178
 
179
179
  new_variables = dict()
180
180
  for var in variable_names:
@@ -0,0 +1,221 @@
1
+ """
2
+ Classes and functions used in HIT processing.
3
+
4
+ This module contains utility classes and functions that are used by
5
+ HIT processing modules.
6
+ """
7
+
8
+ from enum import IntEnum
9
+
10
+ import numpy as np
11
+ import xarray as xr
12
+
13
+ from imap_processing import imap_module_directory
14
+ from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
15
+ from imap_processing.utils import packet_file_to_datasets
16
+
17
+
18
class HitAPID(IntEnum):
    """
    Application Process IDs (APIDs) for HIT packets.

    Attributes
    ----------
    HIT_HSKP: int
        Housekeeping
    HIT_SCIENCE : int
        Science
    HIT_IALRT : int
        I-ALiRT
    """

    HIT_HSKP = 1251
    HIT_SCIENCE = 1252
    HIT_IALRT = 1253
35
+
36
+
37
def get_datasets_by_apid(
    packet_file: str, derived: bool = False
) -> dict[int, xr.Dataset]:
    """
    Get datasets by APID from a CCSDS packet file.

    Parameters
    ----------
    packet_file : str
        Path to the CCSDS data packet file.
    derived : bool, optional
        Flag to use derived values, by default False.
        Only set to True to get engineering units for L1B
        housekeeping data product.

    Returns
    -------
    datasets_by_apid : dict[int, xr.Dataset]
        Dictionary of xarray datasets by APID.
    """
    # Decommutate the CCSDS file using the HIT XTCE packet definition.
    xtce_definition = (
        imap_module_directory / "hit/packet_definitions/hit_packet_definitions.xml"
    )
    return packet_file_to_datasets(
        packet_file=packet_file,
        xtce_packet_definition=xtce_definition,
        use_derived_value=derived,
    )
67
+
68
+
69
def get_attribute_manager(data_version: str, level: str) -> ImapCdfAttributes:
    """
    Create an attribute manager for the HIT data products.

    Parameters
    ----------
    data_version : str
        Version of the data product being created.
    level : str
        Data level of the product being created.

    Returns
    -------
    attr_mgr : ImapCdfAttributes
        Attribute manager to set CDF attributes.
    """
    # Pre-load the manager with HIT global attributes, the level-specific
    # variable attributes, and the data version global attribute.
    manager = ImapCdfAttributes()
    manager.add_instrument_global_attrs(instrument="hit")
    manager.add_instrument_variable_attrs(instrument="hit", level=level)
    manager.add_global_attribute("Data_version", data_version)
    return manager
91
+
92
+
93
def concatenate_leak_variables(
    dataset: xr.Dataset, adc_channels: xr.DataArray
) -> xr.Dataset:
    """
    Concatenate leak variables in the dataset.

    Updates the housekeeping dataset to replace the individual
    leak_i_00, leak_i_01, ..., leak_i_63 variables with a single
    leak_i variable as a 2D array. "i" here represents current
    in the leakage current [Voltage] data.

    Parameters
    ----------
    dataset : xarray.Dataset
        Dataset containing 64 leak variables.
    adc_channels : xarray.DataArray
        DataArray to be used as a dimension for the concatenated leak variables.

    Returns
    -------
    dataset : xarray.Dataset
        Updated dataset with concatenated leak variables.
    """
    # Names of the 64 per-channel leakage variables (leak_i_00 ... leak_i_63).
    leak_names = [f"leak_i_{channel:02d}" for channel in range(64)]

    # Combine the per-channel variables along the adc_channels dimension,
    # keeping epoch as the leading dimension of the resulting 2D array.
    combined = xr.concat(
        [dataset[name] for name in leak_names], dim=adc_channels
    ).transpose("epoch", "adc_channels")
    dataset["leak_i"] = combined

    # The individual per-channel variables are now redundant.
    return dataset.drop_vars(leak_names)
129
+
130
+
131
def process_housekeeping_data(
    dataset: xr.Dataset, attr_mgr: ImapCdfAttributes, logical_source: str
) -> xr.Dataset:
    """
    Will process housekeeping dataset for CDF product.

    Updates the housekeeping dataset with a single 2D leak_i
    variable. Also updates the dataset attributes, coordinates
    and data variable dimensions according to specifications in
    a cdf yaml file. This function is used for both L1A and L1B
    housekeeping data products.

    Parameters
    ----------
    dataset : xarray.Dataset
        Dataset containing HIT housekeeping data.

    attr_mgr : ImapCdfAttributes
        Attribute manager used to get the data product field's attributes.

    logical_source : str
        Logical source of the data -> imap_hit_l1a_hk or imap_hit_l1b_hk.

    Returns
    -------
    dataset : xarray.Dataset
        An updated dataset ready for CDF conversion.
    """
    # Drop keys that are not CDF data variables (CCSDS header fields and
    # spare/padding fields from the packet definition)
    drop_keys = [
        "pkt_apid",
        "sc_tick",
        "version",
        "type",
        "sec_hdr_flg",
        "seq_flgs",
        "src_seq_ctr",
        "pkt_len",
        "hskp_spare1",
        "hskp_spare2",
        "hskp_spare3",
        "hskp_spare4",
        "hskp_spare5",
    ]

    # Drop variables not needed for CDF
    dataset = dataset.drop_vars(drop_keys)

    # Create data arrays for dependencies
    adc_channels = xr.DataArray(
        np.arange(64, dtype=np.uint8),
        name="adc_channels",
        dims=["adc_channels"],
        attrs=attr_mgr.get_variable_attributes("adc_channels"),
    )

    # NOTE: LABL_PTR_1 should be CDF_CHAR.
    # NOTE(review): the label variable is given its own "adc_channels_label"
    # dimension rather than sharing "adc_channels" — confirm this matches the
    # CDF yaml specification.
    adc_channels_label = xr.DataArray(
        adc_channels.values.astype(str),
        name="adc_channels_label",
        dims=["adc_channels_label"],
        attrs=attr_mgr.get_variable_attributes("adc_channels_label"),
    )

    # Update dataset coordinates and attributes
    dataset = dataset.assign_coords(
        {
            "adc_channels": adc_channels,
            "adc_channels_label": adc_channels_label,
        }
    )
    dataset.attrs = attr_mgr.get_global_attributes(logical_source)

    # Stack 64 leak variables (leak_00, leak_01, ..., leak_63)
    dataset = concatenate_leak_variables(dataset, adc_channels)

    # Assign attributes and dimensions to each data array in the Dataset
    for field in dataset.data_vars.keys():
        # Create a dict of dimensions using the DEPEND_I keys in the
        # attributes
        dims = {
            key: value
            for key, value in attr_mgr.get_variable_attributes(field).items()
            if "DEPEND" in key
        }
        dataset[field].attrs = attr_mgr.get_variable_attributes(field)
        # NOTE(review): assign_coords returns a new object and the result is
        # discarded here, so this call has no effect on `dataset` — confirm
        # whether `dataset[field] = dataset[field].assign_coords(dims)` was
        # intended.
        dataset[field].assign_coords(dims)

    dataset.epoch.attrs = attr_mgr.get_variable_attributes("epoch")

    return dataset
@@ -0,0 +1,118 @@
1
"""HIT L0 constants for data decommutation."""

from collections import namedtuple

import numpy as np

# Species / energy-range lookup table.
# NOTE(review): keys are presumably a mod-10 value of a rate index — confirm
# against the HIT algorithm document.
# energy_units: MeV/n
MOD_10_MAPPING = {
    0: {"species": "H", "energy_min": 1.8, "energy_max": 3.6},
    1: {"species": "H", "energy_min": 4, "energy_max": 6},
    2: {"species": "H", "energy_min": 6, "energy_max": 10},
    3: {"species": "4He", "energy_min": 4, "energy_max": 6},
    4: {"species": "4He", "energy_min": 6, "energy_max": 12},
    5: {"species": "CNO", "energy_min": 4, "energy_max": 6},
    6: {"species": "CNO", "energy_min": 6, "energy_max": 12},
    7: {"species": "NeMgSi", "energy_min": 4, "energy_max": 6},
    8: {"species": "NeMgSi", "energy_min": 6, "energy_max": 12},
    9: {"species": "Fe", "energy_min": 4, "energy_max": 12},
}

# Structure to hold binary details for a
# section of science data. Used to unpack
# binary data.
# bit_length: bits per value; section_length: total bits for the section
# (bit_length times the number of values); shape: array shape of the values.
HITPacking = namedtuple(
    "HITPacking",
    [
        "bit_length",
        "section_length",
        "shape",
    ],
)

# Define data structure for counts rates data
COUNTS_DATA_STRUCTURE = {
    # field: bit_length, section_length, shape
    # ------------------------------------------
    # science frame header
    "hdr_unit_num": HITPacking(2, 2, (1,)),
    "hdr_frame_version": HITPacking(6, 6, (1,)),
    "hdr_dynamic_threshold_state": HITPacking(2, 2, (1,)),
    "hdr_leak_conv": HITPacking(1, 1, (1,)),
    "hdr_heater_duty_cycle": HITPacking(4, 4, (1,)),
    "hdr_code_ok": HITPacking(1, 1, (1,)),
    "hdr_minute_cnt": HITPacking(8, 8, (1,)),
    # ------------------------------------------
    # spare bits. Contains no data
    "spare": HITPacking(24, 24, (1,)),
    # ------------------------------------------
    # erates - contains livetime counters
    "livetime": HITPacking(16, 16, (1,)),  # livetime counter
    "num_trig": HITPacking(16, 16, (1,)),  # number of triggers
    "num_reject": HITPacking(16, 16, (1,)),  # number of rejected events
    "num_acc_w_pha": HITPacking(
        16, 16, (1,)
    ),  # number of accepted events with PHA data
    "num_acc_no_pha": HITPacking(16, 16, (1,)),  # number of events without PHA data
    "num_haz_trig": HITPacking(16, 16, (1,)),  # number of triggers with hazard flag
    "num_haz_reject": HITPacking(
        16, 16, (1,)
    ),  # number of rejected events with hazard flag
    "num_haz_acc_w_pha": HITPacking(
        16, 16, (1,)
    ),  # number of accepted hazard events with PHA data
    "num_haz_acc_no_pha": HITPacking(
        16, 16, (1,)
    ),  # number of hazard events without PHA data
    # -------------------------------------------
    "sngrates": HITPacking(16, 1856, (2, 58)),  # single rates
    # -------------------------------------------
    # evprates - contains event processing rates
    "nread": HITPacking(16, 16, (1,)),  # events read from event fifo
    "nhazard": HITPacking(16, 16, (1,)),  # events tagged with hazard flag
    "nadcstim": HITPacking(16, 16, (1,)),  # adc-stim events
    "nodd": HITPacking(16, 16, (1,)),  # odd events
    "noddfix": HITPacking(16, 16, (1,)),  # odd events that were fixed in sw
    "nmulti": HITPacking(
        16, 16, (1,)
    ),  # events with multiple hits in a single detector
    "nmultifix": HITPacking(16, 16, (1,)),  # multi events that were fixed in sw
    "nbadtraj": HITPacking(16, 16, (1,)),  # bad trajectory
    "nl2": HITPacking(16, 16, (1,)),  # events sorted into L12 event category
    "nl3": HITPacking(16, 16, (1,)),  # events sorted into L123 event category
    "nl4": HITPacking(16, 16, (1,)),  # events sorted into L1423 event category
    "npen": HITPacking(16, 16, (1,)),  # events sorted into penetrating event category
    "nformat": HITPacking(16, 16, (1,)),  # nothing currently goes in this slot
    "naside": HITPacking(16, 16, (1,)),  # A-side events
    "nbside": HITPacking(16, 16, (1,)),  # B-side events
    "nerror": HITPacking(16, 16, (1,)),  # events that caused a processing error
    "nbadtags": HITPacking(
        16, 16, (1,)
    ),  # events with inconsistent tags vs pulse heights
    # -------------------------------------------
    # other count rates
    "coinrates": HITPacking(16, 416, (26,)),  # coincidence rates
    "bufrates": HITPacking(16, 512, (32,)),  # priority buffer rates
    "l2fgrates": HITPacking(16, 2112, (132,)),  # range 2 foreground rates
    "l2bgrates": HITPacking(16, 192, (12,)),  # range 2 background rates
    "l3fgrates": HITPacking(16, 2672, (167,)),  # range 3 foreground rates
    "l3bgrates": HITPacking(16, 192, (12,)),  # range 3 background rates
    "penfgrates": HITPacking(16, 528, (33,)),  # range 4 foreground rates
    "penbgrates": HITPacking(16, 240, (15,)),  # range 4 background rates
    "ialirtrates": HITPacking(16, 320, (20,)),  # ialirt rates
    "sectorates": HITPacking(16, 1920, (8, 15)),  # sectored rates
    "l4fgrates": HITPacking(16, 768, (48,)),  # all range foreground rates
    "l4bgrates": HITPacking(16, 384, (24,)),  # all range background rates
}


# Define the pattern of grouping flags in a complete science frame.
# NOTE(review): presumably CCSDS grouping flags — 1 = first packet of the
# group, 0 = continuation, 2 = last packet — confirm with the packet
# definition.
FLAG_PATTERN = np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2])

# Define size of science frame (num of packets)
FRAME_SIZE = len(FLAG_PATTERN)

# Define the number of bits in the mantissa and exponent for
# decompressing data
MANTISSA_BITS = 12
EXPONENT_BITS = 4