imap-processing 0.16.1-py3-none-any.whl → 0.17.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of imap-processing might be problematic.
Files changed (46)
  1. imap_processing/_version.py +2 -2
  2. imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +24 -0
  3. imap_processing/cdf/config/imap_codice_l1b_variable_attrs.yaml +24 -0
  4. imap_processing/cdf/config/imap_hi_variable_attrs.yaml +8 -8
  5. imap_processing/cdf/config/imap_hit_global_cdf_attrs.yaml +1 -1
  6. imap_processing/cdf/config/imap_hit_l2_variable_attrs.yaml +394 -411
  7. imap_processing/cdf/config/imap_idex_global_cdf_attrs.yaml +9 -9
  8. imap_processing/cdf/config/imap_idex_l2b_variable_attrs.yaml +150 -57
  9. imap_processing/cdf/config/imap_swapi_variable_attrs.yaml +19 -0
  10. imap_processing/cdf/config/imap_swe_l1b_variable_attrs.yaml +20 -0
  11. imap_processing/cdf/config/imap_swe_l2_variable_attrs.yaml +39 -0
  12. imap_processing/cdf/config/imap_ultra_global_cdf_attrs.yaml +108 -0
  13. imap_processing/cdf/config/imap_ultra_l1a_variable_attrs.yaml +103 -2
  14. imap_processing/cdf/utils.py +7 -1
  15. imap_processing/cli.py +14 -8
  16. imap_processing/codice/codice_l1a.py +89 -30
  17. imap_processing/hi/hi_l1a.py +4 -4
  18. imap_processing/hi/hi_l1b.py +2 -2
  19. imap_processing/hi/packet_definitions/TLM_HI_COMBINED_SCI.xml +218 -38
  20. imap_processing/hit/hit_utils.py +2 -2
  21. imap_processing/hit/l0/decom_hit.py +2 -1
  22. imap_processing/hit/l2/hit_l2.py +2 -1
  23. imap_processing/ialirt/l0/process_codice.py +4 -34
  24. imap_processing/idex/idex_constants.py +7 -0
  25. imap_processing/idex/idex_l2b.py +372 -55
  26. imap_processing/lo/l0/lo_star_sensor.py +48 -0
  27. imap_processing/lo/l1a/lo_l1a.py +32 -32
  28. imap_processing/mag/l0/decom_mag.py +9 -6
  29. imap_processing/mag/l0/mag_l0_data.py +46 -0
  30. imap_processing/swapi/l1/swapi_l1.py +12 -2
  31. imap_processing/swapi/l2/swapi_l2.py +7 -6
  32. imap_processing/swe/l1b/swe_l1b.py +9 -0
  33. imap_processing/swe/l2/swe_l2.py +111 -17
  34. imap_processing/ultra/l0/decom_tools.py +13 -6
  35. imap_processing/ultra/l0/decom_ultra.py +190 -4
  36. imap_processing/ultra/l0/ultra_utils.py +184 -3
  37. imap_processing/ultra/l1a/ultra_l1a.py +52 -4
  38. imap_processing/ultra/packet_definitions/ULTRA_SCI_COMBINED.xml +3 -3
  39. imap_processing/utils.py +20 -42
  40. {imap_processing-0.16.1.dist-info → imap_processing-0.17.0.dist-info}/METADATA +1 -1
  41. {imap_processing-0.16.1.dist-info → imap_processing-0.17.0.dist-info}/RECORD +44 -45
  42. imap_processing/lo/l0/data_classes/star_sensor.py +0 -98
  43. imap_processing/lo/l0/utils/lo_base.py +0 -57
  44. {imap_processing-0.16.1.dist-info → imap_processing-0.17.0.dist-info}/LICENSE +0 -0
  45. {imap_processing-0.16.1.dist-info → imap_processing-0.17.0.dist-info}/WHEEL +0 -0
  46. {imap_processing-0.16.1.dist-info → imap_processing-0.17.0.dist-info}/entry_points.txt +0 -0
@@ -15,6 +15,7 @@ from imap_processing.lo.l0.lo_science import (
     parse_events,
     parse_histogram,
 )
+from imap_processing.lo.l0.lo_star_sensor import process_star_sensor
 from imap_processing.utils import convert_to_binary_string, packet_file_to_datasets
 
 logger = logging.getLogger(__name__)
@@ -50,60 +51,59 @@ def lo_l1a(dependency: Path) -> list[xr.Dataset]:
     attr_mgr.add_instrument_global_attrs(instrument="lo")
     attr_mgr.add_instrument_variable_attrs(instrument="lo", level="l1a")
 
+    datasets_to_return = []
+
     if LoAPID.ILO_SPIN in datasets_by_apid:
         logger.info(
             f"\nProcessing {LoAPID(LoAPID.ILO_SPIN).name} "
             f"packet (APID: {LoAPID.ILO_SPIN.value})"
         )
         logical_source = "imap_lo_l1a_spin"
-        datasets_by_apid[LoAPID.ILO_SPIN] = organize_spin_data(
-            datasets_by_apid[LoAPID.ILO_SPIN], attr_mgr
-        )
-
-        datasets_by_apid[LoAPID.ILO_SPIN] = add_dataset_attrs(
-            datasets_by_apid[LoAPID.ILO_SPIN], attr_mgr, logical_source
-        )
+        ds = datasets_by_apid[LoAPID.ILO_SPIN]
+        ds = organize_spin_data(ds, attr_mgr)
+        ds = add_dataset_attrs(ds, attr_mgr, logical_source)
+        datasets_to_return.append(ds)
     if LoAPID.ILO_SCI_CNT in datasets_by_apid:
         logger.info(
             f"\nProcessing {LoAPID(LoAPID.ILO_SCI_CNT).name} "
             f"packet (APID: {LoAPID.ILO_SCI_CNT.value})"
         )
         logical_source = "imap_lo_l1a_histogram"
-        datasets_by_apid[LoAPID.ILO_SCI_CNT] = parse_histogram(
-            datasets_by_apid[LoAPID.ILO_SCI_CNT], attr_mgr
-        )
-        datasets_by_apid[LoAPID.ILO_SCI_CNT] = add_dataset_attrs(
-            datasets_by_apid[LoAPID.ILO_SCI_CNT], attr_mgr, logical_source
-        )
+        ds = datasets_by_apid[LoAPID.ILO_SCI_CNT]
+        ds = parse_histogram(ds, attr_mgr)
+        ds = add_dataset_attrs(ds, attr_mgr, logical_source)
+        datasets_to_return.append(ds)
     if LoAPID.ILO_SCI_DE in datasets_by_apid:
         logger.info(
             f"\nProcessing {LoAPID(LoAPID.ILO_SCI_DE).name} "
             f"packet (APID: {LoAPID.ILO_SCI_DE.value})"
         )
         logical_source = "imap_lo_l1a_de"
-        datasets_by_apid[LoAPID.ILO_SCI_DE]["data"] = xr.DataArray(
-            [
-                convert_to_binary_string(data)
-                for data in datasets_by_apid[LoAPID.ILO_SCI_DE]["data"].values
-            ],
-            dims=datasets_by_apid[LoAPID.ILO_SCI_DE]["data"].dims,
-            attrs=datasets_by_apid[LoAPID.ILO_SCI_DE]["data"].attrs,
-        )
-
-        datasets_by_apid[LoAPID.ILO_SCI_DE] = combine_segmented_packets(
-            datasets_by_apid[LoAPID.ILO_SCI_DE]
+        ds = datasets_by_apid[LoAPID.ILO_SCI_DE]
+        # Process the "data" array into a string
+        ds["data"] = xr.DataArray(
+            [convert_to_binary_string(data) for data in ds["data"].values],
+            dims=ds["data"].dims,
+            attrs=ds["data"].attrs,
         )
 
-        datasets_by_apid[LoAPID.ILO_SCI_DE] = parse_events(
-            datasets_by_apid[LoAPID.ILO_SCI_DE], attr_mgr
-        )
-        datasets_by_apid[LoAPID.ILO_SCI_DE] = add_dataset_attrs(
-            datasets_by_apid[LoAPID.ILO_SCI_DE], attr_mgr, logical_source
-        )
+        ds = combine_segmented_packets(ds)
+        ds = parse_events(ds, attr_mgr)
+        ds = add_dataset_attrs(ds, attr_mgr, logical_source)
+        datasets_to_return.append(ds)
+    if LoAPID.ILO_STAR in datasets_by_apid:
+        logger.info(
+            f"\nProcessing {LoAPID(LoAPID.ILO_STAR).name} "
+            f"packet (APID: {LoAPID.ILO_STAR.value})"
+        )
+        logical_source = "imap_lo_l1a_star"
+        ds = datasets_by_apid[LoAPID.ILO_STAR]
+        ds = process_star_sensor(ds)
+        ds = add_dataset_attrs(ds, attr_mgr, logical_source)
+        datasets_to_return.append(ds)
 
-    good_apids = [LoAPID.ILO_SPIN, LoAPID.ILO_SCI_CNT, LoAPID.ILO_SCI_DE]
-    logger.info(f"\nReturning datasets: {[LoAPID(apid) for apid in good_apids]}")
-    return [datasets_by_apid[good_apid] for good_apid in good_apids]
+    logger.info(f"Returning [{len(datasets_to_return)}] datasets")
+    return datasets_to_return
 
 
 def add_dataset_attrs(
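The control-flow change above is worth calling out: the old code always returned the three hard-coded good_apids, so a packet file missing one of them would fail on lookup, while the new code appends each dataset as it is processed and now also handles ILO_STAR. A toy standalone sketch of the pattern (stand-in strings, not the real xarray datasets):

    # Only APIDs actually present in the file are processed and returned.
    datasets_by_apid = {"ILO_SPIN": "spin-ds", "ILO_STAR": "star-ds"}
    datasets_to_return = []
    for apid in ("ILO_SPIN", "ILO_SCI_CNT", "ILO_SCI_DE", "ILO_STAR"):
        if apid in datasets_by_apid:
            datasets_to_return.append(datasets_by_apid[apid])
    print(len(datasets_to_return))  # 2, with no KeyError for the missing APIDs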
@@ -43,8 +43,9 @@ def decom_packets(packet_file_path: str | Path) -> dict[str, list[MagL0]]:
 
     packet_definition = definitions.XtcePacketDefinition(xtce_document)
 
-    norm_data = []
-    burst_data = []
+    # Store in a dict for de-duplication. Only the keys are returned as a list.
+    norm_dict: dict[MagL0, None] = {}
+    burst_dict: dict[MagL0, None] = {}
 
     with open(packet_file_path, "rb") as binary_data:
         mag_packets = packet_definition.packet_generator(binary_data)
@@ -53,12 +54,14 @@ def decom_packets(packet_file_path: str | Path) -> dict[str, list[MagL0]]:
             apid = packet["PKT_APID"]
             if apid in (Mode.BURST, Mode.NORMAL):
                 values = [item.raw_value for item in packet.user_data.values()]
+                mag_l0 = MagL0(CcsdsData(packet.header), *values)
                 if apid == Mode.NORMAL:
-                    norm_data.append(MagL0(CcsdsData(packet.header), *values))
-                else:
-                    burst_data.append(MagL0(CcsdsData(packet.header), *values))
+                    if mag_l0 not in norm_dict:
+                        norm_dict[mag_l0] = None
+                elif mag_l0 not in burst_dict:
+                    burst_dict[mag_l0] = None
 
-    return {"norm": norm_data, "burst": burst_data}
+    return {"norm": list(norm_dict.keys()), "burst": list(burst_dict.keys())}
 
 
 def generate_dataset(
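Replacing the two lists with dict[MagL0, None] is the standard dict-as-ordered-set trick: dict keys preserve insertion order (Python 3.7+) and de-duplicate via __eq__/__hash__, which the MagL0 changes below supply. A minimal sketch with plain integers:

    # Dict keys de-duplicate while keeping first-seen order.
    seen: dict[int, None] = {}
    for value in [3, 1, 3, 2, 1]:
        if value not in seen:
            seen[value] = None
    print(list(seen.keys()))  # [3, 1, 2]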
@@ -31,6 +31,9 @@ class MagL0:
     """
     Data class for MAG Level 0 data.
 
+    No attributes should be updated after creation. This class acts as a snapshot
+    of the packet data, and is read-only.
+
     Attributes
     ----------
     ccsds_header: CcsdsData
@@ -116,3 +119,46 @@ class MagL0:
 
         self.PRI_VECSEC = 2**self.PRI_VECSEC
         self.SEC_VECSEC = 2**self.SEC_VECSEC
+
+    def __eq__(self, other: object) -> bool:
+        """
+        Compare two MagL0 objects for equality.
+
+        Two objects are said to be equal if the SHCOARSE, APID, and SRC_SEQ_CTR are
+        equal.
+
+        Parameters
+        ----------
+        other : object
+            The other MagL0 object to compare against.
+
+        Returns
+        -------
+        bool
+            True if the objects are equal, False otherwise.
+        """
+        if not isinstance(other, MagL0):
+            return NotImplemented
+
+        return (
+            self.SHCOARSE == other.SHCOARSE
+            and self.ccsds_header.PKT_APID == other.ccsds_header.PKT_APID
+            and self.ccsds_header.SRC_SEQ_CTR == other.ccsds_header.SRC_SEQ_CTR
+        )
+
+    def __hash__(self) -> int:
+        """
+        Return a hash of the MagL0 object for use in sets.
+
+        Returns
+        -------
+        int
+            The hash value of the MagL0 object.
+        """
+        return hash(
+            (
+                self.SHCOARSE,
+                self.ccsds_header.PKT_APID,
+                self.ccsds_header.SRC_SEQ_CTR,
+            )
+        )
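Note the contract at work here: __eq__ and __hash__ key on the same three fields, and the "read-only" note added to the docstring above matters for it, since mutating a field used in __hash__ after the object is inserted into a dict would strand it under a stale hash. A standalone sketch of the same contract (illustrative class, not MagL0 itself):

    class PacketKey:
        """Illustrative stand-in keyed the same way as MagL0."""

        def __init__(self, shcoarse: int, apid: int, src_seq_ctr: int) -> None:
            self.shcoarse = shcoarse
            self.apid = apid
            self.src_seq_ctr = src_seq_ctr

        def __eq__(self, other: object) -> bool:
            if not isinstance(other, PacketKey):
                return NotImplemented
            return (self.shcoarse, self.apid, self.src_seq_ctr) == (
                other.shcoarse,
                other.apid,
                other.src_seq_ctr,
            )

        def __hash__(self) -> int:
            return hash((self.shcoarse, self.apid, self.src_seq_ctr))

    dupes = {PacketKey(10, 1052, 5): None, PacketKey(10, 1052, 5): None}
    print(len(dupes))  # 1, equal keys collapse to a single entry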
@@ -558,8 +558,11 @@ def process_swapi_science(
     # Step 3: Create xarray.Dataset
     # ===================================================================
 
-    # epoch time. Should be same dimension as number of good sweeps
-    epoch_values = good_sweep_sci["epoch"].data.reshape(total_full_sweeps, 12)[:, 0]
+    # epoch time. Should be same dimension as number of good sweeps.
+    # Use center time for epoch to line up with mission requests. The center
+    # time of a SWAPI sweep is the creation time of the 7th packet
+    # (aka SEQ_NUMBER == 6), at the beginning of that packet.
+    epoch_values = good_sweep_sci["epoch"].data.reshape(total_full_sweeps, 12)[:, 6]
 
     epoch_time = xr.DataArray(
         epoch_values,
@@ -628,6 +631,13 @@ def process_swapi_science(
         dims=["epoch"],
         attrs=cdf_manager.get_variable_attributes("plan_id"),
     )
+    # Store start time for L3 purposes per SWAPI requests
+    dataset["sci_start_time"] = xr.DataArray(
+        good_sweep_sci["epoch"].data.reshape(total_full_sweeps, 12)[:, 0],
+        name="sci_start_time",
+        dims=["epoch"],
+        attrs=cdf_manager.get_variable_attributes("sci_start_time"),
+    )
     # Add ESA_LVL5 for L2 and L3 purposes.
     # We need to store ESA_LVL5 at SEQ_NUMBER==11
     # which is 71 energy step's ESA_LVL5 value. ESA_LVL5 gets
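Both new time variables come from the same (sweeps, 12) reshape of the packet epochs: column 6 is the 7th packet (the sweep center now used as epoch) and column 0 is the 1st packet (kept as sci_start_time). A toy sketch of the indexing:

    import numpy as np

    # Two 12-packet sweeps with fake, monotonically increasing epochs.
    epochs = np.arange(24)
    sweeps = epochs.reshape(2, 12)
    center_times = sweeps[:, 6]  # epoch: 7th packet of each sweep -> [6, 18]
    start_times = sweeps[:, 0]   # sci_start_time: 1st packet     -> [0, 12]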
@@ -149,15 +149,16 @@ def swapi_l2(
     # Copy over only certain variables from L1 to L2 dataset
     l1_data_keys = [
         "epoch",
+        "esa_lvl5",
         "esa_step",
         "esa_step_label",
-        "swp_l1a_flags",
-        "sweep_table",
-        "plan_id",
-        "lut_choice",
-        "fpga_type",
         "fpga_rev",
-        "esa_lvl5",
+        "fpga_type",
+        "lut_choice",
+        "plan_id",
+        "sci_start_time",
+        "sweep_table",
+        "swp_l1a_flags",
     ]
     l2_dataset = l1_dataset[l1_data_keys]
 
@@ -761,6 +761,10 @@ def swe_l1b_science(dependencies: ProcessingInputCollection) -> xr.Dataset:
 
     count_rate = convert_counts_to_rate(inflight_applied_count, acq_duration)
 
+    # Statistical uncertainty is sqrt(decompressed counts)
+    # TODO: Update this if SWE would like to include deadtime correction.
+    counts_stat_uncert = np.sqrt(populated_data["science_data"])
+
     # Store ESA energies of full cycle for L2 purposes.
     esa_energies = get_esa_energy_pattern(esa_lut_files[0])
     # Repeat energies to be in the same shape as the science data
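The sqrt comes from Poisson counting statistics: for N independent counts, the one-sigma statistical uncertainty is sqrt(N), so the relative uncertainty shrinks as counts grow. A quick numerical illustration:

    import numpy as np

    counts = np.array([0.0, 100.0, 10000.0])
    uncert = np.sqrt(counts)  # [0., 10., 100.]
    relative = np.divide(uncert, counts, out=np.zeros_like(counts), where=counts > 0)
    print(relative)  # [0.   0.1  0.01], i.e. 1% at 10k counts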
@@ -894,6 +898,11 @@ def swe_l1b_science(dependencies: ProcessingInputCollection) -> xr.Dataset:
         dims=["epoch", "esa_step", "spin_sector", "cem_id"],
         attrs=cdf_attrs.get_variable_attributes("science_data"),
     )
+    science_dataset["counts_stat_uncert"] = xr.DataArray(
+        counts_stat_uncert,
+        dims=["epoch", "esa_step", "spin_sector", "cem_id"],
+        attrs=cdf_attrs.get_variable_attributes("counts_stat_uncert"),
+    )
     science_dataset["acquisition_time"] = xr.DataArray(
         acq_time,
         dims=["epoch", "esa_step", "spin_sector"],
@@ -14,9 +14,11 @@ from imap_processing.spice.spin import get_instrument_spin_phase, get_spin_angle
 from imap_processing.swe.utils import swe_constants
 
 
-def calculate_phase_space_density(l1b_dataset: xr.Dataset) -> npt.NDArray:
+def calculate_phase_space_density(
+    data: np.ndarray, particle_energy_data: np.ndarray
+) -> npt.NDArray:
     """
-    Convert counts to phase space density.
+    Convert counts or uncertainty data to phase space density.
 
     The calculated phase space density is represented by the symbol fv.
     Its unit is s^3 / (cm^6 * ster).
@@ -49,8 +51,11 @@ def calculate_phase_space_density(
 
     Parameters
     ----------
-    l1b_dataset : xarray.Dataset
-        The L1B dataset to process.
+    data : numpy.ndarray
+        The data to process. The two expected inputs are counts or uncertainty data.
+    particle_energy_data : numpy.ndarray
+        The energy values in eV. These are the energy values from the
+        "esa_energy" variable in the L1B dataset.
 
     Returns
     -------
@@ -58,18 +63,14 @@ def calculate_phase_space_density(
         Phase space density. We need to call this phase space density because
         there will be density in L3 processing.
     """
-    # Get energy values.
-    particle_energy_data = l1b_dataset["esa_energy"].values
-
     # Calculate phase space density using formula:
-    # 2 * (C/tau) / (G * 1.237e31 * eV^2)
+    # 2 * ((C/tau) or uncertainty data) / (G * 1.237e31 * eV^2)
     # See doc string for more details.
-    density = (2 * l1b_dataset["science_data"]) / (
+    phase_space_density = (2 * data) / (
         swe_constants.GEOMETRIC_FACTORS[np.newaxis, np.newaxis, np.newaxis, :]
         * swe_constants.VELOCITY_CONVERSION_FACTOR
         * particle_energy_data[:, :, :, np.newaxis] ** 2
     )
-    phase_space_density = density.data
 
     return phase_space_density
 
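The signature change makes the function reusable for both counts and their uncertainties, since the formula is a pure array operation. The broadcasting relies on geometric factors being per-CEM while the energies lack a CEM axis; a shape-only sketch with placeholder sizes and values (the real constants live in swe_constants):

    import numpy as np

    n_epoch, n_esa, n_spin, n_cem = 2, 24, 30, 7  # placeholder sizes
    data = np.ones((n_epoch, n_esa, n_spin, n_cem))      # counts or uncertainty
    energies = np.full((n_epoch, n_esa, n_spin), 100.0)  # eV, placeholder
    geometric_factors = np.full(n_cem, 1e-5)             # per-CEM, placeholder

    # 2 * data / (G * 1.237e31 * eV^2), as in the diff's comment.
    psd = (2 * data) / (
        geometric_factors[np.newaxis, np.newaxis, np.newaxis, :]
        * 1.237e31
        * energies[:, :, :, np.newaxis] ** 2
    )
    print(psd.shape)  # (2, 24, 30, 7)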
@@ -114,7 +115,7 @@ def calculate_flux(
     Parameters
     ----------
     phase_space_density : numpy.ndarray
-        The phase space density.
+        The phase space density of counts or uncertainty data.
     esa_energy : numpy.ndarray
         The energy values in eV.
 
@@ -131,6 +132,70 @@ def put_data_into_angle_bins(
     return flux
 
 
+def put_uncertainty_into_angle_bins(
+    data: np.ndarray, angle_bin_indices: npt.NDArray[np.int_]
+) -> npt.NDArray:
+    """
+    Put uncertainty data in its angle bins.
+
+    This function bins uncertainty data into 30 predefined angle bins
+    while preserving the original energy step structure.
+
+    Since multiple data points can fall into the same angle bin,
+    this function computes the combined uncertainty for the bin.
+
+    Parameters
+    ----------
+    data : numpy.ndarray
+        Uncertainty data to put in bins. Shape:
+        (full_cycle_data, N_ESA_STEPS, N_ANGLE_BINS, N_CEMS).
+    angle_bin_indices : numpy.ndarray
+        Indices of angle bins to put data in. Shape:
+        (full_cycle_data, N_ESA_STEPS, N_ANGLE_BINS).
+
+    Returns
+    -------
+    numpy.ndarray
+        Data in bins. Shape:
+        (full_cycle_data, N_ESA_STEPS, N_ANGLE_BINS, N_CEMS).
+    """
+    # Initialize with zeros instead of NaN because np.add.at() does not
+    # work with NaN values: nan + value = nan.
+    binned_data = np.zeros(
+        (
+            data.shape[0],
+            swe_constants.N_ESA_STEPS,
+            swe_constants.N_ANGLE_BINS,
+            swe_constants.N_CEMS,
+        ),
+        dtype=np.float64,
+    )
+
+    time_indices = np.arange(data.shape[0])[:, None, None]
+    energy_indices = np.arange(swe_constants.N_ESA_STEPS)[None, :, None]
+
+    # Calculate the new uncertainty of the data in each bin.
+    # Per SWE instruction:
+    # At L1B, 'data' is the result of sqrt(counts). Now in L2, combine the
+    # uncertainty data using this formula:
+    # sqrt(
+    #     sum(
+    #         (unc_1) ** 2 + (unc_2) ** 2 + ... + (unc_n) ** 2
+    #     )
+    # )
+    # TODO: SWE wants to add a more refined formula based on spin data and
+    # the counts uncertainty derived from it in the future.
+
+    # Use np.add.at() to accumulate values into bins, squaring each data
+    # point before summing.
+    np.add.at(
+        binned_data,
+        (time_indices, energy_indices, angle_bin_indices),
+        data**2,
+    )
+    return np.sqrt(binned_data)
+
+
 def put_data_into_angle_bins(
     data: np.ndarray, angle_bin_indices: npt.NDArray[np.int_]
 ) -> npt.NDArray:
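The difference from put_data_into_angle_bins is the quadrature rule: values sharing a bin combine as the square root of the sum of squares rather than a mean. A toy check of the np.add.at() pattern used above:

    import numpy as np

    uncerts = np.array([3.0, 4.0])
    bins = np.zeros(2)
    np.add.at(bins, np.array([0, 0]), uncerts**2)  # both values land in bin 0
    print(np.sqrt(bins))  # [5. 0.] because sqrt(3**2 + 4**2) == 5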
@@ -142,10 +207,8 @@ def put_data_into_angle_bins(
     full cycle, it assigns data to the corresponding angle bin
     based on the provided indices.
 
-    Since multiple data points may fall into the same angle bin,
-    the function accumulates values and computes the average across
-    all 7 CEMs, ensuring that each bin contains a representative
-    mean value while maintaining the 7 CEM structure.
+    Since multiple data points can fall into the same angle bin,
+    this function computes the combined averages.
 
     Parameters
     ----------
     data : numpy.ndarray
@@ -177,7 +240,7 @@ def put_data_into_angle_bins(
     time_indices = np.arange(data.shape[0])[:, None, None]
     energy_indices = np.arange(swe_constants.N_ESA_STEPS)[None, :, None]
 
-    # Use np.add.at() to accumulate values into bins
+    # Use np.add.at() to accumulate values into bins, summing values that share a bin.
     np.add.at(binned_data, (time_indices, energy_indices, angle_bin_indices), data)
 
     # Count occurrences in each bin to compute the mean.
@@ -343,7 +406,9 @@ def swe_l2(l1b_dataset: xr.Dataset) -> xr.Dataset:
     # Calculate phase space density and flux. Store data in shape
     # (epoch, esa_step, spin_sector, cem_id). This is for L3 purposes.
     ############################################################
-    phase_space_density = calculate_phase_space_density(l1b_dataset)
+    phase_space_density = calculate_phase_space_density(
+        l1b_dataset["science_data"].data, l1b_dataset["esa_energy"].data
+    )
     dataset["phase_space_density_spin_sector"] = xr.DataArray(
         phase_space_density,
         name="phase_space_density_spin_sector",
@@ -419,4 +484,33 @@ def swe_l2(l1b_dataset: xr.Dataset) -> xr.Dataset:
         attrs=cdf_attributes.get_variable_attributes("phase_space_density"),
     )
 
+    #######################################################
+    # Calculate flux and phase space density of uncertainty data.
+    # Put uncertainty data in its angle bins.
+    #######################################################
+    # Calculate phase space density for uncertainty data.
+    phase_space_density_uncert = calculate_phase_space_density(
+        l1b_dataset["counts_stat_uncert"].data, l1b_dataset["esa_energy"].data
+    )
+    # Put uncertainty data into its spin angle bins and calculate new uncertainty
+    phase_space_density_uncert = put_uncertainty_into_angle_bins(
+        phase_space_density_uncert, spin_angle_bins_indices
+    )
+    dataset["psd_stat_uncert"] = xr.DataArray(
+        phase_space_density_uncert,
+        name="psd_stat_uncert",
+        dims=["epoch", "esa_step", "spin_sector", "cem_id"],
+        attrs=cdf_attributes.get_variable_attributes("psd_stat_uncert"),
+    )
+    # Calculate flux for uncertainty data.
+    flux_uncert = calculate_flux(
+        phase_space_density_uncert, l1b_dataset["esa_energy"].data
+    )
+    flux_uncert = put_uncertainty_into_angle_bins(flux_uncert, spin_angle_bins_indices)
+    dataset["flux_stat_uncert"] = xr.DataArray(
+        flux_uncert,
+        name="flux_stat_uncert",
+        dims=["epoch", "esa_step", "spin_sector", "cem_id"],
+        attrs=cdf_attributes.get_variable_attributes("flux_stat_uncert"),
+    )
     return dataset
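Reusing calculate_phase_space_density and calculate_flux for the uncertainties is valid because both are linear in their data argument: scaling counts by a constant k scales a one-sigma uncertainty by the same k. A one-line check of that property:

    k, counts, sigma = 2.5, 100.0, 10.0
    assert k * (counts + sigma) - k * counts == k * sigma  # linear maps scale sigma by k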
@@ -53,15 +53,15 @@ def log_decompression(value: int, mantissa_bit_length: int) -> int:
     """
     Perform logarithmic decompression on an integer.
 
-    Supports both 16-bit and 8-bit formats based on the specified
+    Supports 16-bit, 10-bit, and 8-bit formats based on the specified
     mantissa bit length.
 
     Parameters
     ----------
     value : int
-        An integer comprised of a 4-bit exponent followed by a variable-length mantissa.
+        An integer comprised of an exponent followed by a mantissa.
     mantissa_bit_length : int
-        The bit length of the mantissa (default is 12 for 16-bit format).
+        The bit length of the mantissa.
 
     Returns
     -------
@@ -72,6 +72,9 @@ def log_decompression(value: int, mantissa_bit_length: int) -> int:
     if mantissa_bit_length == 12:
         base_value = 4096
         mantissa_mask = 0xFFF
+    elif mantissa_bit_length == 5:
+        base_value = 32
+        mantissa_mask = 0x1F
     elif mantissa_bit_length == 4:
         base_value = 16
         mantissa_mask = 0x0F
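The new branch follows the same pattern as the existing ones: base_value = 2**mantissa_bit_length and mantissa_mask = 2**mantissa_bit_length - 1, with the exponent in the bits above the mantissa. A sketch of the field split (an illustrative helper, not the package's full decompression formula):

    def split_fields(value: int, mantissa_bit_length: int) -> tuple[int, int]:
        # Illustrative only: the exponent sits above the low mantissa bits.
        exponent = value >> mantissa_bit_length
        mantissa = value & ((1 << mantissa_bit_length) - 1)
        return exponent, mantissa

    for bits in (12, 5, 4):  # the three supported mantissa widths
        print(bits, 2**bits, hex(2**bits - 1))
    # 12 4096 0xfff
    # 5 32 0x1f
    # 4 16 0xf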
@@ -239,6 +242,7 @@ def decompress_image(
 def read_image_raw_events_binary(
     event_data: bytes,
     count: int,
+    field_ranges: dict,
 ) -> NDArray:
     """
     Convert contents of binary string 'EVENTDATA' into values.
@@ -249,6 +253,8 @@ def read_image_raw_events_binary(
         Event data.
     count : int
         Number of events.
+    field_ranges : dict
+        Field ranges for the event data.
 
     Returns
     -------
@@ -256,15 +262,16 @@ def read_image_raw_events_binary(
         Event data.
     """
     binary = convert_to_binary_string(event_data)
-    # 166 bits per event
-    event_length = 166 if count else 0
+    length = max(end for (_, end) in field_ranges.values())
+    # bits per event
+    event_length = length if count else 0
     event_data_list = []
 
     # For all packets with event data, parse the binary string
     for i in range(count):
         start_index = i * event_length
         event_binary = binary[start_index : start_index + event_length]
-        parsed_event = parse_event(event_binary)
+        parsed_event = parse_event(event_binary, field_ranges)
         event_data_list.append(parsed_event)
 
     return np.array(event_data_list)
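The hard-coded 166 bits per event is replaced by deriving the event length from field_ranges, so the packet definition can evolve without touching this function. A toy sketch assuming field_ranges maps field names to (start, end) bit offsets (the names and ranges below are hypothetical):

    # Hypothetical bit ranges; the real ones come from the packet definition.
    field_ranges = {"coin_type": (0, 2), "de_time": (2, 18), "tof": (18, 32)}
    event_length = max(end for (_, end) in field_ranges.values())  # 32 bits/event

    binary = "01" * 32  # toy bit string holding two 32-bit events
    events = []
    for i in range(2):
        event = binary[i * event_length : (i + 1) * event_length]
        events.append({name: int(event[s:e], 2) for name, (s, e) in field_ranges.items()})
    print(events[0]["coin_type"])  # 1, bits "01" parsed as an integer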