imap-processing 0.8.0-py3-none-any.whl → 0.9.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of imap-processing has been flagged as possibly problematic.
Files changed (99)
  1. imap_processing/_version.py +2 -2
  2. imap_processing/ccsds/excel_to_xtce.py +2 -0
  3. imap_processing/cdf/config/imap_hi_variable_attrs.yaml +100 -1
  4. imap_processing/cdf/config/imap_hit_global_cdf_attrs.yaml +14 -0
  5. imap_processing/cdf/config/imap_hit_l1a_variable_attrs.yaml +63 -1
  6. imap_processing/cdf/config/imap_idex_global_cdf_attrs.yaml +7 -0
  7. imap_processing/cdf/config/imap_idex_l1a_variable_attrs.yaml +574 -231
  8. imap_processing/cdf/config/imap_idex_l1b_variable_attrs.yaml +326 -0
  9. imap_processing/cdf/config/imap_lo_l1a_variable_attrs.yaml +33 -23
  10. imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +7 -4
  11. imap_processing/cdf/utils.py +3 -5
  12. imap_processing/cli.py +13 -4
  13. imap_processing/codice/codice_l1a.py +5 -5
  14. imap_processing/codice/constants.py +9 -9
  15. imap_processing/codice/decompress.py +6 -2
  16. imap_processing/glows/l1a/glows_l1a.py +1 -2
  17. imap_processing/hi/l1a/hi_l1a.py +4 -4
  18. imap_processing/hi/l1a/histogram.py +106 -108
  19. imap_processing/hi/l1a/science_direct_event.py +91 -224
  20. imap_processing/hi/packet_definitions/TLM_HI_COMBINED_SCI.xml +3994 -0
  21. imap_processing/hit/l0/constants.py +2 -2
  22. imap_processing/hit/l0/decom_hit.py +12 -101
  23. imap_processing/hit/l1a/hit_l1a.py +164 -23
  24. imap_processing/ialirt/l0/process_codicelo.py +153 -0
  25. imap_processing/ialirt/l0/process_hit.py +5 -5
  26. imap_processing/ialirt/packet_definitions/ialirt_codicelo.xml +281 -0
  27. imap_processing/ialirt/process_ephemeris.py +212 -0
  28. imap_processing/idex/idex_l1a.py +55 -75
  29. imap_processing/idex/idex_l1b.py +192 -0
  30. imap_processing/idex/idex_variable_unpacking_and_eu_conversion.csv +33 -0
  31. imap_processing/idex/packet_definitions/idex_packet_definition.xml +97 -595
  32. imap_processing/lo/l0/decompression_tables/decompression_tables.py +16 -0
  33. imap_processing/lo/l0/lo_science.py +44 -12
  34. imap_processing/lo/l1a/lo_l1a.py +76 -8
  35. imap_processing/lo/packet_definitions/lo_xtce.xml +9877 -87
  36. imap_processing/mag/l1a/mag_l1a.py +1 -2
  37. imap_processing/mag/l1a/mag_l1a_data.py +1 -2
  38. imap_processing/mag/l1b/mag_l1b.py +2 -1
  39. imap_processing/spice/geometry.py +37 -19
  40. imap_processing/spice/time.py +144 -2
  41. imap_processing/swapi/l1/swapi_l1.py +3 -3
  42. imap_processing/swapi/packet_definitions/swapi_packet_definition.xml +1535 -446
  43. imap_processing/swe/l2/swe_l2.py +134 -17
  44. imap_processing/tests/ccsds/test_data/expected_output.xml +1 -1
  45. imap_processing/tests/codice/test_codice_l1a.py +8 -8
  46. imap_processing/tests/codice/test_decompress.py +4 -4
  47. imap_processing/tests/conftest.py +46 -43
  48. imap_processing/tests/hi/test_data/l0/H90_NHK_20241104.bin +0 -0
  49. imap_processing/tests/hi/test_data/l0/H90_sci_cnt_20241104.bin +0 -0
  50. imap_processing/tests/hi/test_data/l0/H90_sci_de_20241104.bin +0 -0
  51. imap_processing/tests/hi/test_hi_l1b.py +2 -2
  52. imap_processing/tests/hi/test_l1a.py +31 -58
  53. imap_processing/tests/hi/test_science_direct_event.py +58 -0
  54. imap_processing/tests/hit/test_data/sci_sample1.ccsds +0 -0
  55. imap_processing/tests/hit/test_decom_hit.py +60 -50
  56. imap_processing/tests/hit/test_hit_l1a.py +327 -12
  57. imap_processing/tests/hit/test_hit_l1b.py +76 -0
  58. imap_processing/tests/hit/validation_data/hskp_sample_eu.csv +89 -0
  59. imap_processing/tests/hit/validation_data/sci_sample_raw1.csv +29 -0
  60. imap_processing/tests/ialirt/test_data/l0/apid01152.tlm +0 -0
  61. imap_processing/tests/ialirt/test_data/l0/imap_codice_l1a_lo-ialirt_20241110193700_v0.0.0.cdf +0 -0
  62. imap_processing/tests/ialirt/unit/test_process_codicelo.py +106 -0
  63. imap_processing/tests/ialirt/unit/test_process_ephemeris.py +109 -0
  64. imap_processing/tests/ialirt/unit/test_process_hit.py +9 -6
  65. imap_processing/tests/idex/conftest.py +1 -1
  66. imap_processing/tests/idex/test_idex_l0.py +1 -1
  67. imap_processing/tests/idex/test_idex_l1a.py +7 -1
  68. imap_processing/tests/idex/test_idex_l1b.py +126 -0
  69. imap_processing/tests/lo/test_lo_l1a.py +7 -16
  70. imap_processing/tests/lo/test_lo_science.py +67 -3
  71. imap_processing/tests/lo/test_pkts/imap_lo_l0_raw_20240803_v002.pkts +0 -0
  72. imap_processing/tests/lo/validation_data/Instrument_FM1_T104_R129_20240803_ILO_SCI_DE_dec_DN_with_fills.csv +1999 -0
  73. imap_processing/tests/mag/test_mag_l1b.py +39 -5
  74. imap_processing/tests/spice/test_geometry.py +32 -6
  75. imap_processing/tests/spice/test_time.py +135 -6
  76. imap_processing/tests/swapi/test_swapi_decom.py +75 -69
  77. imap_processing/tests/swapi/test_swapi_l1.py +4 -4
  78. imap_processing/tests/swe/test_swe_l2.py +64 -8
  79. imap_processing/tests/test_utils.py +1 -1
  80. imap_processing/tests/ultra/test_data/l0/ultra45_raw_sc_ultrarawimg_withFSWcalcs_FM45_40P_Phi28p5_BeamCal_LinearScan_phi2850_theta-000_20240207T102740.csv +3314 -3314
  81. imap_processing/tests/ultra/unit/test_de.py +8 -3
  82. imap_processing/tests/ultra/unit/test_spatial_utils.py +125 -0
  83. imap_processing/tests/ultra/unit/test_ultra_l1b_extended.py +39 -29
  84. imap_processing/tests/ultra/unit/test_ultra_l1c_pset_bins.py +2 -25
  85. imap_processing/ultra/constants.py +4 -0
  86. imap_processing/ultra/l1b/de.py +8 -14
  87. imap_processing/ultra/l1b/ultra_l1b_extended.py +29 -70
  88. imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +1 -36
  89. imap_processing/ultra/utils/spatial_utils.py +221 -0
  90. {imap_processing-0.8.0.dist-info → imap_processing-0.9.0.dist-info}/METADATA +1 -1
  91. {imap_processing-0.8.0.dist-info → imap_processing-0.9.0.dist-info}/RECORD +94 -76
  92. imap_processing/hi/l0/__init__.py +0 -0
  93. imap_processing/hi/l0/decom_hi.py +0 -24
  94. imap_processing/hi/packet_definitions/hi_packet_definition.xml +0 -482
  95. imap_processing/tests/hi/test_decom.py +0 -55
  96. imap_processing/tests/hi/test_l1a_sci_de.py +0 -72
  97. {imap_processing-0.8.0.dist-info → imap_processing-0.9.0.dist-info}/LICENSE +0 -0
  98. {imap_processing-0.8.0.dist-info → imap_processing-0.9.0.dist-info}/WHEEL +0 -0
  99. {imap_processing-0.8.0.dist-info → imap_processing-0.9.0.dist-info}/entry_points.txt +0 -0
@@ -1,9 +1,14 @@
- """SWE L2 processing module."""
+ """
+ SWE L2 processing module.
+
+ This module contains functions to process L1B data to L2 data products.
+ """

  import numpy as np
  import numpy.typing as npt
  import xarray as xr

+ from imap_processing.spice.geometry import get_spacecraft_spin_phase
  from imap_processing.swe.utils.swe_utils import read_lookup_table

  # TODO: add these to instrument status summary
@@ -22,6 +27,9 @@ GEOMETRIC_FACTORS = np.array(
  )
  ELECTRON_MASS = 9.10938356e-31  # kg

+ # See docstring of calculate_phase_space_density() for more details.
+ VELOCITY_CONVERSION_FACTOR = 1.237e31
+

  def get_particle_energy() -> npt.NDArray:
      """
@@ -44,11 +52,11 @@ def get_particle_energy() -> npt.NDArray:
      return lookup_table


- def calculate_phase_space_density(l1b_dataset: xr.Dataset) -> npt.NDArray:
+ def calculate_phase_space_density(l1b_dataset: xr.Dataset) -> xr.Dataset:
      """
      Convert counts to phase space density.

-     Calculate phase space density, fv, in units of s^3/cm^6
+     Calculate phase space density, fv, in units of s^3 / (cm^6 * ster).
      fv = 2 * (C/tau) / (G * v^4)
      where:
          C / tau = corrected count rate. L1B science data.
@@ -56,12 +64,26 @@ def calculate_phase_space_density(l1b_dataset: xr.Dataset) -> npt.NDArray:
          v = electron speed, computed from energy, in cm/s.
      We need to use this formula to convert energy to speed:
          E = 0.5 * m * v^2
-     where E is electron energy, in eV
-     (result from get_particle_energy() function),
-     m is mass of electron (9.10938356e-31 kg),
-     and v is what we want to calculate. Reorganizing above
-     formula result in v = sqrt(2 * E / m). This will be used
-     to calculate electron speed.
+     where E is the electron energy, in eV
+     (result from the get_particle_energy() function),
+     m is the mass of the electron (9.10938356e-31 kg),
+     and v is what we want to calculate. Rearranging the above
+     formula gives v = sqrt(2 * E / m). This will be used
+     to calculate the electron speed, v.
+
+     Now to convert the electron speed units to cm/s:
+     v = sqrt(2 * E / m)
+       = sqrt(2 * E(eV) * 1.60219e-19 (J/eV) / 9.10938e-31 kg)
+         where J = kg * m^2 / s^2
+       = sqrt(2 * 1.60219e-19 m^2/s^2 * E(eV) / 9.10938e-31)
+       = sqrt(2 * 1.60219e-19 * 1e4 cm^2/s^2 * E(eV) / 9.10938e-31)
+       = sqrt(3.20438e-15 * E(eV) / 9.10938e-31) cm/s
+       = sqrt((3.20438e-15 / 9.10938e-31) * E(eV)) cm/s
+     fv = 2 * (C/tau) / (G * v^4)
+        = 2 * (C/tau) / (G * (sqrt((3.20438e-15 / 9.10938e-31) * E(eV)))^4)
+        = 2 * (C/tau) / (G * (3.5177e15)^2 * E(eV)^2)
+        = 2 * (C/tau) / (G * 1.237e31 * E(eV)^2)
+     Ruth Skoug also arrived at the same factor, 1.237e31.

      Parameters
      ----------
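Editor's note: the constant above can be checked numerically with a few lines of standalone Python (a sketch for verification only, not code from the package):

    # Verify the 1.237e31 velocity conversion factor used in swe_l2.py.
    EV_TO_JOULE = 1.60219e-19  # J per eV
    ELECTRON_MASS = 9.10938356e-31  # kg
    M2_TO_CM2 = 1e4  # cm^2 per m^2

    # v^2 = 2 * E / m with E converted from eV to J and m^2 -> cm^2, so
    # v^4 carries a factor of (2 * EV_TO_JOULE * M2_TO_CM2 / m)^2 * E(eV)^2.
    factor = (2 * EV_TO_JOULE * M2_TO_CM2 / ELECTRON_MASS) ** 2
    print(f"{factor:.4g}")  # ~1.237e+31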
@@ -70,8 +92,9 @@ def calculate_phase_space_density(l1b_dataset: xr.Dataset) -> npt.NDArray:

      Returns
      -------
-     density : numpy.ndarray
-         Phase space density.
+     phase_space_density_dataset : xarray.Dataset
+         Phase space density. We call it "phase space density" because
+         a plain "density" product will exist in L3 processing.
      """
      # Get esa_table_num for each full sweep.
      esa_table_nums = l1b_dataset["esa_table_num"].values[:, 0]
@@ -87,13 +110,107 @@ def calculate_phase_space_density(l1b_dataset: xr.Dataset) -> npt.NDArray:
      )
      particle_energy_data = particle_energy_data.reshape(-1, 24, 30)

-     # Calculate electron speed.
-     electron_speed = np.sqrt(2 * particle_energy_data / ELECTRON_MASS)
-
-     # Calculate phase space density.
+     # Calculate phase space density using the formula:
+     #     2 * (C/tau) / (G * 1.237e31 * E(eV)^2)
+     # See docstring for more details.
      density = (2 * l1b_dataset["science_data"]) / (
          GEOMETRIC_FACTORS[np.newaxis, np.newaxis, np.newaxis, :]
-         * electron_speed[:, :, :, np.newaxis] ** 4
+         * VELOCITY_CONVERSION_FACTOR
+         * particle_energy_data[:, :, :, np.newaxis] ** 2
+     )
+
+     # Return density as an xr.Dataset with the phase space density and
+     # the energy in eV so that the flux calculation can use both.
+     phase_space_density_dataset = xr.Dataset(
+         {
+             "phase_space_density": (["epoch", "energy", "angle", "cem"], density.data),
+             "energy_in_eV": (["epoch", "energy", "angle"], particle_energy_data),
+         },
+         coords=l1b_dataset.coords,
+     )
+
+     return phase_space_density_dataset
+
+
+ def calculate_flux(l1b_dataset: xr.Dataset) -> npt.NDArray:
+     """
+     Calculate flux.
+
+     To get flux, j = 2 * m * E * f
+     where:
+         f = calculate_phase_space_density() result,
+         m = mass of the electron,
+         E = energy in joules, E(eV) * 1.60219e-19 (J/eV).
+
+     To convert the flux units:
+     j = 2 * m * E * f
+       = 2 * 9.10938356e-31 kg * E(eV) * 1.60219e-19 (J/eV) * (s^3 / (cm^6 * ster))
+       = 2 * 9.10938356e-31 kg * E(eV) * 1.60219e-19 (kg * m^2 / s^2) *
+         (s^3 / (cm^6 * ster))
+       = 2 * 9.10938356e-31 * E(eV) * 1.60219e-19 kg^2 * (10^4 cm^2 / s^2) *
+         (s^3 / (cm^6 * ster))
+       = 2 * 9.10938356e-31 * E(eV) * 1.60219e-19 * 10^4 ((kg^2 * cm^2 * s^3) /
+         (s^2 * cm^6 * ster))
+     TODO: ask Ruth Skoug what units to use for flux and work out the remaining units.
+
+     Parameters
+     ----------
+     l1b_dataset : xarray.Dataset
+         The L1B dataset to process.
+
+     Returns
+     -------
+     flux : numpy.ndarray
+         Flux values.
+     """
+     phase_space_density_ds = calculate_phase_space_density(l1b_dataset)
+     # TODO: update this once Ruth sends the correct conversion factors.
+     flux = (
+         2
+         * ELECTRON_MASS
+         * phase_space_density_ds["energy_in_eV"].data[:, :, :, np.newaxis]
+         * 1.60219e-19
+         * 10e4
+         * phase_space_density_ds["phase_space_density"].data
      )
+     return flux

-     return density
+
+ def swe_l2(l1b_dataset: xr.Dataset, data_version: str) -> xr.Dataset:
+     """
+     Will process data to L2.
+
+     Parameters
+     ----------
+     l1b_dataset : xarray.Dataset
+         The L1B dataset to process.
+     data_version : str
+         Version of the data product being created.
+
+     Returns
+     -------
+     data : xarray.Dataset
+         Processed L2 data.
+     """
+     flux = calculate_flux(l1b_dataset)
+
+     # Calculate spin phase using the SWE sci_step_acq_time_sec computed in L1B.
+     # The L1B dataset stores it by (epoch, energy, angle, cem).
+     data_acq_time = l1b_dataset["sci_step_acq_time_sec"].data.flatten()
+
+     # Calculate spin phase.
+     spin_phase = get_spacecraft_spin_phase(
+         query_met_times=data_acq_time,
+     ).reshape(-1, 24, 30, 7)
+     # TODO: organize flux data by energy and spin_phase.
+     # My understanding from conversations with Ruth is that this is the hardest
+     # and last part of the L2 processing.
+
+     # TODO: Correct return value. This is just a placeholder.
+     return xr.Dataset(
+         {
+             "flux": (["epoch", "energy", "spin_phase", "cem"], flux),
+             "spin_phase": (["epoch", "energy", "spin_phase", "cem"], spin_phase),
+         },
+         coords=l1b_dataset.coords,
+     )
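Editor's note: to see the broadcasting at work in the density and flux expressions above, here is a standalone sketch on dummy arrays (all inputs here are invented; the real values come from the L1B dataset and the GEOMETRIC_FACTORS table):

    import numpy as np

    ELECTRON_MASS = 9.10938356e-31  # kg
    VELOCITY_CONVERSION_FACTOR = 1.237e31

    rng = np.random.default_rng(0)
    counts = rng.random((2, 24, 30, 7))         # (epoch, energy, angle, cem)
    energy_ev = rng.random((2, 24, 30)) * 1e3   # (epoch, energy, angle)
    geometric_factors = np.full(7, 1e-5)        # one dummy value per CEM

    # fv = 2 * (C/tau) / (G * 1.237e31 * E(eV)^2)
    density = (2 * counts) / (
        geometric_factors[np.newaxis, np.newaxis, np.newaxis, :]
        * VELOCITY_CONVERSION_FACTOR
        * energy_ev[:, :, :, np.newaxis] ** 2
    )
    # j = 2 * m * E * f (unit-conversion factors still TODO upstream)
    flux = 2 * ELECTRON_MASS * energy_ev[:, :, :, np.newaxis] * density
    print(density.shape, flux.shape)  # (2, 24, 30, 7) (2, 24, 30, 7)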
@@ -1,6 +1,6 @@
  <?xml version='1.0' encoding='utf-8'?>
  <xtce:SpaceSystem xmlns:xtce="http://www.omg.org/space/xtce" name="Test Instrument">
-   <xtce:Header date="2024-07-26 00:00:00" version="v1.2" author="IMAP SDC" />
+   <xtce:Header date="2024-07-26 00:00:00" version="v1.2" author="IMAP SDC" source_file="excel_to_xtce_test_file.xlsx" />
    <xtce:TelemetryMetaData>
      <xtce:ParameterTypeSet>
        <xtce:IntegerParameterType name="VERSION" signed="false">
@@ -18,14 +18,14 @@ EXPECTED_ARRAY_SHAPES = [
      (),  # hi-ialirt  # TODO: Need to implement
      (),  # lo-ialirt  # TODO: Need to implement
      (31778,),  # hskp
-     (77, 6, 6, 128),  # lo-counters-aggregated
-     (77, 24, 6, 128),  # lo-counters-singles
-     (77, 1, 12, 128),  # lo-sw-priority
-     (77, 1, 12, 128),  # lo-nsw-priority
-     (77, 1, 1, 128),  # lo-sw-species
-     (77, 1, 1, 128),  # lo-nsw-species
-     (77, 5, 12, 128),  # lo-sw-angular
-     (77, 19, 12, 128),  # lo-nsw-angular
+     (77, 128, 6, 6),  # lo-counters-aggregated
+     (77, 128, 24, 6),  # lo-counters-singles
+     (77, 128, 1, 12),  # lo-sw-priority
+     (77, 128, 1, 12),  # lo-nsw-priority
+     (77, 128, 1, 1),  # lo-sw-species
+     (77, 128, 1, 1),  # lo-nsw-species
+     (77, 128, 5, 12),  # lo-sw-angular
+     (77, 128, 19, 12),  # lo-nsw-angular
      (77, 1, 6, 1),  # hi-counters-aggregated
      (77, 1, 12, 1),  # hi-counters-singles
      (77, 15, 4, 1),  # hi-omni
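Editor's note: the lo-* shape changes move the 128-element axis from last position to second. A generic numpy sketch of that reordering (not code from the package):

    import numpy as np

    old = np.zeros((77, 6, 6, 128))   # old lo-counters-aggregated layout
    new = np.moveaxis(old, 3, 1)      # move the 128-bin axis to position 1
    print(new.shape)                  # (77, 128, 6, 6)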
@@ -13,11 +13,11 @@ lzma_bytes = lzma.compress((234).to_bytes(1, byteorder="big"))
  # LZMA_EXAMPLE = "".join(format(byte, "08b") for byte in lzma_bytes)
  TEST_DATA = [
      (b"\xea", CoDICECompression.NO_COMPRESSION, [234]),
-     (b"\xea", CoDICECompression.LOSSY_A, [221184]),
-     (b"\xea", CoDICECompression.LOSSY_B, [1441792]),
+     (b"\xea", CoDICECompression.LOSSY_A, [212992]),
+     (b"\xea", CoDICECompression.LOSSY_B, [1310720]),
      (lzma_bytes, CoDICECompression.LOSSLESS, [234]),
-     (lzma_bytes, CoDICECompression.LOSSY_A_LOSSLESS, [221184]),
-     (lzma_bytes, CoDICECompression.LOSSY_B_LOSSLESS, [1441792]),
+     (lzma_bytes, CoDICECompression.LOSSY_A_LOSSLESS, [212992]),
+     (lzma_bytes, CoDICECompression.LOSSY_B_LOSSLESS, [1310720]),
  ]

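Editor's note: the LOSSLESS rows can be sanity-checked with the standard library alone (the lossy expected values, by contrast, depend on the CoDICE decompression lookup tables updated in constants.py):

    import lzma

    lzma_bytes = lzma.compress((234).to_bytes(1, byteorder="big"))
    assert list(lzma.decompress(lzma_bytes)) == [234]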
@@ -46,45 +46,48 @@ def _autoclear_spice():


  @pytest.fixture(scope="session")
- def _download_de440s(spice_test_data_path):
-     """This fixture downloads the de440s.bsp kernel into the
-     tests/spice/test_data directory if it does not already exist there. The
+ def _download_external_kernels(spice_test_data_path):
+     """This fixture downloads the de440s.bsp and pck00011.tpc kernels into the
+     tests/spice/test_data directory if they do not already exist there. The
      fixture is not intended to be used directly. It is automatically added to
      tests marked with "external_kernel" in the hook below."""
      logger = logging.getLogger(__name__)
-     kernel_url = (
-         "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/spk/planets/de440s.bsp"
-     )
-     kernel_name = kernel_url.split("/")[-1]
-     local_filepath = spice_test_data_path / kernel_name
-
-     if local_filepath.exists():
-         return
-     allowed_attempts = 3
-     for attempt_number in range(allowed_attempts):
-         try:
-             with requests.get(kernel_url, stream=True, timeout=30) as r:
-                 r.raise_for_status()
-                 with open(local_filepath, "wb") as f:
-                     for chunk in r.iter_content(chunk_size=8192):
-                         f.write(chunk)
-             logger.info("Cached kernel file to %s", local_filepath)
-             break
-         except requests.exceptions.RequestException as error:
-             logger.info(f"Request failed. {error}")
-             if attempt_number < allowed_attempts:
-                 logger.info(
-                     f"Trying again, retries left "
-                     f"{allowed_attempts - attempt_number}, "
-                     f"Exception: {error}"
-                 )
-                 time.sleep(1)
-             else:
-                 logger.error(
-                     f"Failed to download file after {allowed_attempts} "
-                     f"attempts, Final Error: {error}"
-                 )
-                 raise
+     kernel_urls = [
+         "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/spk/planets/de440s.bsp",
+         "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/pck00011.tpc",
+     ]
+
+     for kernel_url in kernel_urls:
+         kernel_name = kernel_url.split("/")[-1]
+         local_filepath = spice_test_data_path / kernel_name
+
+         if local_filepath.exists():
+             continue
+         allowed_attempts = 3
+         for attempt_number in range(allowed_attempts):
+             try:
+                 with requests.get(kernel_url, stream=True, timeout=30) as r:
+                     r.raise_for_status()
+                     with open(local_filepath, "wb") as f:
+                         for chunk in r.iter_content(chunk_size=8192):
+                             f.write(chunk)
+                 logger.info("Cached kernel file to %s", local_filepath)
+                 continue
+             except requests.exceptions.RequestException as error:
+                 logger.info(f"Request failed. {error}")
+                 if attempt_number < allowed_attempts:
+                     logger.info(
+                         f"Trying again, retries left "
+                         f"{allowed_attempts - attempt_number}, "
+                         f"Exception: {error}"
+                     )
+                     time.sleep(1)
+                 else:
+                     logger.error(
+                         f"Failed to download file {kernel_name} after "
+                         f"{allowed_attempts} attempts, Final Error: {error}"
+                     )
+                     raise


  def pytest_collection_modifyitems(items):
@@ -93,12 +96,12 @@ def pytest_collection_modifyitems(items):
      been collected. In this case, it automatically adds fixtures based on the
      following table:

-     +---------------------+---------------------+
-     | pytest mark         | fixture added       |
-     +=====================+=====================+
-     | external_kernel     | _download_de440s    |
-     | use_test_metakernel | use_test_metakernel |
-     +---------------------+---------------------+
+     +---------------------+----------------------------+
+     | pytest mark         | fixture added              |
+     +=====================+============================+
+     | external_kernel     | _download_external_kernels |
+     | use_test_metakernel | use_test_metakernel        |
+     +---------------------+----------------------------+

      Notes
      -----
@@ -108,7 +111,7 @@ def pytest_collection_modifyitems(items):
      """
      for item in items:
          if item.get_closest_marker("external_kernel") is not None:
-             item.fixturenames.append("_download_de440s")
+             item.fixturenames.append("_download_external_kernels")
          if item.get_closest_marker("use_test_metakernel") is not None:
              item.fixturenames.append("use_test_metakernel")

@@ -24,12 +24,12 @@ def test_hi_l1b_hk(hi_l0_test_data_path):
      """Test coverage for imap_processing.hi.hi_l1b.hi_l1b() with
      housekeeping L1A as input"""
      # TODO: once things are more stable, check in an L1A HK file as test data
-     bin_data_path = hi_l0_test_data_path / "20231030_H45_APP_NHK.bin"
+     bin_data_path = hi_l0_test_data_path / "H90_NHK_20241104.bin"
      data_version = "001"
      processed_data = hi_l1a(packet_file_path=bin_data_path, data_version=data_version)

      l1b_dataset = hi_l1b(processed_data[0], data_version=data_version)
-     assert l1b_dataset.attrs["Logical_source"] == "imap_hi_l1b_45sensor-hk"
+     assert l1b_dataset.attrs["Logical_source"] == "imap_hi_l1b_90sensor-hk"


  @pytest.mark.external_kernel()
@@ -1,43 +1,25 @@
  import numpy as np
- import pytest

  from imap_processing.cdf.utils import write_cdf
- from imap_processing.hi.l1a import histogram as hist
  from imap_processing.hi.l1a.hi_l1a import hi_l1a
+ from imap_processing.hi.l1a.histogram import unpack_hist_counter
  from imap_processing.hi.utils import HIAPID


- def test_sci_de_decom(create_de_data):
+ def test_sci_de_decom(hi_l0_test_data_path):
      """Test science direct event data"""

-     # Process using test data
-     processed_data = hi_l1a(
-         packet_file_path=create_de_data(HIAPID.H45_SCI_DE.value), data_version="001"
-     )
+     bin_data_path = hi_l0_test_data_path / "H90_sci_de_20241104.bin"
+     processed_data = hi_l1a(bin_data_path, data_version="001")

-     assert processed_data[0].attrs["Logical_source"] == "imap_hi_l1a_45sensor-de"
+     assert processed_data[0].attrs["Logical_source"] == "imap_hi_l1a_90sensor-de"
      assert processed_data[0].attrs["Data_version"] == "001"

-     # unique ESA steps should be [1, 2]
-     assert np.array_equal(
-         np.sort(np.unique(processed_data[0]["esa_step"].values)),
-         np.array([1, 2]),
-     )
-     # unique trigger_id should be [1, 2, 3]
-     assert np.array_equal(
-         np.sort(np.unique(processed_data[0]["trigger_id"].values)), np.array([1, 2, 3])
-     )
-     # tof_x should be in this range [0, 1023]
-     assert processed_data[0]["tof_1"].min() >= 0
-     assert processed_data[0]["tof_1"].max() <= 1023
-     assert processed_data[0]["tof_2"].min() >= 0
-     assert processed_data[0]["tof_2"].max() <= 1023
-     assert processed_data[0]["tof_3"].min() >= 0
-     assert processed_data[0]["tof_3"].max() <= 1023
+     # TODO: Verify correct unpacking of sample data. Issue: #1186

      # Write to CDF
-     cdf_filename = "imap_hi_l1a_45sensor-de_20230927_v001.cdf"
-     # TODO: Dropping duplicates to ignore ISTP for now. Need to update test data
+     cdf_filename = "imap_hi_l1a_90sensor-de_20241105_v001.cdf"
+     # TODO: Dropping duplicates to ignore ISTP for now. Should be fixed by #1186
      processed_data[0] = processed_data[0].sortby("epoch").groupby("epoch").first()
      cdf_filepath = write_cdf(processed_data[0])
      assert cdf_filepath.name == cdf_filename
@@ -47,54 +29,45 @@ def test_app_nhk_decom(hi_l0_test_data_path):
      """Test housekeeping data"""

      # Unpack housekeeping data
-     bin_data_path = hi_l0_test_data_path / "20231030_H45_APP_NHK.bin"
+     bin_data_path = hi_l0_test_data_path / "H90_NHK_20241104.bin"
      processed_data = hi_l1a(packet_file_path=bin_data_path, data_version="001")

-     assert np.unique(processed_data[0]["pkt_apid"].values) == HIAPID.H45_APP_NHK.value
-     assert processed_data[0].attrs["Logical_source"] == "imap_hi_l1a_45sensor-hk"
+     assert np.unique(processed_data[0]["pkt_apid"].values) == HIAPID.H90_APP_NHK.value
+     assert processed_data[0].attrs["Logical_source"] == "imap_hi_l1a_90sensor-hk"
      assert processed_data[0].attrs["Data_version"] == "001"
-     # TODO: compare with validation data once we have it
+     # TODO: compare with validation data once we have it. Issue: #1184

      # Write CDF
      cem_raw_cdf_filepath = write_cdf(processed_data[0], istp=False)

      # TODO: ask Vivek about this date mismatch between the file name
      # and the data. May get resolved when we have good sample data.
-     assert cem_raw_cdf_filepath.name == "imap_hi_l1a_45sensor-hk_20100313_v001.cdf"
+     assert cem_raw_cdf_filepath.name == "imap_hi_l1a_90sensor-hk_20241105_v001.cdf"


- @pytest.mark.skip(
-     reason="Need new test data with monotonically increasing epoch values"
- )
  def test_app_hist_decom(hi_l0_test_data_path):
      """Test histogram (SCI_CNT) data"""
-     bin_data_path = hi_l0_test_data_path / "20231030_H45_SCI_CNT.bin"
+     bin_data_path = hi_l0_test_data_path / "H90_sci_cnt_20241104.bin"
      processed_data = hi_l1a(packet_file_path=bin_data_path, data_version="001")

-     assert processed_data[0].attrs["Logical_source"] == "imap_hi_l1a_45sensor-hist"
-     # TODO: compare with validation data once we have it
-     # TODO: Dropping duplicates to ignore ISTP for now. Need to update test data
-     processed_data[0] = processed_data[0].sortby("epoch").groupby("epoch").first()
+     assert processed_data[0].attrs["Logical_source"] == "imap_hi_l1a_90sensor-hist"
+     # TODO: compare with validation data once we have it. Issue: #1185

      # Write CDF
      cem_raw_cdf_filepath = write_cdf(processed_data[0])

-     assert cem_raw_cdf_filepath.name.startswith("imap_hi_l1a_45sensor-hist_")
-
-
- def test_allocate_histogram_dataset():
-     """Test hi.l1a.histogram.allocate_histogram_dataset()"""
-     n_packets = 5
-     dataset = hist.allocate_histogram_dataset(n_packets)
-
-     assert dataset.attrs["Data_type"] == "L1A_HIST>Level-1A Histogram"
-     assert dataset.sizes["epoch"] == n_packets
-     assert dataset.sizes["angle"] == 90
-     for var_name in (
-         "ccsds_met",
-         "esa_stepping_num",
-         *hist.QUALIFIED_COUNTERS,
-         *hist.LONG_COUNTERS,
-         *hist.TOTAL_COUNTERS,
-     ):
-         assert var_name in dataset
+     assert cem_raw_cdf_filepath.name.startswith("imap_hi_l1a_90sensor-hist_")
+
+
+ def test_unpack_hist_counter():
+     """Test hi.l1a.histogram.unpack_hist_counter()"""
+     # To ensure correct unpacking, use expected values with ones in the upper
+     # and lower parts of the 12-bit numbers
+     expected = (np.arange(180).reshape((2, 90)) + 2**10).astype(">u2")
+     # Convert each expected uint16 to a 12-bit bitstring and join
+     bin_str = "".join([f"{val:012b}" for val in expected.ravel()])
+     # Convert the bitstring to a bytes object
+     bytes_array = int(bin_str, 2).to_bytes(len(bin_str) // 8, byteorder="big")
+     output_array = unpack_hist_counter(bytes_array)
+     np.testing.assert_array_equal(output_array, expected)
+     assert output_array.dtype == np.uint16
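Editor's note: for reference, one way a 12-bit unpacker like the one exercised above can be implemented (a sketch of the general technique only; the actual unpack_hist_counter lives in imap_processing/hi/l1a/histogram.py and may differ, e.g. in how it reshapes the output):

    import numpy as np

    def unpack_12bit_counters(raw: bytes) -> np.ndarray:
        """Unpack big-endian, tightly packed 12-bit counters into uint16."""
        bits = np.unpackbits(np.frombuffer(raw, dtype=np.uint8))
        # Group the bit stream into 12-bit words and weight each bit position.
        words = bits.reshape(-1, 12)
        weights = 1 << np.arange(11, -1, -1)
        # Caller can reshape the flat result, e.g. .reshape(n_esa_steps, 90).
        return (words * weights).sum(axis=1).astype(np.uint16)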
@@ -0,0 +1,58 @@
+ import numpy as np
+
+ from imap_processing.hi.l1a.science_direct_event import (
+     create_dataset,
+     parse_direct_events,
+ )
+
+
+ def test_parse_direct_events():
+     """Test coverage for parse_direct_events function."""
+     # Generate a fake binary blob using random numbers
+     np.random.seed(2)
+     n_events = 10_000
+     exp_dict = dict()
+     exp_dict["trigger_id"] = np.random.randint(1, 4, size=n_events, dtype=np.uint8)
+     exp_dict["de_tag"] = np.random.randint(0, 2**16, size=n_events, dtype=np.uint16)
+     exp_dict["tof_1"] = np.random.randint(0, 2**10, size=n_events, dtype=np.uint16)
+     exp_dict["tof_2"] = np.random.randint(0, 2**10, size=n_events, dtype=np.uint16)
+     exp_dict["tof_3"] = np.random.randint(0, 2**10, size=n_events, dtype=np.uint16)
+
+     # Encode the random event data into a bit-string
+     bin_str = ""
+     for i in range(n_events):
+         bin_str += f"{exp_dict['trigger_id'][i]:02b}"  # 2 bits for trigger_id
+         bin_str += f"{exp_dict['de_tag'][i]:016b}"  # 16 bits for de_tag
+         bin_str += f"{exp_dict['tof_1'][i]:010b}"  # 10 bits for tof_1
+         bin_str += f"{exp_dict['tof_2'][i]:010b}"  # 10 bits for tof_2
+         bin_str += f"{exp_dict['tof_3'][i]:010b}"  # 10 bits for tof_3
+     # Convert the bit-string into a bytes object
+     bytes_obj = bytes([int(bin_str[i : i + 8], 2) for i in range(0, len(bin_str), 8)])
+     # Parse the fake events and check values
+     de_dict = parse_direct_events(bytes_obj)
+     for key in exp_dict.keys():
+         np.testing.assert_array_equal(de_dict[key], exp_dict[key])
+
+
+ def test_create_dataset():
+     """Test create_dataset"""
+     # Dummy data to test create_dataset
+     data_dict = {
+         "trigger_id": [1, 2, 3],
+         "tof_1": [512, 512, 512],
+         "tof_2": [512, 512, 512],
+         "tof_3": [512, 512, 512],
+         "de_tag": [1, 2, 3],
+         "meta_seconds": [433522962, 433522962, 433522962],
+         "meta_subseconds": [512, 512, 512],
+         "esa_step": [4, 4, 4],
+         "ccsds_met": [433522961, 433522961, 433522961],
+         "src_seq_ctr": [10, 10, 10],
+         "pkt_len": [146, 146, 146],
+         "last_spin_num": [4, 4, 4],
+         "spin_invalids": [0, 0, 0],
+     }
+
+     # Test for good data
+     dataset = create_dataset(data_dict)
+     assert dataset["epoch"].shape == (3,)
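Editor's note: the test above fixes the packed direct-event layout at 48 bits per event (2 + 16 + 10 + 10 + 10). For reference, a vectorized sketch of parsing that layout (a sketch of the technique only; the real parse_direct_events in science_direct_event.py may be implemented differently):

    import numpy as np

    def parse_events_sketch(raw: bytes) -> dict:
        """Split packed 48-bit direct events into trigger_id/de_tag/tof_1..3."""
        bits = np.unpackbits(np.frombuffer(raw, dtype=np.uint8)).reshape(-1, 48)

        def field(start: int, width: int) -> np.ndarray:
            # Weight each bit MSB-first and sum to get the integer value.
            weights = 1 << np.arange(width - 1, -1, -1)
            return (bits[:, start : start + width] * weights).sum(axis=1)

        return {
            "trigger_id": field(0, 2),
            "de_tag": field(2, 16),
            "tof_1": field(18, 10),
            "tof_2": field(28, 10),
            "tof_3": field(38, 10),
        }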