imap-processing 0.7.0__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of imap-processing might be problematic. Click here for more details.

Files changed (124):
  1. imap_processing/__init__.py +1 -1
  2. imap_processing/_version.py +2 -2
  3. imap_processing/ccsds/excel_to_xtce.py +34 -2
  4. imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +1 -1
  5. imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +145 -30
  6. imap_processing/cdf/config/imap_glows_l1b_variable_attrs.yaml +36 -36
  7. imap_processing/cdf/config/imap_hi_variable_attrs.yaml +36 -8
  8. imap_processing/cdf/config/imap_hit_l1b_variable_attrs.yaml +9 -0
  9. imap_processing/cdf/config/imap_idex_global_cdf_attrs.yaml +7 -7
  10. imap_processing/cdf/config/imap_idex_l1a_variable_attrs.yaml +32 -33
  11. imap_processing/cdf/config/imap_mag_l1_variable_attrs.yaml +24 -28
  12. imap_processing/cdf/config/imap_ultra_l1a_variable_attrs.yaml +1 -0
  13. imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +133 -78
  14. imap_processing/cdf/config/imap_variable_schema.yaml +13 -0
  15. imap_processing/cdf/imap_cdf_manager.py +31 -27
  16. imap_processing/cli.py +12 -10
  17. imap_processing/codice/codice_l1a.py +151 -61
  18. imap_processing/codice/constants.py +1 -1
  19. imap_processing/codice/decompress.py +4 -9
  20. imap_processing/codice/utils.py +1 -0
  21. imap_processing/glows/l1b/glows_l1b.py +3 -3
  22. imap_processing/glows/l1b/glows_l1b_data.py +59 -37
  23. imap_processing/glows/l2/glows_l2_data.py +123 -0
  24. imap_processing/hi/l1a/histogram.py +1 -1
  25. imap_processing/hi/l1a/science_direct_event.py +1 -1
  26. imap_processing/hi/l1b/hi_l1b.py +85 -11
  27. imap_processing/hi/l1c/hi_l1c.py +23 -1
  28. imap_processing/hi/utils.py +1 -1
  29. imap_processing/hit/hit_utils.py +221 -0
  30. imap_processing/hit/l0/constants.py +118 -0
  31. imap_processing/hit/l0/decom_hit.py +186 -153
  32. imap_processing/hit/l1a/hit_l1a.py +20 -175
  33. imap_processing/hit/l1b/hit_l1b.py +33 -153
  34. imap_processing/idex/idex_l1a.py +10 -9
  35. imap_processing/lo/l0/decompression_tables/decompression_tables.py +1 -1
  36. imap_processing/lo/l0/lo_science.py +1 -1
  37. imap_processing/lo/packet_definitions/lo_xtce.xml +1 -3296
  38. imap_processing/mag/l0/decom_mag.py +4 -3
  39. imap_processing/mag/l1a/mag_l1a.py +11 -11
  40. imap_processing/mag/l1b/mag_l1b.py +89 -7
  41. imap_processing/spice/geometry.py +126 -4
  42. imap_processing/swapi/l1/swapi_l1.py +1 -1
  43. imap_processing/swapi/l2/swapi_l2.py +1 -1
  44. imap_processing/swe/l1b/swe_l1b_science.py +8 -8
  45. imap_processing/tests/ccsds/test_data/expected_output.xml +1 -0
  46. imap_processing/tests/ccsds/test_excel_to_xtce.py +4 -4
  47. imap_processing/tests/cdf/test_imap_cdf_manager.py +0 -10
  48. imap_processing/tests/codice/conftest.py +1 -17
  49. imap_processing/tests/codice/data/imap_codice_l0_raw_20241110_v001.pkts +0 -0
  50. imap_processing/tests/codice/test_codice_l0.py +8 -2
  51. imap_processing/tests/codice/test_codice_l1a.py +127 -107
  52. imap_processing/tests/codice/test_codice_l1b.py +1 -0
  53. imap_processing/tests/codice/test_decompress.py +7 -7
  54. imap_processing/tests/conftest.py +54 -15
  55. imap_processing/tests/glows/conftest.py +6 -0
  56. imap_processing/tests/glows/test_glows_l1b.py +9 -9
  57. imap_processing/tests/glows/test_glows_l1b_data.py +9 -9
  58. imap_processing/tests/glows/test_glows_l2_data.py +0 -0
  59. imap_processing/tests/hi/test_data/l1a/imap_hi_l1a_45sensor-de_20250415_v000.cdf +0 -0
  60. imap_processing/tests/hi/test_hi_l1b.py +71 -1
  61. imap_processing/tests/hi/test_hi_l1c.py +10 -2
  62. imap_processing/tests/hi/test_utils.py +4 -3
  63. imap_processing/tests/hit/{test_hit_decom.py → test_decom_hit.py} +84 -35
  64. imap_processing/tests/hit/test_hit_l1a.py +2 -197
  65. imap_processing/tests/hit/test_hit_l1b.py +156 -25
  66. imap_processing/tests/hit/test_hit_utils.py +218 -0
  67. imap_processing/tests/idex/conftest.py +1 -1
  68. imap_processing/tests/idex/imap_idex_l0_raw_20231214_v001.pkts +0 -0
  69. imap_processing/tests/idex/impact_14_tof_high_data.txt +4444 -4444
  70. imap_processing/tests/idex/test_idex_l0.py +3 -3
  71. imap_processing/tests/idex/test_idex_l1a.py +1 -1
  72. imap_processing/tests/lo/test_lo_science.py +2 -2
  73. imap_processing/tests/mag/imap_mag_l1a_norm-magi_20251017_v001.cdf +0 -0
  74. imap_processing/tests/mag/test_mag_l1b.py +59 -3
  75. imap_processing/tests/spice/test_data/imap_ena_sim_metakernel.template +3 -1
  76. imap_processing/tests/spice/test_geometry.py +84 -4
  77. imap_processing/tests/swe/conftest.py +33 -0
  78. imap_processing/tests/swe/l1_validation/swe_l0_unpacked-data_20240510_v001_VALIDATION_L1B_v3.dat +4332 -0
  79. imap_processing/tests/swe/test_swe_l1b.py +29 -8
  80. imap_processing/tests/test_utils.py +1 -1
  81. imap_processing/tests/ultra/test_data/l1/dps_exposure_helio_45_E12.cdf +0 -0
  82. imap_processing/tests/ultra/test_data/l1/dps_exposure_helio_45_E24.cdf +0 -0
  83. imap_processing/tests/ultra/unit/test_de.py +108 -0
  84. imap_processing/tests/ultra/unit/test_ultra_l1b.py +27 -3
  85. imap_processing/tests/ultra/unit/test_ultra_l1b_annotated.py +31 -10
  86. imap_processing/tests/ultra/unit/test_ultra_l1b_extended.py +21 -11
  87. imap_processing/tests/ultra/unit/test_ultra_l1c_pset_bins.py +9 -44
  88. imap_processing/ultra/constants.py +8 -3
  89. imap_processing/ultra/l1b/de.py +174 -30
  90. imap_processing/ultra/l1b/ultra_l1b_annotated.py +24 -10
  91. imap_processing/ultra/l1b/ultra_l1b_extended.py +21 -14
  92. imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +70 -119
  93. {imap_processing-0.7.0.dist-info → imap_processing-0.8.0.dist-info}/METADATA +15 -14
  94. {imap_processing-0.7.0.dist-info → imap_processing-0.8.0.dist-info}/RECORD +98 -113
  95. imap_processing/cdf/cdf_attribute_manager.py +0 -322
  96. imap_processing/cdf/config/shared/default_global_cdf_attrs_schema.yaml +0 -246
  97. imap_processing/cdf/config/shared/default_variable_cdf_attrs_schema.yaml +0 -466
  98. imap_processing/hit/l0/data_classes/housekeeping.py +0 -240
  99. imap_processing/hit/l0/data_classes/science_packet.py +0 -259
  100. imap_processing/hit/l0/utils/hit_base.py +0 -57
  101. imap_processing/tests/cdf/shared/default_global_cdf_attrs_schema.yaml +0 -246
  102. imap_processing/tests/cdf/shared/default_variable_cdf_attrs_schema.yaml +0 -466
  103. imap_processing/tests/cdf/test_cdf_attribute_manager.py +0 -353
  104. imap_processing/tests/codice/data/imap_codice_l0_hi-counters-aggregated_20240429_v001.pkts +0 -0
  105. imap_processing/tests/codice/data/imap_codice_l0_hi-counters-singles_20240429_v001.pkts +0 -0
  106. imap_processing/tests/codice/data/imap_codice_l0_hi-omni_20240429_v001.pkts +0 -0
  107. imap_processing/tests/codice/data/imap_codice_l0_hi-pha_20240429_v001.pkts +0 -0
  108. imap_processing/tests/codice/data/imap_codice_l0_hi-sectored_20240429_v001.pkts +0 -0
  109. imap_processing/tests/codice/data/imap_codice_l0_hskp_20100101_v001.pkts +0 -0
  110. imap_processing/tests/codice/data/imap_codice_l0_lo-counters-aggregated_20240429_v001.pkts +0 -0
  111. imap_processing/tests/codice/data/imap_codice_l0_lo-counters-singles_20240429_v001.pkts +0 -0
  112. imap_processing/tests/codice/data/imap_codice_l0_lo-nsw-angular_20240429_v001.pkts +0 -0
  113. imap_processing/tests/codice/data/imap_codice_l0_lo-nsw-priority_20240429_v001.pkts +0 -0
  114. imap_processing/tests/codice/data/imap_codice_l0_lo-nsw-species_20240429_v001.pkts +0 -0
  115. imap_processing/tests/codice/data/imap_codice_l0_lo-pha_20240429_v001.pkts +0 -0
  116. imap_processing/tests/codice/data/imap_codice_l0_lo-sw-angular_20240429_v001.pkts +0 -0
  117. imap_processing/tests/codice/data/imap_codice_l0_lo-sw-priority_20240429_v001.pkts +0 -0
  118. imap_processing/tests/codice/data/imap_codice_l0_lo-sw-species_20240429_v001.pkts +0 -0
  119. imap_processing/tests/idex/imap_idex_l0_raw_20230725_v001.pkts +0 -0
  120. imap_processing/tests/mag/imap_mag_l1a_burst-magi_20231025_v001.cdf +0 -0
  121. /imap_processing/tests/hit/test_data/{imap_hit_l0_hk_20100105_v001.pkts → imap_hit_l0_raw_20100105_v001.pkts} +0 -0
  122. {imap_processing-0.7.0.dist-info → imap_processing-0.8.0.dist-info}/LICENSE +0 -0
  123. {imap_processing-0.7.0.dist-info → imap_processing-0.8.0.dist-info}/WHEEL +0 -0
  124. {imap_processing-0.7.0.dist-info → imap_processing-0.8.0.dist-info}/entry_points.txt +0 -0
@@ -1,7 +1,6 @@
1
1
  """Tests the L1a processing for decommutated CoDICE data"""
2
2
 
3
3
  import logging
4
- from pathlib import Path
5
4
 
6
5
  import numpy as np
7
6
  import pytest
@@ -10,139 +9,172 @@ import xarray as xr
10
9
  from imap_processing.cdf.utils import load_cdf, write_cdf
11
10
  from imap_processing.codice.codice_l1a import process_codice_l1a
12
11
 
13
- from .conftest import TEST_PACKETS, VALIDATION_DATA
12
+ from .conftest import TEST_L0_FILE, VALIDATION_DATA
14
13
 
15
14
  logger = logging.getLogger(__name__)
16
15
  logger.setLevel(logging.INFO)
17
16
 
18
- # TODO: Add test that processes a file with multiple APIDs
19
-
20
17
  EXPECTED_ARRAY_SHAPES = [
21
- (99,), # hskp
22
- (1, 1, 6, 1), # hi-counters-aggregated # TODO: Double check with Joey
23
- (1, 1, 16, 1), # hi-counters-singles # TODO: Double check with Joey
24
- (1, 15, 4, 1), # hi-omni # TODO: Double check with Joey
25
- (1, 8, 12, 12), # hi-sectored
26
- (1, 1), # hi-pha
27
- (1, 6, 6, 128), # lo-counters-aggregated
28
- (1, 24, 6, 128), # lo-counters-singles
29
- (1, 5, 12, 128), # lo-sw-angular
30
- (1, 19, 12, 128), # lo-nsw-angular
31
- (1, 1, 12, 128), # lo-sw-priority
32
- (1, 1, 12, 128), # lo-nsw-priority
33
- (1, 1, 1, 128), # lo-sw-species
34
- (1, 1, 1, 128), # lo-nsw-species
35
- (1, 128), # lo-pha
18
+ (), # hi-ialirt # TODO: Need to implement
19
+ (), # lo-ialirt # TODO: Need to implement
20
+ (31778,), # hskp
21
+ (77, 6, 6, 128), # lo-counters-aggregated
22
+ (77, 24, 6, 128), # lo-counters-singles
23
+ (77, 1, 12, 128), # lo-sw-priority
24
+ (77, 1, 12, 128), # lo-nsw-priority
25
+ (77, 1, 1, 128), # lo-sw-species
26
+ (77, 1, 1, 128), # lo-nsw-species
27
+ (77, 5, 12, 128), # lo-sw-angular
28
+ (77, 19, 12, 128), # lo-nsw-angular
29
+ (77, 1, 6, 1), # hi-counters-aggregated
30
+ (77, 1, 12, 1), # hi-counters-singles
31
+ (77, 15, 4, 1), # hi-omni
32
+ (77, 8, 12, 12), # hi-sectored
33
+ (), # hi-priority # TODO: Need to implement
34
+ (), # lo-pha # TODO: Need to implement
35
+ (), # hi-pha # TODO: Need to implement
36
36
  ]
37
- EXPECTED_LOGICAL_SOURCE = [
37
+
38
+ EXPECTED_LOGICAL_SOURCES = [
39
+ "imap_codice_l1a_hi-ialirt",
40
+ "imap_codice_l1a_lo-ialirt",
38
41
  "imap_codice_l1a_hskp",
39
- "imap_codice_l1a_hi-counters-aggregated",
40
- "imap_codice_l1a_hi-counters-singles",
41
- "imap_codice_l1a_hi-omni",
42
- "imap_codice_l1a_hi-sectored",
43
- "imap_codice_l1a_hi-pha",
44
42
  "imap_codice_l1a_lo-counters-aggregated",
45
43
  "imap_codice_l1a_lo-counters-singles",
46
- "imap_codice_l1a_lo-sw-angular",
47
- "imap_codice_l1a_lo-nsw-angular",
48
44
  "imap_codice_l1a_lo-sw-priority",
49
45
  "imap_codice_l1a_lo-nsw-priority",
50
46
  "imap_codice_l1a_lo-sw-species",
51
47
  "imap_codice_l1a_lo-nsw-species",
48
+ "imap_codice_l1a_lo-sw-angular",
49
+ "imap_codice_l1a_lo-nsw-angular",
50
+ "imap_codice_l1a_hi-counters-aggregated",
51
+ "imap_codice_l1a_hi-counters-singles",
52
+ "imap_codice_l1a_hi-omni",
53
+ "imap_codice_l1a_hi-sectored",
54
+ "imap_codice_l1a_hi-priority",
52
55
  "imap_codice_l1a_lo-pha",
56
+ "imap_codice_l1a_hi-pha",
53
57
  ]
58
+
54
59
  EXPECTED_NUM_VARIABLES = [
55
- 129, # hskp
56
- 1, # hi-counters-aggregated
57
- 3, # hi-counters-singles
58
- 8, # hi-omni
59
- 4, # hi-sectored
60
- 0, # hi-pha
60
+ 0, # hi-ialirt # TODO: Need to implement
61
+ 0, # lo-ialirt # TODO: Need to implement
62
+ 148, # hskp
61
63
  3, # lo-counters-aggregated
62
64
  3, # lo-counters-singles
63
- 6, # lo-sw-angular
64
- 3, # lo-nsw-angular
65
65
  7, # lo-sw-priority
66
66
  4, # lo-nsw-priority
67
67
  18, # lo-sw-species
68
68
  10, # lo-nsw-species
69
- 0, # lo-pha
69
+ 6, # lo-sw-angular
70
+ 3, # lo-nsw-angular
71
+ 1, # hi-counters-aggregated
72
+ 3, # hi-counters-singles
73
+ 8, # hi-omni
74
+ 4, # hi-sectored
75
+ 0, # hi-priority # TODO: Need to implement
76
+ 0, # lo-pha # TODO: Need to implement
77
+ 0, # hi-pha # TODO: Need to implement
70
78
  ]
71
79
 
72
80
 
73
- @pytest.fixture(params=TEST_PACKETS)
74
- def test_l1a_data(request) -> xr.Dataset:
81
+ @pytest.fixture(scope="session")
82
+ def test_l1a_data() -> xr.Dataset:
75
83
  """Return a ``xarray`` dataset containing test data.
76
84
 
77
85
  Returns
78
86
  -------
79
- dataset : xarray.Dataset
80
- A ``xarray`` dataset containing the test data
87
+ processed_datasets : list[xarray.Dataset]
88
+ A list of ``xarray`` datasets containing the test data
81
89
  """
82
90
 
83
- dataset = process_codice_l1a(file_path=request.param, data_version="001")
91
+ processed_datasets = process_codice_l1a(file_path=TEST_L0_FILE, data_version="001")
92
+
93
+ return processed_datasets
94
+
95
+
96
+ @pytest.mark.parametrize("index", range(len(EXPECTED_ARRAY_SHAPES)))
97
+ def test_l1a_data_array_shape(test_l1a_data, index):
98
+ """Tests that the data arrays in the generated CDFs have the expected shape.
99
+
100
+ Parameters
101
+ ----------
102
+ test_l1a_data : list[xarray.Dataset]
103
+ A list of ``xarray`` datasets containing the test data
104
+ index : int
105
+ The index of the list to test
106
+ """
107
+
108
+ processed_dataset = test_l1a_data[index]
109
+ expected_shape = EXPECTED_ARRAY_SHAPES[index]
110
+
111
+ # Mark currently broken/unsupported datasets as expected to fail
112
+ # TODO: Remove these once they are supported
113
+ if index in [0, 1, 15, 16, 17]:
114
+ pytest.xfail("Data product is currently unsupported")
84
115
 
85
- # Write the dataset to a CDF so it can be manually inspected as well
86
- file_path = write_cdf(dataset)
87
- logger.info(f"CDF file written to {file_path}")
116
+ for variable in processed_dataset:
117
+ if variable in ["energy_table", "acquisition_time_per_step"]:
118
+ assert processed_dataset[variable].data.shape == (128,)
119
+ else:
120
+ assert processed_dataset[variable].data.shape == expected_shape
88
121
 
89
- return dataset
90
122
 
123
+ @pytest.mark.parametrize("index", range(len(EXPECTED_LOGICAL_SOURCES)))
124
+ def test_l1a_logical_sources(test_l1a_data, index):
125
+ """Tests that the Logical source of the dataset is what is expected.
91
126
 
92
- @pytest.mark.xfail(reason="Epoch variable data needs to monotonically increase")
93
- @pytest.mark.parametrize(
94
- "test_l1a_data, expected_logical_source",
95
- list(zip(TEST_PACKETS, EXPECTED_LOGICAL_SOURCE)),
96
- indirect=["test_l1a_data"],
97
- )
98
- def test_l1a_cdf_filenames(test_l1a_data: xr.Dataset, expected_logical_source: str):
99
- """Tests that the ``process_codice_l1a`` function generates datasets
100
- with the expected logical source.
127
+ Since the logical source gets set by ``write_cdf``, this also tests that
128
+ the dataset can be written to a file.
101
129
 
102
130
  Parameters
103
131
  ----------
104
- test_l1a_data : xarray.Dataset
105
- A ``xarray`` dataset containing the test data
106
- expected_logical_source : str
107
- The expected CDF filename
132
+ test_l1a_data : list[xarray.Dataset]
133
+ A list of ``xarray`` datasets containing the test data
134
+ index : int
135
+ The index of the list to test
108
136
  """
109
137
 
110
- dataset = test_l1a_data
111
- assert dataset.attrs["Logical_source"] == expected_logical_source
138
+ processed_dataset = test_l1a_data[index]
139
+ expected_logical_source = EXPECTED_LOGICAL_SOURCES[index]
112
140
 
141
+ # Mark currently broken/unsupported datasets as expected to fail
142
+ # TODO: Remove these once they are supported
143
+ if index in [0, 1, 2, 15, 16, 17]:
144
+ pytest.xfail("Data product is currently unsupported")
113
145
 
114
- @pytest.mark.xfail(reason="Epoch variable data needs to monotonically increase")
115
- @pytest.mark.parametrize(
116
- "test_l1a_data, expected_shape",
117
- list(zip(TEST_PACKETS, EXPECTED_ARRAY_SHAPES)),
118
- indirect=["test_l1a_data"],
119
- )
120
- def test_l1a_data_array_shape(test_l1a_data: xr.Dataset, expected_shape: tuple):
121
- """Tests that the data arrays in the generated CDFs have the expected shape.
146
+ # Write the dataset to a file to set the logical source attribute
147
+ _ = write_cdf(processed_dataset)
148
+
149
+ assert processed_dataset.attrs["Logical_source"] == expected_logical_source
150
+
151
+
152
+ @pytest.mark.parametrize("index", range(len(EXPECTED_NUM_VARIABLES)))
153
+ def test_l1a_num_variables(test_l1a_data, index):
154
+ """Tests that the data arrays in the generated CDFs have the expected number
155
+ of variables.
122
156
 
123
157
  Parameters
124
158
  ----------
125
- test_l1a_data : xarray.Dataset
126
- A ``xarray`` dataset containing the test data
127
- expected_shape : tuple
128
- The expected shape of the data array
159
+ test_l1a_data : list[xarray.Dataset]
160
+ A list of ``xarray`` datasets containing the test data
161
+ index : int
162
+ The index of the list to test
129
163
  """
130
164
 
131
- dataset = test_l1a_data
132
- for variable in dataset:
133
- if variable in ["energy_table", "acquisition_time_per_step"]:
134
- assert dataset[variable].data.shape == (128,)
135
- else:
136
- assert dataset[variable].data.shape == expected_shape
165
+ processed_dataset = test_l1a_data[index]
166
+
167
+ # Mark currently broken/unsupported datasets as expected to fail
168
+ # TODO: Remove these once they are supported
169
+ if index in [0, 1, 15, 16, 17]:
170
+ pytest.xfail("Data product is currently unsupported")
171
+
172
+ assert len(processed_dataset) == EXPECTED_NUM_VARIABLES[index]
137
173
 
138
174
 
139
175
  @pytest.mark.skip("Awaiting validation data")
140
- @pytest.mark.parametrize(
141
- "test_l1a_data, validation_data",
142
- list(zip(TEST_PACKETS, VALIDATION_DATA)),
143
- indirect=["test_l1a_data"],
144
- )
145
- def test_l1a_data_array_values(test_l1a_data: xr.Dataset, validation_data: Path):
176
+ @pytest.mark.parametrize("index", range(len(VALIDATION_DATA)))
177
+ def test_l1a_data_array_values(test_l1a_data: xr.Dataset, index):
146
178
  """Tests that the generated L1a CDF contents are valid.
147
179
 
148
180
  Once proper validation files are acquired, this test function should point
@@ -151,40 +183,28 @@ def test_l1a_data_array_values(test_l1a_data: xr.Dataset, validation_data: Path)
151
183
 
152
184
  Parameters
153
185
  ----------
154
- test_l1a_data : xarray.Dataset
155
- A ``xarray`` dataset containing the test data
156
- validataion_data : pathlib.Path
157
- The path to the file containing the validation data
186
+ test_l1a_data : list[xarray.Dataset]
187
+ A list of ``xarray`` datasets containing the test data
188
+ index : int
189
+ The index of the list to test
158
190
  """
159
191
 
160
192
  generated_dataset = test_l1a_data
161
- validation_dataset = load_cdf(validation_data)
193
+ validation_dataset = load_cdf(VALIDATION_DATA[index])
162
194
 
163
195
  # Ensure the processed data matches the validation data
164
196
  for variable in validation_dataset:
165
197
  assert variable in generated_dataset
166
198
  if variable != "epoch":
167
199
  np.testing.assert_array_equal(
168
- validation_data[variable].data, generated_dataset[variable].data[0]
200
+ validation_dataset[variable].data, generated_dataset[variable].data[0]
169
201
  )
170
202
 
171
203
 
172
- @pytest.mark.xfail(reason="Epoch variable data needs to monotonically increase")
173
- @pytest.mark.parametrize(
174
- "test_l1a_data, expected_num_variables",
175
- list(zip(TEST_PACKETS, EXPECTED_NUM_VARIABLES)),
176
- indirect=["test_l1a_data"],
177
- )
178
- def test_l1a_num_variables(test_l1a_data: xr.Dataset, expected_num_variables: int):
179
- """Tests that the data arrays in the generated CDFs have the expected size.
204
+ def test_l1a_multiple_packets():
205
+ """Tests that an input L0 file containing multiple APIDs can be processed."""
180
206
 
181
- Parameters
182
- ----------
183
- test_l1a_data : xarray.Dataset
184
- A ``xarray`` dataset containing the test data
185
- expected_num_variables : int
186
- The expected number of data variables in the CDF
187
- """
207
+ processed_datasets = process_codice_l1a(file_path=TEST_L0_FILE, data_version="001")
188
208
 
189
- dataset = test_l1a_data
190
- assert len(dataset) == expected_num_variables
209
+ # TODO: Could add some more checks here?
210
+ assert len(processed_datasets) == 18
@@ -39,6 +39,7 @@ def test_l1b_data(request) -> xr.Dataset:
39
39
  return dataset
40
40
 
41
41
 
42
+ @pytest.mark.skip("Awaiting proper implementation of L1B")
42
43
  @pytest.mark.parametrize(
43
44
  "test_l1b_data, expected_logical_source",
44
45
  list(zip(TEST_L1A_FILES, EXPECTED_LOGICAL_SOURCE)),
@@ -10,14 +10,14 @@ from imap_processing.codice.utils import CoDICECompression
10
10
 
11
11
  # Test the algorithms using input value of 234 (picked randomly)
12
12
  lzma_bytes = lzma.compress((234).to_bytes(1, byteorder="big"))
13
- LZMA_EXAMPLE = "".join(format(byte, "08b") for byte in lzma_bytes)
13
+ # LZMA_EXAMPLE = "".join(format(byte, "08b") for byte in lzma_bytes)
14
14
  TEST_DATA = [
15
- ("11101010", CoDICECompression.NO_COMPRESSION, [234]),
16
- ("11101010", CoDICECompression.LOSSY_A, [221184]),
17
- ("11101010", CoDICECompression.LOSSY_B, [1441792]),
18
- (LZMA_EXAMPLE, CoDICECompression.LOSSLESS, [234]),
19
- (LZMA_EXAMPLE, CoDICECompression.LOSSY_A_LOSSLESS, [221184]),
20
- (LZMA_EXAMPLE, CoDICECompression.LOSSY_B_LOSSLESS, [1441792]),
15
+ (b"\xea", CoDICECompression.NO_COMPRESSION, [234]),
16
+ (b"\xea", CoDICECompression.LOSSY_A, [221184]),
17
+ (b"\xea", CoDICECompression.LOSSY_B, [1441792]),
18
+ (lzma_bytes, CoDICECompression.LOSSLESS, [234]),
19
+ (lzma_bytes, CoDICECompression.LOSSY_A_LOSSLESS, [221184]),
20
+ (lzma_bytes, CoDICECompression.LOSSY_B_LOSSLESS, [1441792]),
21
21
  ]
22
22
 
23
23
 
@@ -297,19 +297,55 @@ def _unset_metakernel_path(monkeypatch):
297
297
 
298
298
 
299
299
  @pytest.fixture()
300
- def _set_spin_data_filepath(monkeypatch, tmpdir, generate_spin_data):
301
- """Set the SPIN_DATA_FILEPATH environment variable"""
302
- # SWE test data time minus 56120 seconds to get mid-night time
303
- start_time = 453051323.0 - 56120
304
- spin_df = generate_spin_data(start_time)
305
- spin_csv_file_path = tmpdir / "spin_data.spin.csv"
306
- spin_df.to_csv(spin_csv_file_path, index=False)
307
- monkeypatch.setenv("SPIN_DATA_FILEPATH", str(spin_csv_file_path))
300
+ def use_test_spin_data_csv(monkeypatch):
301
+ """Sets the SPIN_DATA_FILEPATH environment variable to input path."""
302
+
303
+ def wrapped_set_spin_data_filepath(path: Path):
304
+ monkeypatch.setenv("SPIN_DATA_FILEPATH", str(path))
305
+
306
+ return wrapped_set_spin_data_filepath
307
+
308
+
309
+ @pytest.fixture()
310
+ def use_fake_spin_data_for_time(
311
+ request, use_test_spin_data_csv, tmpdir, generate_spin_data
312
+ ):
313
+ """
314
+ Generate and use fake spin data for testing.
315
+
316
+ Returns
317
+ -------
318
+ callable
319
+ Returns a callable function that takes start_met and optionally end_met
320
+ as inputs, generates fake spin data, writes the data to a csv file,
321
+ and sets the SPIN_DATA_FILEPATH environment variable to point to the
322
+ fake spin data file.
323
+ """
324
+
325
+ def wrapped_set_spin_data_filepath(
326
+ start_met: float, end_met: Optional[int] = None
327
+ ) -> pd.DataFrame:
328
+ """
329
+ Generate and use fake spin data for testing.
330
+ Parameters
331
+ ----------
332
+ start_met : int
333
+ Provides the start time in Mission Elapsed Time (MET).
334
+ end_met : int
335
+ Provides the end time in MET. If not provided, default to one day
336
+ from start time.
337
+ """
338
+ spin_df = generate_spin_data(start_met, end_met=end_met)
339
+ spin_csv_file_path = tmpdir / "spin_data.spin.csv"
340
+ spin_df.to_csv(spin_csv_file_path, index=False)
341
+ use_test_spin_data_csv(spin_csv_file_path)
342
+
343
+ return wrapped_set_spin_data_filepath
308
344
 
309
345
 
310
346
  @pytest.fixture()
311
347
  def generate_spin_data():
312
- def make_data(start_met: int, end_met: Optional[int] = None) -> pd.DataFrame:
348
+ def make_data(start_met: float, end_met: Optional[float] = None) -> pd.DataFrame:
313
349
  """
314
350
  Generate a spin table CSV covering one or more days.
315
351
  Spin table contains the following fields:
@@ -324,14 +360,14 @@ def generate_spin_data():
324
360
  thruster_firing
325
361
  )
326
362
  This function creates spin data using start MET and end MET time.
327
- Each spin start data uses the nominal 15 second spin period. The spins that
363
+ Each spin start data uses the nominal 15-second spin period. The spins that
328
364
  occur from 00:00(Mid-night) to 00:10 UTC are marked with flags for
329
365
  thruster firing, invalid spin period, and invalid spin phase.
330
366
  Parameters
331
367
  ----------
332
- start_met : int
368
+ start_met : float
333
369
  Provides the start time in Mission Elapsed Time (MET).
334
- end_met : int
370
+ end_met : float
335
371
  Provides the end time in MET. If not provided, default to one day
336
372
  from start time.
337
373
  Returns
@@ -344,7 +380,8 @@ def generate_spin_data():
344
380
  end_met = start_met + 86400
345
381
 
346
382
  # Create spin start second data of 15 seconds increment
347
- spin_start_sec = np.arange(start_met, end_met + 1, 15)
383
+ spin_start_sec = np.arange(np.floor(start_met), end_met + 1, 15)
384
+ spin_start_subsec = int((start_met - spin_start_sec[0]) * 1000)
348
385
 
349
386
  nspins = len(spin_start_sec)
350
387
 
@@ -352,7 +389,9 @@ def generate_spin_data():
352
389
  {
353
390
  "spin_number": np.arange(nspins, dtype=np.uint32),
354
391
  "spin_start_sec": spin_start_sec,
355
- "spin_start_subsec": np.zeros(nspins, dtype=np.uint32),
392
+ "spin_start_subsec": np.full(
393
+ nspins, spin_start_subsec, dtype=np.uint32
394
+ ),
356
395
  "spin_period_sec": np.full(nspins, 15.0, dtype=np.float32),
357
396
  "spin_period_valid": np.ones(nspins, dtype=np.uint8),
358
397
  "spin_phase_valid": np.ones(nspins, dtype=np.uint8),
@@ -362,7 +401,7 @@ def generate_spin_data():
362
401
  )
363
402
 
364
403
  # Convert spin_start_sec to datetime to set repointing times flags
365
- spin_start_dates = met_to_j2000ns(spin_start_sec)
404
+ spin_start_dates = met_to_j2000ns(spin_start_sec + spin_start_subsec / 1000)
366
405
  spin_start_dates = cdflib.cdfepoch.to_datetime(spin_start_dates)
367
406
 
368
407
  # Convert DatetimeIndex to Series for using .dt accessor
@@ -6,6 +6,7 @@ import pytest
6
6
  from imap_processing.glows.l0 import decom_glows
7
7
  from imap_processing.glows.l1a.glows_l1a import glows_l1a, process_de_l0
8
8
  from imap_processing.glows.l1a.glows_l1a_data import HistogramL1A
9
+ from imap_processing.glows.l1b.glows_l1b import glows_l1b
9
10
 
10
11
 
11
12
  @pytest.fixture()
@@ -40,3 +41,8 @@ def l1a_test_data(decom_test_data):
40
41
  @pytest.fixture()
41
42
  def l1a_dataset(packet_path):
42
43
  return glows_l1a(packet_path, "v001")
44
+
45
+
46
+ @pytest.fixture()
47
+ def l1b_hist_dataset(l1a_dataset):
48
+ return glows_l1b(l1a_dataset[0], "v001")
@@ -318,23 +318,23 @@ def test_glows_l1b(de_dataset, hist_dataset):
318
318
  "imap_spin_angle_bin_cntr",
319
319
  "histogram_flag_array",
320
320
  "filter_temperature_average",
321
- "filter_temperature_variance",
321
+ "filter_temperature_std_dev",
322
322
  "hv_voltage_average",
323
- "hv_voltage_variance",
323
+ "hv_voltage_std_dev",
324
324
  "spin_period_average",
325
- "spin_period_variance",
325
+ "spin_period_std_dev",
326
326
  "pulse_length_average",
327
- "pulse_length_variance",
327
+ "pulse_length_std_dev",
328
328
  "spin_period_ground_average",
329
- "spin_period_ground_variance",
329
+ "spin_period_ground_std_dev",
330
330
  "position_angle_offset_average",
331
- "position_angle_offset_variance",
332
- "spin_axis_orientation_variance",
331
+ "position_angle_offset_std_dev",
332
+ "spin_axis_orientation_std_dev",
333
333
  "spin_axis_orientation_average",
334
334
  "spacecraft_location_average",
335
- "spacecraft_location_variance",
335
+ "spacecraft_location_std_dev",
336
336
  "spacecraft_velocity_average",
337
- "spacecraft_velocity_variance",
337
+ "spacecraft_velocity_std_dev",
338
338
  "flags",
339
339
  ]
340
340
 
@@ -102,24 +102,24 @@ def test_validation_data_histogram(l1a_dataset):
102
102
  # "imap_spin_angle_bin_cntr": "imap_spin_angle_bin_cntr",
103
103
  # "histogram_flag_array": "histogram_flag_array",
104
104
  "filter_temperature_average": "filter_temperature_average",
105
- "filter_temperature_std_dev": "filter_temperature_variance",
105
+ "filter_temperature_std_dev": "filter_temperature_std_dev",
106
106
  "hv_voltage_average": "hv_voltage_average",
107
- "hv_voltage_std_dev": "hv_voltage_variance",
107
+ "hv_voltage_std_dev": "hv_voltage_std_dev",
108
108
  "spin_period_average": "spin_period_average",
109
- "spin_period_std_dev": "spin_period_variance",
109
+ "spin_period_std_dev": "spin_period_std_dev",
110
110
  "pulse_length_average": "pulse_length_average",
111
- "pulse_length_std_dev": "pulse_length_variance",
111
+ "pulse_length_std_dev": "pulse_length_std_dev",
112
112
  # TODO uncomment when spice is complete
113
113
  # "spin_period_ground_average": "spin_period_ground_average",
114
- # "spin_period_ground_std_dev": "spin_period_ground_variance",
114
+ # "spin_period_ground_std_dev": "spin_period_ground_std_dev",
115
115
  # "position_angle_offset_average": "position_angle_offset_average",
116
- # "position_angle_offset_std_dev": "position_angle_offset_variance",
116
+ # "position_angle_offset_std_dev": "position_angle_offset_std_dev",
117
117
  # "spin_axis_orientation_average": "spin_axis_orientation_average",
118
- # "spin_axis_orientation_std_dev": "spin_axis_orientation_variance",
118
+ # "spin_axis_orientation_std_dev": "spin_axis_orientation_std_dev",
119
119
  # "spacecraft_location_average": "spacecraft_location_average",
120
- # "spacecraft_location_std_dev": "spacecraft_location_variance",
120
+ # "spacecraft_location_std_dev": "spacecraft_location_std_dev",
121
121
  # "spacecraft_velocity_average": "spacecraft_velocity_average",
122
- # "spacecraft_velocity_std_dev": "spacecraft_velocity_variance",
122
+ # "spacecraft_velocity_std_dev": "spacecraft_velocity_std_dev",
123
123
  }
124
124
 
125
125
  for index, validation_output in enumerate(out["output"]):
File without changes
@@ -12,6 +12,8 @@ from imap_processing.hi.l1b.hi_l1b import (
12
12
  CoincidenceBitmap,
13
13
  compute_coincidence_type_and_time_deltas,
14
14
  compute_hae_coordinates,
15
+ de_esa_energy_step,
16
+ de_nominal_bin_and_spin_phase,
15
17
  hi_l1b,
16
18
  )
17
19
  from imap_processing.hi.utils import HiConstants
@@ -32,9 +34,13 @@ def test_hi_l1b_hk(hi_l0_test_data_path):
32
34
 
33
35
  @pytest.mark.external_kernel()
34
36
  @pytest.mark.use_test_metakernel("imap_ena_sim_metakernel.template")
35
- def test_hi_l1b_de(hi_l1a_test_data_path):
37
+ def test_hi_l1b_de(
38
+ hi_l1a_test_data_path, spice_test_data_path, use_fake_spin_data_for_time
39
+ ):
36
40
  """Test coverage for imap_processing.hi.hi_l1b.hi_l1b() with
37
41
  direct events L1A as input"""
42
+ # Start MET time of spin for simulated input data is 482372988
43
+ use_fake_spin_data_for_time(482372988)
38
44
  l1a_test_file_path = (
39
45
  hi_l1a_test_data_path / "imap_hi_l1a_45sensor-de_20250415_v000.cdf"
40
46
  )
@@ -152,6 +158,53 @@ def test_compute_coincidence_type_and_time_deltas(synthetic_trigger_id_and_tof_d
152
158
  )
153
159
 
154
160
 
161
+ @mock.patch("imap_processing.hi.l1b.hi_l1b.parse_sensor_number", return_value=90)
162
+ @mock.patch("imap_processing.hi.l1b.hi_l1b.get_instrument_spin_phase")
163
+ @mock.patch("imap_processing.hi.l1b.hi_l1b.get_spacecraft_spin_phase")
164
+ def test_de_nominal_bin_and_spin_phase(
165
+ spacecraft_phase_moc, instrument_phase_mock, parse_sensor_number_mock
166
+ ):
167
+ """Test coverage for de_nominal_bin_and_spin_phase."""
168
+ # set the spacecraft_phase_mock to return an array of values between 0 and 1
169
+ # that is rolled 30 places for easy testing
170
+ spacecraft_phase_roll = 30
171
+ spacecraft_phase_moc.side_effect = lambda x: np.roll(
172
+ np.arange(0, 1, 1 / len(x)), spacecraft_phase_roll
173
+ )
174
+ # set the get_instrument_spin_phase mock to return an array of values between
175
+ # 0 and 1
176
+ instrument_phase_mock.side_effect = lambda x, y: np.arange(0, 1, 1 / len(x))
177
+ # generate a fake dataset with epoch coordinate and event_met variable
178
+ de_list_length = 720
179
+ synthetic_ds = xr.Dataset(
180
+ coords={
181
+ "epoch": xr.DataArray(
182
+ np.arange(de_list_length), name="epoch", dims=["epoch"]
183
+ )
184
+ },
185
+ data_vars={
186
+ "event_met": xr.DataArray(np.arange(de_list_length), dims=["epoch"])
187
+ },
188
+ attrs={"Logical_source": "foo_source"},
189
+ )
190
+
191
+ new_vars = de_nominal_bin_and_spin_phase(synthetic_ds)
192
+ # Check spin_phase
193
+ assert "spin_phase" in new_vars
194
+ assert new_vars["spin_phase"].shape == (de_list_length,)
195
+ np.testing.assert_array_equal(
196
+ new_vars["spin_phase"].values,
197
+ np.linspace(0, 1, de_list_length + 1, dtype=np.float32)[:-1],
198
+ )
199
+ # Check nominal_bin
200
+ assert "nominal_bin" in new_vars
201
+ expected_nominal_bin = np.roll(
202
+ np.digitize(np.arange(0, 360, 360 / de_list_length), np.arange(90) * 4) - 1,
203
+ spacecraft_phase_roll,
204
+ )
205
+ np.testing.assert_array_equal(new_vars["nominal_bin"].values, expected_nominal_bin)
206
+
207
+
155
208
  @pytest.mark.parametrize("sensor_number", [45, 90])
156
209
  @mock.patch("imap_processing.hi.l1b.hi_l1b.instrument_pointing")
157
210
  def test_compute_hae_coordinates(mock_instrument_pointing, sensor_number):
@@ -183,3 +236,20 @@ def test_compute_hae_coordinates(mock_instrument_pointing, sensor_number):
183
236
  assert "hae_longitude" in new_vars
184
237
  assert new_vars["hae_longitude"].shape == fake_dataset.epoch.shape
185
238
  np.testing.assert_allclose(new_vars["hae_longitude"].values, sensor_number)
239
+
240
+
241
+ def test_de_esa_energy_step():
242
+ """Test coverage for de_esa_energy_step function."""
243
+ n_epoch = 20
244
+ fake_dataset = xr.Dataset(
245
+ coords={
246
+ "epoch": xr.DataArray(np.arange(n_epoch), name="epoch", dims=["epoch"])
247
+ },
248
+ data_vars={"esa_step": xr.DataArray(np.arange(n_epoch) % 9, dims=["epoch"])},
249
+ )
250
+ esa_energy_step_var = de_esa_energy_step(fake_dataset)
251
+ # TODO: The below check is for the temporary implementation and should be
252
+ # removed when the function is update.
253
+ np.testing.assert_array_equal(
254
+ esa_energy_step_var["esa_energy_step"].values, fake_dataset.esa_step.values
255
+ )