imap-processing 0.8.0__py3-none-any.whl → 0.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of imap-processing might be problematic.
- imap_processing/_version.py +2 -2
- imap_processing/ccsds/excel_to_xtce.py +2 -0
- imap_processing/cdf/config/imap_hi_variable_attrs.yaml +100 -1
- imap_processing/cdf/config/imap_hit_global_cdf_attrs.yaml +14 -0
- imap_processing/cdf/config/imap_hit_l1a_variable_attrs.yaml +63 -1
- imap_processing/cdf/config/imap_idex_global_cdf_attrs.yaml +7 -0
- imap_processing/cdf/config/imap_idex_l1a_variable_attrs.yaml +574 -231
- imap_processing/cdf/config/imap_idex_l1b_variable_attrs.yaml +326 -0
- imap_processing/cdf/config/imap_lo_l1a_variable_attrs.yaml +33 -23
- imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +7 -4
- imap_processing/cdf/utils.py +3 -5
- imap_processing/cli.py +13 -4
- imap_processing/codice/codice_l1a.py +5 -5
- imap_processing/codice/constants.py +9 -9
- imap_processing/codice/decompress.py +6 -2
- imap_processing/glows/l1a/glows_l1a.py +1 -2
- imap_processing/hi/l1a/hi_l1a.py +4 -4
- imap_processing/hi/l1a/histogram.py +106 -108
- imap_processing/hi/l1a/science_direct_event.py +91 -224
- imap_processing/hi/packet_definitions/TLM_HI_COMBINED_SCI.xml +3994 -0
- imap_processing/hit/l0/constants.py +2 -2
- imap_processing/hit/l0/decom_hit.py +12 -101
- imap_processing/hit/l1a/hit_l1a.py +164 -23
- imap_processing/ialirt/l0/process_codicelo.py +153 -0
- imap_processing/ialirt/l0/process_hit.py +5 -5
- imap_processing/ialirt/packet_definitions/ialirt_codicelo.xml +281 -0
- imap_processing/ialirt/process_ephemeris.py +212 -0
- imap_processing/idex/idex_l1a.py +55 -75
- imap_processing/idex/idex_l1b.py +192 -0
- imap_processing/idex/idex_variable_unpacking_and_eu_conversion.csv +33 -0
- imap_processing/idex/packet_definitions/idex_packet_definition.xml +97 -595
- imap_processing/lo/l0/decompression_tables/decompression_tables.py +16 -0
- imap_processing/lo/l0/lo_science.py +44 -12
- imap_processing/lo/l1a/lo_l1a.py +76 -8
- imap_processing/lo/packet_definitions/lo_xtce.xml +9877 -87
- imap_processing/mag/l1a/mag_l1a.py +1 -2
- imap_processing/mag/l1a/mag_l1a_data.py +1 -2
- imap_processing/mag/l1b/mag_l1b.py +2 -1
- imap_processing/spice/geometry.py +37 -19
- imap_processing/spice/time.py +144 -2
- imap_processing/swapi/l1/swapi_l1.py +3 -3
- imap_processing/swapi/packet_definitions/swapi_packet_definition.xml +1535 -446
- imap_processing/swe/l2/swe_l2.py +134 -17
- imap_processing/tests/ccsds/test_data/expected_output.xml +1 -1
- imap_processing/tests/codice/test_codice_l1a.py +8 -8
- imap_processing/tests/codice/test_decompress.py +4 -4
- imap_processing/tests/conftest.py +46 -43
- imap_processing/tests/hi/test_data/l0/H90_NHK_20241104.bin +0 -0
- imap_processing/tests/hi/test_data/l0/H90_sci_cnt_20241104.bin +0 -0
- imap_processing/tests/hi/test_data/l0/H90_sci_de_20241104.bin +0 -0
- imap_processing/tests/hi/test_hi_l1b.py +2 -2
- imap_processing/tests/hi/test_l1a.py +31 -58
- imap_processing/tests/hi/test_science_direct_event.py +58 -0
- imap_processing/tests/hit/test_data/sci_sample1.ccsds +0 -0
- imap_processing/tests/hit/test_decom_hit.py +60 -50
- imap_processing/tests/hit/test_hit_l1a.py +327 -12
- imap_processing/tests/hit/test_hit_l1b.py +76 -0
- imap_processing/tests/hit/validation_data/hskp_sample_eu.csv +89 -0
- imap_processing/tests/hit/validation_data/sci_sample_raw1.csv +29 -0
- imap_processing/tests/ialirt/test_data/l0/apid01152.tlm +0 -0
- imap_processing/tests/ialirt/test_data/l0/imap_codice_l1a_lo-ialirt_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/ialirt/unit/test_process_codicelo.py +106 -0
- imap_processing/tests/ialirt/unit/test_process_ephemeris.py +109 -0
- imap_processing/tests/ialirt/unit/test_process_hit.py +9 -6
- imap_processing/tests/idex/conftest.py +1 -1
- imap_processing/tests/idex/test_idex_l0.py +1 -1
- imap_processing/tests/idex/test_idex_l1a.py +7 -1
- imap_processing/tests/idex/test_idex_l1b.py +126 -0
- imap_processing/tests/lo/test_lo_l1a.py +7 -16
- imap_processing/tests/lo/test_lo_science.py +67 -3
- imap_processing/tests/lo/test_pkts/imap_lo_l0_raw_20240803_v002.pkts +0 -0
- imap_processing/tests/lo/validation_data/Instrument_FM1_T104_R129_20240803_ILO_SCI_DE_dec_DN_with_fills.csv +1999 -0
- imap_processing/tests/mag/test_mag_l1b.py +39 -5
- imap_processing/tests/spice/test_geometry.py +32 -6
- imap_processing/tests/spice/test_time.py +135 -6
- imap_processing/tests/swapi/test_swapi_decom.py +75 -69
- imap_processing/tests/swapi/test_swapi_l1.py +4 -4
- imap_processing/tests/swe/test_swe_l2.py +64 -8
- imap_processing/tests/test_utils.py +1 -1
- imap_processing/tests/ultra/test_data/l0/ultra45_raw_sc_ultrarawimg_withFSWcalcs_FM45_40P_Phi28p5_BeamCal_LinearScan_phi2850_theta-000_20240207T102740.csv +3314 -3314
- imap_processing/tests/ultra/unit/test_de.py +8 -3
- imap_processing/tests/ultra/unit/test_spatial_utils.py +125 -0
- imap_processing/tests/ultra/unit/test_ultra_l1b_extended.py +39 -29
- imap_processing/tests/ultra/unit/test_ultra_l1c_pset_bins.py +2 -25
- imap_processing/ultra/constants.py +4 -0
- imap_processing/ultra/l1b/de.py +8 -14
- imap_processing/ultra/l1b/ultra_l1b_extended.py +29 -70
- imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +1 -36
- imap_processing/ultra/utils/spatial_utils.py +221 -0
- {imap_processing-0.8.0.dist-info → imap_processing-0.9.0.dist-info}/METADATA +1 -1
- {imap_processing-0.8.0.dist-info → imap_processing-0.9.0.dist-info}/RECORD +94 -76
- imap_processing/hi/l0/__init__.py +0 -0
- imap_processing/hi/l0/decom_hi.py +0 -24
- imap_processing/hi/packet_definitions/hi_packet_definition.xml +0 -482
- imap_processing/tests/hi/test_decom.py +0 -55
- imap_processing/tests/hi/test_l1a_sci_de.py +0 -72
- {imap_processing-0.8.0.dist-info → imap_processing-0.9.0.dist-info}/LICENSE +0 -0
- {imap_processing-0.8.0.dist-info → imap_processing-0.9.0.dist-info}/WHEEL +0 -0
- {imap_processing-0.8.0.dist-info → imap_processing-0.9.0.dist-info}/entry_points.txt +0 -0
imap_processing/hi/l1a/science_direct_event.py

@@ -1,279 +1,135 @@
 """IMAP-Hi direct event processing."""

+from collections import defaultdict
+
 import numpy as np
+import numpy._typing as npt
 import xarray as xr

 from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
 from imap_processing.spice.time import met_to_j2000ns
-from imap_processing.utils import convert_to_binary_string

 # TODO: read LOOKED_UP_DURATION_OF_TICK from
 # instrument status summary later. This value
 # is rarely change but want to be able to change
 # it if needed. It stores information about how
 # fast the time was ticking. It is in microseconds.
-LOOKED_UP_DURATION_OF_TICK =
+LOOKED_UP_DURATION_OF_TICK = 1999

 SECOND_TO_NS = 1e9
 MILLISECOND_TO_NS = 1e6
 MICROSECOND_TO_NS = 1e3


-def
+def parse_direct_events(de_data: bytes) -> dict[str, npt.ArrayLike]:
     """
-    Parse event data.
+    Parse event data from a binary blob.

     IMAP-Hi direct event data information is stored in
-    48-bits as
+    48-bits as follows:

-
-    | out which type of event it is. start_bitmask_data value mapping:
-
-    | 1 - A
-    | 2 - B
-    | 3 - C
-    | 0 - META
-    | If it's a metaevent:
-
-    | Read 48-bits into 2, 4, 10, 32 bits. Each of these breaks
-    | down as:
-
-    | start_bitmask_data - 2 bits (tA=1, tB=2, tC1=3, META=0)
-    | ESA step - 4 bits
-    | integer millisecond of MET(subseconds) - 10 bits
-    | integer MET(seconds) - 32 bits
-
-    | If it's not a metaevent:
-    | Read 48-bits into 2, 10, 10, 10, 16 bits. Each of these breaks
+    | Read 48-bits into 2, 16, 10, 10, 10, bits. Each of these breaks
     | down as:
     |
     | start_bitmask_data - 2 bits (tA=1, tB=2, tC1=3, META=0)
+    | de_tag - 16 bits
     | tof_1 - 10 bit counter
     | tof_2 - 10 bit counter
     | tof_3 - 10 bit counter
-    | de_tag - 16 bits

-    There are at most total of
+    There are at most total of 664 of 48-bits in each data packet.
     This data packet is of variable length. If there is one event, then
-    DE_TOF will contain 48-bits. If there are
-    DE_TOF will contain
+    DE_TOF will contain 48-bits. If there are 664 events, then
+    DE_TOF will contain 664 x 48-bits. If there is no event, then
     DE_TOF will contain 0-bits.

-
-
-
-
-    information and second packet will contain 0-bits in DE_TOF. In general,
-    every two packets will look like this.
-
-    | first packet = [
-    | (start_bitmask_data, ESA step, int millisecond of MET, int MET),
-    | (start_bitmask_data, tof_1, tof_2, tof_3, de_tag),
-    | .....
-    | ]
-    | second packet = [
-    | (start_bitmask_data, tof_1, tof_2, tof_3, de_tag),
-    | .....
-    | ]
-
-    In direct event data, if no hit is registered, the tof_x field in
-    the DE to a value of negative one. However, since the field is described as a
-    "10-bit unsigned counter," it cannot actually store negative numbers.
-    Instead, the value negative is represented by the maximum value that can
-    be stored in a 10-bit unsigned integer, which is 0x3FF (in hexadecimal)
-    or 1023 in decimal. This value is used as a special marker to
-    indicate that no hit was registered. Ideally, the system should
-    not be able to register a hit with a value of 1023 for all
-    tof_1, tof_2, tof_3, because this is in case of an error. But,
-    IMAP-Hi like to process it still to investigate the data.
-    Example of what it will look like if no hit was registered.
-
-    | (start_bitmask_data, 1023, 1023, 1023, de_tag)
-    | start_bitmask_data will be 1 or 2 or 3.
+    There should be two data packets per ESA. Each packet contains meta-event
+    data that is identical between the two packets for a common ESA.
+    If there is no event record for certain ESA step, then both packets will
+    contain 0-bits in DE_TOF.

     Parameters
     ----------
-
-
+    de_data : bytes
+        Binary blob from de_tag field of SCI_DE packet. Must be an integer
+        multiple of 48-bits of data.

     Returns
     -------
-
+    Dict[str, list]
         Parsed event data.
     """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    tof_1 =
-    tof_2 =
-
-
-
-    # return parsed direct event data
-    return {
-        "start_bitmask_data": int(trigger_id, 2),
-        "tof_1": int(tof_1, 2),
-        "tof_2": int(tof_2, 2),
-        "tof_3": int(tof_3, 2),
-        "de_tag": int(de_tag, 2),
-    }
-
-
-def break_into_bits_size(binary_data: str) -> list:
-    """
-    Break binary stream data into 48-bits.
-
-    Parameters
-    ----------
-    binary_data : str
-        Binary data.
+    # The de_data is a binary blob with Nx6 bytes of data where N = number of
+    # direct events encoded into the binary blob. Interpreting the data as
+    # big-endian uint16 data and reshaping into a (3, -1) ndarray results
+    # in an array with shape (3, N). Indexing the first axis of that array
+    # (e.g. data_uint16[i]) gives the ith 2-bytes of data for each of the N
+    # direct events.
+    # Considering the 6-bytes of data for each DE as 3 2-byte words,
+    # each word contains the following:
+    # word_0: 2-bits of Trigger ID, upper 14-bits of de_tag
+    # word_1: lower 2-bits of de_tag, 10-bits tof_1, upper 4-bits of tof_2
+    # word_2: lower 6-bits of tof_2, 10-bits of tof_3
+    data_uint16 = np.reshape(
+        np.frombuffer(de_data, dtype=">u2"), (3, -1), order="F"
+    ).astype(np.uint16)
+
+    de_dict = dict()
+    de_dict["trigger_id"] = (data_uint16[0] >> 14).astype(np.uint8)
+    de_dict["de_tag"] = (data_uint16[0] << 2) + (data_uint16[1] >> 14)
+    de_dict["tof_1"] = (data_uint16[1] & int(b"00111111_11110000", 2)) >> 4
+    de_dict["tof_2"] = ((data_uint16[1] & int(b"00000000_00001111", 2)) << 6) + (
+        data_uint16[2] >> 10
+    )
+    de_dict["tof_3"] = data_uint16[2] & int(b"00000011_11111111", 2)

-
-    -------
-    list
-        List of 48-bits.
-    """
-    # TODO: ask Paul what to do if the length of
-    # binary_data is not a multiple of 48
-    field_bit_length = 48
-    return [
-        binary_data[i : i + field_bit_length]
-        for i in range(0, len(binary_data), field_bit_length)
-    ]
+    return de_dict


-def create_dataset(
+def create_dataset(de_data_dict: dict[str, npt.ArrayLike]) -> xr.Dataset:
     """
     Create xarray dataset.

     Parameters
     ----------
-
-
-    packet_met_time : list
-        List of packet MET time.
+    de_data_dict : Dict[list]
+        Dictionary of packet telemetry and direct event data lists.

     Returns
     -------
     dataset : xarray.Dataset
         Xarray dataset.
     """
-    #
-
-    "
-    "
-
-
-
-
-
-
-    "
-    "de_tag"
-
-
-
-    # means that current data file started with direct event.
-    # Per Paul, log a warning and discard all direct events
-    # until we see next metaevent because it could mean
-    # that the instrument was turned off during repoint.
-
-    # Find the index of the first occurrence of the metaevent
-    first_metaevent_index = next(
-        (i for i, d in enumerate(de_data_list) if d.get("start_bitmask_data") == 0),
-        None,
+    # Compute the meta-event MET in nanoseconds
+    de_data_dict["meta_event_met"] = (
+        np.array(de_data_dict.pop("meta_seconds")) * SECOND_TO_NS
+        + np.array(de_data_dict.pop("meta_subseconds")) * MILLISECOND_TO_NS
+    )
+    # Compute the MET of each event in nanoseconds
+    # event MET = meta_event_met + de_clock + 1/2 de_clock_tick
+    # See Hi Algorithm Document section 2.2.5
+    half_tick_ns = LOOKED_UP_DURATION_OF_TICK / 2 * MICROSECOND_TO_NS
+    de_data_dict["event_met"] = (
+        de_data_dict["meta_event_met"]
+        + np.array(de_data_dict["de_tag"])
+        * LOOKED_UP_DURATION_OF_TICK
+        * MICROSECOND_TO_NS
+        + half_tick_ns
     )
-
-    if first_metaevent_index is None:
-        return None
-    elif first_metaevent_index != 0:
-        # Discard all direct events until we see next metaevent
-        # TODO: log a warning
-        de_data_list = de_data_list[first_metaevent_index:]
-        packet_met_time = packet_met_time[first_metaevent_index:]
-
-    for index, event in enumerate(de_data_list):
-        if event["start_bitmask_data"] == 0:
-            # metaevent is a way to store information
-            # about bigger portion of time information. Eg.
-            # metaevent stores information about, let's say
-            # "20240319T09:30:01.000". Then direct event time
-            # tag stores information of time ticks since
-            # that time. Then we use those two to combine and
-            # get exact time information of each event.
-
-            # set time and esa step values to
-            # be used for direct event followed by
-            # this metaevent
-            int_subseconds = event["subseconds"]
-            int_seconds = event["seconds"]
-            current_esa_step = event["esa_step"]
-
-            metaevent_time_in_ns = (
-                int_seconds * SECOND_TO_NS + int_subseconds * MILLISECOND_TO_NS
-            )
-
-            # Add half a tick once per algorithm document(
-            # section 2.2.5 and second last bullet point)
-            # and Paul Janzen.
-            half_tick = LOOKED_UP_DURATION_OF_TICK / 2
-            # convert microseconds to nanosecond to
-            # match other time format
-            half_tick_ns = half_tick * MICROSECOND_TO_NS
-            metaevent_time_in_ns += half_tick_ns
-            continue
-
-        data_dict["meta_event_met"].append(metaevent_time_in_ns)
-        # calculate direct event time using time information from metaevent
-        # and de_tag. epoch in this dataset uses this time of the event
-        de_met_in_ns = (
-            metaevent_time_in_ns
-            + event["de_tag"] * LOOKED_UP_DURATION_OF_TICK * MICROSECOND_TO_NS
-        )
-        data_dict["event_met"].append(de_met_in_ns)
-        data_dict["epoch"].append(met_to_j2000ns(de_met_in_ns / 1e9))
-        data_dict["esa_step"].append(current_esa_step)
-        # start_bitmask_data is 1, 2, 3 for detector A, B, C
-        # respectively. This is used to identify which detector
-        # was hit first for this current direct event.
-        data_dict["trigger_id"].append(event["start_bitmask_data"])
-        data_dict["tof_1"].append(event["tof_1"])
-        data_dict["tof_2"].append(event["tof_2"])
-        data_dict["tof_3"].append(event["tof_3"])
-        # IMAP-Hi like to keep de_tag value for diagnostic purposes
-        data_dict["de_tag"].append(event["de_tag"])
-        # add packet time to ccsds_met list
-        data_dict["ccsds_met"].append(packet_met_time[index])

     # Load the CDF attributes
     attr_mgr = ImapCdfAttributes()
     attr_mgr.add_instrument_global_attrs("hi")
     attr_mgr.add_instrument_variable_attrs(instrument="hi", level=None)
-    # uncomment this once Maxine's PR is merged
-    # attr_mgr.add_global_attribute("Data_version", data_version)

-
+    # check_schema=False keeps DEPEND_0 = '' from being auto added
+    epoch_attrs = attr_mgr.get_variable_attributes("epoch", check_schema=False)
     epoch_attrs["CATDESC"] = (
         "Direct Event time, number of nanoseconds since J2000 with leap "
         "seconds included"
     )
-
-
+    epoch = xr.DataArray(
+        met_to_j2000ns(de_data_dict["event_met"] / 1e9),
         name="epoch",
         dims=["epoch"],
         attrs=epoch_attrs,
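The rewritten parse_direct_events() above replaces per-bit string slicing with vectorized word unpacking, and create_dataset() now derives each event time from the meta-event fields. As a rough illustration of the 48-bit layout (2-bit trigger ID, 16-bit de_tag, three 10-bit TOF counters) and of the "meta-event MET + de_tag ticks + half a tick" timestamp formula, here is a standalone sketch; it is not code from the package, and the helper names and example values are invented:

    # Standalone illustration of the 48-bit direct-event layout; helper names
    # and all values are hypothetical, not taken from imap_processing.
    LOOKED_UP_DURATION_OF_TICK = 1999  # microseconds, matching the new constant
    MICROSECOND_TO_NS = 1e3


    def pack_event(trigger_id: int, de_tag: int, tof_1: int, tof_2: int, tof_3: int) -> bytes:
        """Pack one event into 6 big-endian bytes: 2 + 16 + 10 + 10 + 10 bits."""
        value = (trigger_id << 46) | (de_tag << 30) | (tof_1 << 20) | (tof_2 << 10) | tof_3
        return value.to_bytes(6, "big")


    def unpack_event(blob: bytes) -> dict[str, int]:
        """Recover the five fields from a single 6-byte event."""
        value = int.from_bytes(blob, "big")
        return {
            "trigger_id": (value >> 46) & 0x3,
            "de_tag": (value >> 30) & 0xFFFF,
            "tof_1": (value >> 20) & 0x3FF,
            "tof_2": (value >> 10) & 0x3FF,  # 0x3FF (1023) marks "no hit"
            "tof_3": value & 0x3FF,
        }


    event = unpack_event(pack_event(trigger_id=2, de_tag=1000, tof_1=5, tof_2=1023, tof_3=7))
    assert event["de_tag"] == 1000 and event["tof_2"] == 1023

    # Event MET in ns: meta-event MET + de_tag ticks + half a tick
    meta_event_met_ns = 100.0e9  # hypothetical meta-event MET of 100 s
    event_met_ns = (
        meta_event_met_ns
        + event["de_tag"] * LOOKED_UP_DURATION_OF_TICK * MICROSECOND_TO_NS
        + LOOKED_UP_DURATION_OF_TICK / 2 * MICROSECOND_TO_NS
    )
    print(event_met_ns)  # 100 s + 1.999 s + 0.0009995 s -> 101.9999995e9 ns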
@@ -281,11 +137,11 @@ def create_dataset(de_data_list: list, packet_met_time: list) -> xr.Dataset:

     de_global_attrs = attr_mgr.get_global_attributes("imap_hi_l1a_de_attrs")
     dataset = xr.Dataset(
-        coords={"epoch":
+        coords={"epoch": epoch},
         attrs=de_global_attrs,
     )

-    for var_name, data in
+    for var_name, data in de_data_dict.items():
         attrs = attr_mgr.get_variable_attributes(
             f"hi_de_{var_name}", check_schema=False
         ).copy()
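The coords change above attaches the computed epoch DataArray as the dataset's single coordinate, and the following loop adds each parsed field along that dimension. A minimal sketch of the same assembly pattern, with made-up data and placeholder attributes instead of the ImapCdfAttributes lookups:

    import numpy as np
    import xarray as xr

    # Invented per-event values standing in for the parse_direct_events() output
    de_data_dict = {
        "event_met": np.array([1.0e9, 2.0e9]),
        "trigger_id": np.array([1, 2], dtype=np.uint8),
        "tof_1": np.array([5, 1023], dtype=np.uint16),
    }

    epoch = xr.DataArray(
        de_data_dict["event_met"],  # the real code converts MET to J2000 ns here
        name="epoch",
        dims=["epoch"],
        attrs={"CATDESC": "Direct Event time"},  # placeholder attributes
    )
    dataset = xr.Dataset(coords={"epoch": epoch}, attrs={"Source": "illustration"})

    # One DataArray per parsed field, all sharing the epoch dimension
    for var_name, data in de_data_dict.items():
        dataset[var_name] = xr.DataArray(data, dims=["epoch"])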
@@ -321,23 +177,34 @@ def science_direct_event(packets_data: xr.Dataset) -> xr.Dataset:
     dataset : xarray.Dataset
         Xarray dataset.
     """
-
-    packet_met_time = []
+    de_data_dict: dict[str, list] = defaultdict(list)

     # Because DE_TOF is a variable length data,
     # I am using extend to add another list to the
     # end of the list. This way, I don't need to flatten
     # the list later.
     for i, data in enumerate(packets_data["de_tof"].data):
-
-
-
-
-
-        #
-
-
-
+        parsed_de_data = parse_direct_events(data)
+        for key, new_data in parsed_de_data.items():
+            de_data_dict[key].extend(new_data)
+
+        # add packet data to keep_packet_data dictionary, repeating values
+        # for each direct event encoded in the current packet
+        for from_key, to_key in {
+            "shcoarse": "ccsds_met",
+            "src_seq_ctr": "src_seq_ctr",
+            "pkt_len": "pkt_len",
+            "last_spin_num": "last_spin_num",
+            "spin_invalids": "spin_invalids",
+            "esa_step_num": "esa_step",
+            "meta_seconds": "meta_seconds",
+            "meta_subseconds": "meta_subseconds",
+        }.items():
+            # Repeat the ith packet from_key value N times, where N is the
+            # number of events in the ith packet.
+            de_data_dict[to_key].extend(
+                [packets_data[from_key].data[i]] * len(parsed_de_data["de_tag"])
+            )

     # create dataset
-    return create_dataset(
+    return create_dataset(de_data_dict)
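The new science_direct_event() loop above accumulates per-event fields in a defaultdict of lists and repeats packet-level telemetry once per event, so every column ends up with one entry per direct event. A small self-contained sketch of that accumulation pattern, with invented packet contents and only two of the telemetry keys:

    from collections import defaultdict

    # Invented packets: packet-level telemetry plus a variable number of events
    packets = [
        {"shcoarse": 100, "esa_step_num": 3,
         "events": {"de_tag": [10, 11], "tof_1": [5, 6]}},
        {"shcoarse": 160, "esa_step_num": 4,
         "events": {"de_tag": [12], "tof_1": [7]}},
    ]

    de_data_dict: dict[str, list] = defaultdict(list)
    for packet in packets:
        parsed = packet["events"]
        # Per-event fields: extend column by column
        for key, new_data in parsed.items():
            de_data_dict[key].extend(new_data)
        # Packet-level fields: repeat once per event in this packet
        n_events = len(parsed["de_tag"])
        for from_key, to_key in {"shcoarse": "ccsds_met", "esa_step_num": "esa_step"}.items():
            de_data_dict[to_key].extend([packet[from_key]] * n_events)

    print(dict(de_data_dict))
    # {'de_tag': [10, 11, 12], 'tof_1': [5, 6, 7],
    #  'ccsds_met': [100, 100, 160], 'esa_step': [3, 3, 4]}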