imap-processing 0.16.2__py3-none-any.whl → 0.18.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- imap_processing/_version.py +2 -2
- imap_processing/ccsds/excel_to_xtce.py +12 -0
- imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +6 -6
- imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +35 -0
- imap_processing/cdf/config/imap_codice_l1b_variable_attrs.yaml +35 -0
- imap_processing/cdf/config/imap_codice_l2_variable_attrs.yaml +24 -0
- imap_processing/cdf/config/imap_hi_variable_attrs.yaml +8 -8
- imap_processing/cdf/config/imap_hit_global_cdf_attrs.yaml +1 -1
- imap_processing/cdf/config/imap_hit_l1a_variable_attrs.yaml +163 -100
- imap_processing/cdf/config/imap_hit_l2_variable_attrs.yaml +398 -415
- imap_processing/cdf/config/imap_ialirt_l1_variable_attrs.yaml +97 -54
- imap_processing/cdf/config/imap_idex_global_cdf_attrs.yaml +9 -9
- imap_processing/cdf/config/imap_idex_l2b_variable_attrs.yaml +233 -57
- imap_processing/cdf/config/imap_idex_l2c_variable_attrs.yaml +16 -90
- imap_processing/cdf/config/imap_lo_global_cdf_attrs.yaml +30 -0
- imap_processing/cdf/config/imap_mag_global_cdf_attrs.yaml +15 -1
- imap_processing/cdf/config/imap_swapi_variable_attrs.yaml +19 -0
- imap_processing/cdf/config/imap_swe_l1b_variable_attrs.yaml +20 -0
- imap_processing/cdf/config/imap_swe_l2_variable_attrs.yaml +39 -0
- imap_processing/cdf/config/imap_ultra_global_cdf_attrs.yaml +168 -0
- imap_processing/cdf/config/imap_ultra_l1a_variable_attrs.yaml +103 -2
- imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +91 -11
- imap_processing/cdf/utils.py +7 -1
- imap_processing/cli.py +42 -13
- imap_processing/codice/codice_l1a.py +125 -78
- imap_processing/codice/codice_l1b.py +1 -1
- imap_processing/codice/codice_l2.py +0 -9
- imap_processing/codice/constants.py +481 -498
- imap_processing/hi/hi_l1a.py +4 -4
- imap_processing/hi/hi_l1b.py +2 -2
- imap_processing/hi/packet_definitions/TLM_HI_COMBINED_SCI.xml +218 -38
- imap_processing/hit/hit_utils.py +2 -2
- imap_processing/hit/l0/decom_hit.py +4 -3
- imap_processing/hit/l1a/hit_l1a.py +64 -24
- imap_processing/hit/l1b/constants.py +5 -0
- imap_processing/hit/l1b/hit_l1b.py +18 -16
- imap_processing/hit/l2/constants.py +1 -1
- imap_processing/hit/l2/hit_l2.py +4 -4
- imap_processing/ialirt/constants.py +21 -0
- imap_processing/ialirt/generate_coverage.py +188 -0
- imap_processing/ialirt/l0/parse_mag.py +62 -5
- imap_processing/ialirt/l0/process_swapi.py +1 -1
- imap_processing/ialirt/l0/process_swe.py +23 -7
- imap_processing/ialirt/utils/constants.py +22 -16
- imap_processing/ialirt/utils/create_xarray.py +42 -19
- imap_processing/idex/idex_constants.py +8 -5
- imap_processing/idex/idex_l2b.py +554 -58
- imap_processing/idex/idex_l2c.py +30 -196
- imap_processing/lo/l0/lo_apid.py +1 -0
- imap_processing/lo/l0/lo_star_sensor.py +48 -0
- imap_processing/lo/l1a/lo_l1a.py +74 -30
- imap_processing/lo/packet_definitions/lo_xtce.xml +5359 -106
- imap_processing/mag/constants.py +1 -0
- imap_processing/mag/l0/decom_mag.py +9 -6
- imap_processing/mag/l0/mag_l0_data.py +46 -0
- imap_processing/mag/l1d/__init__.py +0 -0
- imap_processing/mag/l1d/mag_l1d.py +133 -0
- imap_processing/mag/l1d/mag_l1d_data.py +588 -0
- imap_processing/mag/l2/__init__.py +0 -0
- imap_processing/mag/l2/mag_l2.py +25 -20
- imap_processing/mag/l2/mag_l2_data.py +191 -130
- imap_processing/quality_flags.py +20 -2
- imap_processing/spice/geometry.py +25 -3
- imap_processing/spice/pointing_frame.py +1 -1
- imap_processing/spice/spin.py +4 -0
- imap_processing/spice/time.py +51 -0
- imap_processing/swapi/l1/swapi_l1.py +12 -2
- imap_processing/swapi/l2/swapi_l2.py +59 -14
- imap_processing/swapi/swapi_utils.py +1 -1
- imap_processing/swe/l1b/swe_l1b.py +11 -4
- imap_processing/swe/l2/swe_l2.py +111 -17
- imap_processing/ultra/constants.py +49 -1
- imap_processing/ultra/l0/decom_tools.py +28 -14
- imap_processing/ultra/l0/decom_ultra.py +225 -15
- imap_processing/ultra/l0/ultra_utils.py +281 -8
- imap_processing/ultra/l1a/ultra_l1a.py +77 -8
- imap_processing/ultra/l1b/cullingmask.py +3 -3
- imap_processing/ultra/l1b/de.py +53 -15
- imap_processing/ultra/l1b/extendedspin.py +26 -2
- imap_processing/ultra/l1b/lookup_utils.py +171 -50
- imap_processing/ultra/l1b/quality_flag_filters.py +14 -0
- imap_processing/ultra/l1b/ultra_l1b_culling.py +198 -5
- imap_processing/ultra/l1b/ultra_l1b_extended.py +304 -66
- imap_processing/ultra/l1c/helio_pset.py +54 -7
- imap_processing/ultra/l1c/spacecraft_pset.py +9 -1
- imap_processing/ultra/l1c/ultra_l1c.py +2 -0
- imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +106 -109
- imap_processing/ultra/packet_definitions/ULTRA_SCI_COMBINED.xml +3 -3
- imap_processing/ultra/utils/ultra_l1_utils.py +13 -1
- imap_processing/utils.py +20 -42
- {imap_processing-0.16.2.dist-info → imap_processing-0.18.0.dist-info}/METADATA +2 -2
- {imap_processing-0.16.2.dist-info → imap_processing-0.18.0.dist-info}/RECORD +95 -103
- imap_processing/lo/l0/data_classes/star_sensor.py +0 -98
- imap_processing/lo/l0/utils/lo_base.py +0 -57
- imap_processing/ultra/lookup_tables/Angular_Profiles_FM45_LeftSlit.csv +0 -526
- imap_processing/ultra/lookup_tables/Angular_Profiles_FM45_RightSlit.csv +0 -526
- imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_LeftSlit.csv +0 -526
- imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_RightSlit.csv +0 -524
- imap_processing/ultra/lookup_tables/EgyNorm.mem.csv +0 -32769
- imap_processing/ultra/lookup_tables/FM45_Startup1_ULTRA_IMGPARAMS_20240719.csv +0 -2
- imap_processing/ultra/lookup_tables/FM90_Startup1_ULTRA_IMGPARAMS_20240719.csv +0 -2
- imap_processing/ultra/lookup_tables/dps_grid45_compressed.cdf +0 -0
- imap_processing/ultra/lookup_tables/ultra45_back-pos-luts.csv +0 -4097
- imap_processing/ultra/lookup_tables/ultra45_tdc_norm.csv +0 -2050
- imap_processing/ultra/lookup_tables/ultra90_back-pos-luts.csv +0 -4097
- imap_processing/ultra/lookup_tables/ultra90_tdc_norm.csv +0 -2050
- imap_processing/ultra/lookup_tables/yadjust.csv +0 -257
- {imap_processing-0.16.2.dist-info → imap_processing-0.18.0.dist-info}/LICENSE +0 -0
- {imap_processing-0.16.2.dist-info → imap_processing-0.18.0.dist-info}/WHEEL +0 -0
- {imap_processing-0.16.2.dist-info → imap_processing-0.18.0.dist-info}/entry_points.txt +0 -0
imap_processing/idex/idex_l2b.py
CHANGED
@@ -21,26 +21,74 @@ Examples
     write_cdf(l2b_data)
 """
 
+import collections
 import logging
+from collections import defaultdict
+from datetime import datetime, timedelta
 
 import numpy as np
 import xarray as xr
 
-from imap_processing.
-from imap_processing.idex.
-
+from imap_processing.ena_maps.utils.spatial_utils import AzElSkyGrid
+from imap_processing.idex.idex_constants import (
+    FG_TO_KG,
+    IDEX_SPACING_DEG,
+    SECONDS_IN_DAY,
+    IDEXEvtAcquireCodes,
+)
+from imap_processing.idex.idex_utils import get_idex_attrs
+from imap_processing.spice.time import epoch_to_doy, et_to_datetime64, ttj2000ns_to_et
 
 logger = logging.getLogger(__name__)
+# Bin edges
+MASS_BIN_EDGES = np.array(
+    [
+        6.31e-17,
+        1.00e-16,
+        1.58e-16,
+        2.51e-16,
+        3.98e-16,
+        6.31e-16,
+        1.00e-15,
+        1.58e-15,
+        2.51e-15,
+        3.98e-15,
+        1.00e-14,
+    ]
+)
+CHARGE_BIN_EDGES = np.array(
+    [
+        1.00e-01,
+        3.16e-01,
+        1.00e00,
+        3.16e00,
+        1.00e01,
+        3.16e01,
+        1.00e02,
+        3.16e02,
+        1.00e03,
+        3.16e03,
+        1.00e04,
+    ]
+)
+SPIN_PHASE_BIN_EDGES = np.array([0, 90, 180, 270, 360])
 
+# Get the rectangular map grid with the specified spacing
+SKY_GRID = AzElSkyGrid(IDEX_SPACING_DEG)
+LON_BINS_EDGES = SKY_GRID.az_bin_edges
+LAT_BINS_EDGES = SKY_GRID.el_bin_edges
 
-
+
+def idex_l2b(
+    l2a_datasets: list[xr.Dataset], evt_datasets: list[xr.Dataset]
+) -> xr.Dataset:
     """
     Will process IDEX l2a data to create l2b data products.
 
     Parameters
     ----------
-
-        IDEX L2a
+    l2a_datasets : list[xarray.Dataset]
+        IDEX L2a datasets to process.
     evt_datasets : list[xarray.Dataset]
         List of IDEX housekeeping event message datasets.
 
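
The new module-level constants above define logarithmic charge and mass bin edges, spin-phase quadrant edges, and a rectangular sky grid. Below is a minimal sketch (not part of the package) of how `np.digitize` plus `np.clip` drop impact-charge values into the ten bins these edges define, mirroring the clipping that `compute_counts_by_charge_and_mass` applies later in this diff; the charge values are made up, and the edges are regenerated with `np.logspace` (half-decade steps) rather than copied from the file, which lists them rounded to three significant figures.

```python
import numpy as np

# Half-decade (10**0.5) steps from 1e-1 to 1e4, matching CHARGE_BIN_EDGES above
# up to rounding.
charge_bin_edges = np.logspace(-1, 4, 11)

# Hypothetical impact charges: one below the first edge, two in range, one above.
charges = np.array([0.05, 0.5, 50.0, 2.0e4])

# np.digitize returns 0 below the first edge and len(bins) above the last edge;
# clipping to [1, len(bins) - 1] folds those overflow values into the first and
# last bins, which is what the L2B binning code does with np.clip.
binned = np.digitize(charges, bins=charge_bin_edges)
binned = np.clip(binned, 1, len(charge_bin_edges) - 1)

print(binned - 1)  # zero-based bin indices: [0 1 5 9]
```
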
@@ -50,63 +98,440 @@ def idex_l2b(l2a_dataset: xr.Dataset, evt_datasets: list[xr.Dataset]) -> xr.Data
         The``xarray`` dataset containing the science data and supporting metadata.
     """
     logger.info(
-        f"Running IDEX L2B processing on
+        f"Running IDEX L2B processing on datasets: "
+        f"{[ds.attrs['Logical_source'] for ds in l2a_datasets]}"
     )
 
     # create the attribute manager for this data level
     idex_attrs = get_idex_attrs("l2b")
-
     evt_dataset = xr.concat(evt_datasets, dim="epoch")
 
+    # Concat all the l2a datasets together
+    l2a_dataset = xr.concat(l2a_datasets, dim="epoch")
+    epoch_doy_unique = np.unique(epoch_to_doy(l2a_dataset["epoch"].data))
+    (
+        counts_by_charge,
+        counts_by_mass,
+        counts_by_charge_map,
+        counts_by_mass_map,
+        daily_epoch,
+    ) = compute_counts_by_charge_and_mass(l2a_dataset, epoch_doy_unique)
+    # Get science acquisition percentage for each day
+    daily_on_percentage = get_science_acquisition_on_percentage(evt_dataset)
+    (
+        rate_by_charge,
+        rate_by_mass,
+        rate_by_charge_map,
+        rate_by_mass_map,
+        rate_quality_flags,
+    ) = compute_rates_by_charge_and_mass(
+        counts_by_charge,
+        counts_by_mass,
+        counts_by_charge_map,
+        counts_by_mass_map,
+        epoch_doy_unique,
+        daily_on_percentage,
+    )
     # Create l2b Dataset
-
-
-
-
-
-
-    l2b_dataset["science_acquisition_messages"] = xr.DataArray(
-        name="science_acquisition_messages",
-        data=evt_logs.astype(str),
-        dims="epoch_science_acquisition",
-        attrs=idex_attrs.get_variable_attributes("science_acquisition_messages"),
-    )
-    l2b_dataset["epoch_science_acquisition"] = xr.DataArray(
-        name="epoch_science_acquisition",
-        data=evt_time,
-        dims="epoch_science_acquisition",
-        attrs=idex_attrs.get_variable_attributes(
-            "epoch_science_acquisition", check_schema=False
-        ),
-    )
-    l2b_dataset["science_acquisition_values"] = xr.DataArray(
-        name="science_acquisition_values",
-        data=evt_values,
-        dims="epoch_science_acquisition",
-        attrs=idex_attrs.get_variable_attributes("science_acquisition_values"),
-    )
-    spin_phase_quadrants = round_spin_phases(l2a_dataset["spin_phase"])
-    spin_phase_quadrants.attrs.update(
-        idex_attrs.get_variable_attributes("spin_phase_quadrants")
-    )
-    l2b_dataset["spin_phase_quadrants"] = spin_phase_quadrants
-
-    # Get the time of impact array (in day of year)
-    impact_day_of_year = epoch_to_doy(l2b_dataset["epoch"].data)
-    l2b_dataset["impact_day_of_year"] = xr.DataArray(
-        name="impact_day_of_year",
-        data=impact_day_of_year,
+    charge_bins = np.arange(len(CHARGE_BIN_EDGES) - 1)
+    mass_bins = np.arange(len(CHARGE_BIN_EDGES) - 1)
+    spin_phase_bins = np.arange(len(SPIN_PHASE_BIN_EDGES) - 1)
+    epoch = xr.DataArray(
+        name="epoch",
+        data=daily_epoch,
         dims="epoch",
-        attrs=idex_attrs.get_variable_attributes("
+        attrs=idex_attrs.get_variable_attributes("epoch", check_schema=False),
     )
+    data_vars = {
+        "impact_day_of_year": xr.DataArray(
+            name="impact_day_of_year",
+            data=epoch_doy_unique,
+            dims="epoch",
+            attrs=idex_attrs.get_variable_attributes("impact_day_of_year"),
+        ),
+        "rate_calculation_quality_flags": xr.DataArray(
+            name="rate_calculation_quality_flags",
+            data=rate_quality_flags,
+            dims="epoch",
+            attrs=idex_attrs.get_variable_attributes("rate_calculation_quality_flags"),
+        ),
+        "charge_labels": xr.DataArray(
+            name="impact_charge_labels",
+            data=charge_bins.astype(str),
+            dims="impact_charge",
+            attrs=idex_attrs.get_variable_attributes(
+                "charge_labels", check_schema=False
+            ),
+        ),
+        "spin_phase_labels": xr.DataArray(
+            name="spin_phase_labels",
+            data=spin_phase_bins.astype(str),
+            dims="spin_phase",
+            attrs=idex_attrs.get_variable_attributes(
+                "spin_phase_labels", check_schema=False
+            ),
+        ),
+        "mass_labels": xr.DataArray(
+            name="mass_labels",
+            data=mass_bins.astype(str),
+            dims="mass",
+            attrs=idex_attrs.get_variable_attributes("mass_labels", check_schema=False),
+        ),
+        "rectangular_lon_pixel_label": xr.DataArray(
+            name="rectangular_lon_pixel_label",
+            data=SKY_GRID.az_bin_midpoints.astype(str),
+            dims="rectangular_lon_pixel",
+            attrs=idex_attrs.get_variable_attributes(
+                "rectangular_lon_pixel_label", check_schema=False
+            ),
+        ),
+        "rectangular_lat_pixel_label": xr.DataArray(
+            name="rectangular_lat_pixel_label",
+            data=SKY_GRID.el_bin_midpoints.astype(str),
+            dims="rectangular_lat_pixel",
+            attrs=idex_attrs.get_variable_attributes(
+                "rectangular_lat_pixel_label", check_schema=False
+            ),
+        ),
+        "impact_charge": xr.DataArray(
+            name="impact_charge",
+            data=charge_bins,
+            dims="impact_charge",
+            attrs=idex_attrs.get_variable_attributes(
+                "impact_charge", check_schema=False
+            ),
+        ),
+        "mass": xr.DataArray(
+            name="mass",
+            data=mass_bins,
+            dims="mass",
+            attrs=idex_attrs.get_variable_attributes("mass", check_schema=False),
+        ),
+        "spin_phase": xr.DataArray(
+            name="spin_phase",
+            data=spin_phase_bins,
+            dims="spin_phase",
+            attrs=idex_attrs.get_variable_attributes("spin_phase", check_schema=False),
+        ),
+        "rectangular_lon_pixel": xr.DataArray(
+            name="rectangular_lon_pixel",
+            data=SKY_GRID.az_bin_midpoints,
+            dims="rectangular_lon_pixel",
+            attrs=idex_attrs.get_variable_attributes(
+                "rectangular_lon_pixel", check_schema=False
+            ),
+        ),
+        "rectangular_lat_pixel": xr.DataArray(
+            name="rectangular_lat_pixel",
+            data=SKY_GRID.el_bin_midpoints,
+            dims="rectangular_lat_pixel",
+            attrs=idex_attrs.get_variable_attributes(
+                "rectangular_lat_pixel", check_schema=False
+            ),
+        ),
+        "counts_by_charge": xr.DataArray(
+            name="counts_by_charge",
+            data=counts_by_charge.astype(np.int64),
+            dims=("epoch", "impact_charge", "spin_phase"),
+            attrs=idex_attrs.get_variable_attributes("counts_by_charge"),
+        ),
+        "counts_by_mass": xr.DataArray(
+            name="counts_by_mass",
+            data=counts_by_mass.astype(np.int64),
+            dims=("epoch", "mass", "spin_phase"),
+            attrs=idex_attrs.get_variable_attributes("counts_by_mass"),
+        ),
+        "rate_by_charge": xr.DataArray(
+            name="rate_by_charge",
+            data=rate_by_charge,
+            dims=("epoch", "impact_charge", "spin_phase"),
+            attrs=idex_attrs.get_variable_attributes("rate_by_charge"),
+        ),
+        "rate_by_mass": xr.DataArray(
+            name="rate_by_mass",
+            data=rate_by_mass,
+            dims=("epoch", "mass", "spin_phase"),
+            attrs=idex_attrs.get_variable_attributes("rate_by_mass"),
+        ),
+        "counts_by_charge_map": xr.DataArray(
+            name="counts_by_charge_map",
+            data=counts_by_charge_map.astype(np.int64),
+            dims=(
+                "epoch",
+                "impact_charge",
+                "rectangular_lon_pixel",
+                "rectangular_lat_pixel",
+            ),
+            attrs=idex_attrs.get_variable_attributes("counts_by_charge_map"),
+        ),
+        "counts_by_mass_map": xr.DataArray(
+            name="counts_by_mass_map",
+            data=counts_by_mass_map.astype(np.int64),
+            dims=(
+                "epoch",
+                "mass",
+                "rectangular_lon_pixel",
+                "rectangular_lat_pixel",
+            ),
+            attrs=idex_attrs.get_variable_attributes("counts_by_mass_map"),
+        ),
+        "rate_by_charge_map": xr.DataArray(
+            name="rate_by_charge_map",
+            data=rate_by_charge_map,
+            dims=(
+                "epoch",
+                "impact_charge",
+                "rectangular_lon_pixel",
+                "rectangular_lat_pixel",
+            ),
+            attrs=idex_attrs.get_variable_attributes("rate_by_charge_map"),
+        ),
+        "rate_by_mass_map": xr.DataArray(
+            name="rate_by_mass_map",
+            data=rate_by_mass_map,
+            dims=(
+                "epoch",
+                "mass",
+                "rectangular_lon_pixel",
+                "rectangular_lat_pixel",
+            ),
+            attrs=idex_attrs.get_variable_attributes("rate_by_mass_map"),
+        ),
+    }
+    l2b_dataset = xr.Dataset(
+        coords={"epoch": epoch},
+        data_vars=data_vars,
+        attrs=idex_attrs.get_global_attributes("imap_idex_l2b_sci"),
+    )
+
     logger.info("IDEX L2B science data processing completed.")
 
     return l2b_dataset
 
 
-def
+def compute_counts_by_charge_and_mass(
+    l2a_dataset: xr.Dataset, epoch_doy_unique: np.ndarray
+) -> tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
+    """
+    Compute the dust counts by charge and mass by spin phase or lon and lat per day.
+
+    Parameters
+    ----------
+    l2a_dataset : xarray.Dataset
+        Combined IDEX L2a datasets.
+    epoch_doy_unique : np.ndarray
+        Unique days of year corresponding to the epochs in the dataset.
+
+    Returns
+    -------
+    tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]
+        Two 3D arrays containing counts by charge or mass, and by spin phase for each
+        dataset, Two 4D arrays containing counts by charge or mass, and by lon and lat
+        for each dataset, and a 1D array of daily epoch values.
+    """
+    # Initialize arrays to hold counts.
+    # There should be 4 spin phase bins, 10 charge bins, and 10 mass bins.
+    # The first bin for charge and mass is for values below the first bin edge.
+    counts_by_charge = np.zeros(
+        (
+            len(epoch_doy_unique),
+            len(CHARGE_BIN_EDGES) - 1,
+            len(SPIN_PHASE_BIN_EDGES) - 1,
+        ),
+    )
+    counts_by_mass = np.zeros(
+        (len(epoch_doy_unique), len(MASS_BIN_EDGES) - 1, len(SPIN_PHASE_BIN_EDGES) - 1),
+    )
+    # Initialize arrays to hold count maps. Each map is a 3 or 4D array with shape
+    # (epoch, 10 [charge or mass], 60 [longitude bins], 30 [latitude bins]).
+    counts_by_charge_map = np.zeros(
+        (
+            len(epoch_doy_unique),
+            len(CHARGE_BIN_EDGES) - 1,
+            len(LON_BINS_EDGES) - 1,
+            len(LAT_BINS_EDGES) - 1,
+        ),
+    )
+    counts_by_mass_map = np.zeros(
+        (
+            len(epoch_doy_unique),
+            len(MASS_BIN_EDGES) - 1,
+            len(LON_BINS_EDGES) - 1,
+            len(LAT_BINS_EDGES) - 1,
+        ),
+    )
+    daily_epoch = np.zeros(len(epoch_doy_unique), dtype=np.float64)
+    for i in range(len(epoch_doy_unique)):
+        doy = epoch_doy_unique[i]
+        # Get the indices for the current day
+        current_day_indices = np.where(epoch_to_doy(l2a_dataset["epoch"].data) == doy)[
+            0
+        ]
+        # Set the epoch for the current day to be the mean epoch of the day.
+        daily_epoch[i] = np.mean(l2a_dataset["epoch"].data[current_day_indices])
+        mass_vals = l2a_dataset["target_low_dust_mass_estimate"].data[
+            current_day_indices
+        ]
+        charge_vals = l2a_dataset["target_low_impact_charge"].data[current_day_indices]
+        spin_phase_angles = l2a_dataset["spin_phase"].data[current_day_indices]
+        # Make sure longitude values are in the range [0, 360)
+        longitude = np.mod(l2a_dataset["longitude"].data[current_day_indices], 360)
+        latitude = l2a_dataset["latitude"].data[current_day_indices]
+        # Convert units
+        mass_vals = FG_TO_KG * np.atleast_1d(mass_vals)
+        # Bin masses
+        binned_mass = np.asarray(np.digitize(mass_vals, bins=MASS_BIN_EDGES))
+        # Bin charges
+        binned_charge = np.asarray(np.digitize(charge_vals, bins=CHARGE_BIN_EDGES))
+        # Bin spin phases
+        binned_spin_phase = bin_spin_phases(spin_phase_angles)
+        # Bin longitude and latitude into the rectangular grid.
+        binned_longitude = np.asarray(np.digitize(longitude, bins=LON_BINS_EDGES))
+        # Latitude should be binned with the right edge included. 90 is a valid latitude
+        binned_latitude = np.asarray(np.digitize(latitude, bins=LAT_BINS_EDGES))
+        # Clip latitude value above the right edge to be in the last bin
+        binned_latitude = np.clip(binned_latitude, 1, len(LAT_BINS_EDGES) - 1)
+        # If the values in the array are beyond the bounds of bins, 0 or len(bins) it is
+        # returned as such. In this case, the desired result is to place the values
+        # beyond the first or last bin into the first or last bin, respectively.
+        binned_charge = np.clip(binned_charge, 1, len(CHARGE_BIN_EDGES) - 1)
+        binned_mass = np.clip(binned_mass, 1, len(MASS_BIN_EDGES) - 1)
+
+        # Count dust events for each spin phase, mass bin, charge bin, and bin into
+        # a rectangular grid
+        for mass_bin, charge_bin, spin_phase_bin, lon_bin, lat_bin in zip(
+            binned_mass,
+            binned_charge,
+            binned_spin_phase,
+            binned_longitude,
+            binned_latitude,
+        ):
+            counts_by_mass[i, mass_bin - 1, spin_phase_bin] += 1
+            counts_by_charge[i, charge_bin - 1, spin_phase_bin] += 1
+            counts_by_mass_map[i, mass_bin - 1, lon_bin - 1, lat_bin - 1] += 1
+            counts_by_charge_map[i, charge_bin - 1, lon_bin - 1, lat_bin - 1] += 1
+
+    return (
+        counts_by_charge,
+        counts_by_mass,
+        counts_by_charge_map,
+        counts_by_mass_map,
+        daily_epoch,
+    )
+
+
+def compute_rates(
+    counts: np.ndarray, epoch_doy_percent_on: np.ndarray, non_zero_inds: np.ndarray
+) -> np.ndarray:
+    """
+    Compute the count rates given the percent uptime of IDEX.
+
+    Parameters
+    ----------
+    counts : np.ndarray
+        Count values for the dust events.
+    epoch_doy_percent_on : np.ndarray
+        Percentage of time science acquisition was on for each day of the year.
+    non_zero_inds : np.ndarray
+        Indices of the days with non-zero science acquisition percentage.
+
+    Returns
+    -------
+    np.ndarray
+        Count rates.
     """
-
+    while len(epoch_doy_percent_on.shape) < len(counts.shape):
+        epoch_doy_percent_on = np.expand_dims(epoch_doy_percent_on, axis=-1)
+
+    return counts[non_zero_inds] / (
+        0.01 * epoch_doy_percent_on[non_zero_inds] * SECONDS_IN_DAY
+    )
+
+
+def compute_rates_by_charge_and_mass(
+    counts_by_charge: np.ndarray,
+    counts_by_mass: np.ndarray,
+    counts_by_charge_map: np.ndarray,
+    counts_by_mass_map: np.ndarray,
+    epoch_doy: np.ndarray,
+    daily_on_percentage: dict,
+) -> tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
+    """
+    Compute the dust event counts rates by charge and mass by spin phase for each day.
+
+    Parameters
+    ----------
+    counts_by_charge : np.ndarray
+        3D array containing counts by charge and spin phase for each dataset.
+    counts_by_mass : np.ndarray
+        3D array containing counts by mass and lon and lat for each dataset.
+    counts_by_charge_map : np.ndarray
+        4D array containing counts by charge and lon and lat for each dataset.
+    counts_by_mass_map : np.ndarray
+        4D array containing counts by mass and spin phase for each dataset.
+    epoch_doy : np.ndarray
+        Unique days of year corresponding to the epochs in the dataset.
+    daily_on_percentage : dict
+        Percentage of time science acquisition was on for each doy.
+
+    Returns
+    -------
+    tuple[np.ndarray, np.ndarray, np.ndarray]
+        Two 3D arrays containing counts rates by charge or mass, and by spin phase for
+        each dataset and the quality flags for each epoch.
+    """
+    # Initialize arrays to hold rates.
+    rate_by_charge = np.full(counts_by_charge.shape, -1.0)
+    rate_by_mass = np.full(counts_by_mass.shape, -1.0)
+    rate_by_charge_map = np.full(counts_by_charge_map.shape, -1.0)
+    rate_by_mass_map = np.full(counts_by_mass_map.shape, -1.0)
+    # Initialize an array to hold quality flags for each epoch. A quality flag of 0
+    # indicates that there was no science acquisition data for that epoch, and the rate
+    # is not valid. A quality flag of 1 indicates that the rate is valid.
+    rate_quality_flags = np.ones(epoch_doy.shape, dtype=np.uint8)
+
+    # Get percentages in order of epoch_doy. Log any missing days.
+    epoch_doy_percent_on = np.array(
+        [daily_on_percentage.get(doy, -1) for doy in epoch_doy]
+    )
+
+    missing_doy_uptimes_inds = np.where(epoch_doy_percent_on == -1)[0]
+    if np.any(missing_doy_uptimes_inds):
+        rate_quality_flags[missing_doy_uptimes_inds] = 0
+        logger.warning(
+            f"Missing science acquisition uptime percentages for day(s) of"
+            f" year: {epoch_doy[missing_doy_uptimes_inds]}."
+        )
+    # Compute rates
+    # Create a boolean mask for DOYs that have a non-zero percentage of science
+    # acquisition time.
+    non_zero_inds = np.where(epoch_doy_percent_on > 0)[0]
+    # Compute rates only for days with non-zero science acquisition percentage
+    rate_by_charge[non_zero_inds] = compute_rates(
+        counts_by_charge, epoch_doy_percent_on, non_zero_inds
+    )
+    rate_by_mass[non_zero_inds] = compute_rates(
+        counts_by_mass, epoch_doy_percent_on, non_zero_inds
+    )
+    rate_by_charge_map[non_zero_inds] = compute_rates(
+        counts_by_charge_map, epoch_doy_percent_on, non_zero_inds
+    )
+    rate_by_mass_map[non_zero_inds] = compute_rates(
+        counts_by_mass_map, epoch_doy_percent_on, non_zero_inds
+    )
+
+    return (
+        rate_by_charge,
+        rate_by_mass,
+        rate_by_charge_map,
+        rate_by_mass_map,
+        rate_quality_flags,
+    )
+
+
+def bin_spin_phases(spin_phases: xr.DataArray) -> np.ndarray:
+    """
+    Bin spin phase angles into 4 quadrants: [315°-45°,45°-135°,135°-225°, 225°-315°].
 
     Parameters
     ----------
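
The new `compute_rates` helper in this hunk turns daily counts into rates by dividing by the number of seconds the instrument was actually acquiring science data (`0.01 * percent_on * SECONDS_IN_DAY`). Below is a standalone sketch of that arithmetic, assuming `SECONDS_IN_DAY` is 86400 and using made-up counts; it reproduces the same formula, not the package's implementation.

```python
import numpy as np

SECONDS_IN_DAY = 86400  # assumed value of the idex_constants import


def rate_sketch(counts: np.ndarray, percent_on: np.ndarray) -> np.ndarray:
    """Counts per second of science-acquisition time for each day."""
    # Broadcast the per-day percentage across the trailing bin axes, the same
    # role np.expand_dims plays inside compute_rates above.
    while percent_on.ndim < counts.ndim:
        percent_on = percent_on[..., np.newaxis]
    return counts / (0.01 * percent_on * SECONDS_IN_DAY)


# One day, 12 impacts in a single (charge, spin-phase) bin, acquisition on 50% of the day.
counts = np.array([[[12.0]]])
print(rate_sketch(counts, np.array([50.0])))  # ~2.78e-4 impacts per second
```
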
@@ -115,21 +540,22 @@ def round_spin_phases(spin_phases: xr.DataArray) -> xr.DataArray:
 
     Returns
     -------
-
-        Spin phases
+    numpy.ndarray
+        Spin phases binned into quadrants.
     """
     if np.any(spin_phases < 0) or np.any(spin_phases >= 360):
         logger.warning(
             f"Spin phase angles, {spin_phases.data} are outside of the expected spin "
             f"phase angle range, [0, 360)."
         )
-
-    #
-
-
-
-    #
-
+    # Shift spin phases by +45° so that the first bin starts at 0°.
+    # Use mod to wrap values >= 360 to 0.
+    shifted_spin_phases = (spin_phases + 45) % 360
+    # Use np.digitize to find the bin index for each spin phase.
+    bin_indices = np.digitize(shifted_spin_phases, SPIN_PHASE_BIN_EDGES, right=False)
+    # Shift bins to be zero-based.
+    bin_indices -= 1
+    return np.asarray(bin_indices)
 
 
 def get_science_acquisition_timestamps(
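
The rewritten spin-phase binning (replacing the old `round_spin_phases`) shifts each angle by +45° so the 315°-45° quadrant wraps into bin 0 before `np.digitize` is applied. A self-contained sketch of the same arithmetic on a few sample angles, outside the package:

```python
import numpy as np

SPIN_PHASE_BIN_EDGES = np.array([0, 90, 180, 270, 360])


def quadrant_sketch(spin_phases: np.ndarray) -> np.ndarray:
    # Shift by +45 deg and wrap, so 315-45 deg lands in bin 0, 45-135 deg in bin 1, etc.
    shifted = (spin_phases + 45) % 360
    # digitize returns 1-based bin numbers for these edges; subtract 1 for 0-based bins.
    return np.digitize(shifted, SPIN_PHASE_BIN_EDGES, right=False) - 1


angles = np.array([0.0, 44.9, 45.0, 90.0, 200.0, 350.0])
print(quadrant_sketch(angles))  # [0 0 1 1 2 0]
```
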
@@ -169,7 +595,7 @@ def get_science_acquisition_timestamps(
         evt_dataset["el3par_evtpkt"].data[sc_indices] << 8
         | evt_dataset["el4par_evtpkt"].data[sc_indices]
     )
-    epochs = evt_dataset["epoch"][sc_indices]
+    epochs = evt_dataset["epoch"][sc_indices].data
     # Now the state change values and check if it is either a science
     # acquisition start or science acquisition stop event.
     for v1, v2, epoch in zip(val1, val2, epochs):
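
For context on the unchanged lines in this small hunk: the two event-packet parameter fields are packed into one 16-bit state-change code by shifting the high byte and OR-ing in the low byte, and the changed line now takes `.data` so the loop iterates over plain NumPy values rather than xarray objects. A tiny sketch of the byte packing with hypothetical field values:

```python
import numpy as np

# Hypothetical 8-bit event-packet parameter fields (high byte, low byte).
el3par = np.array([0x01, 0x00], dtype=np.uint16)
el4par = np.array([0x2A, 0xFF], dtype=np.uint16)

# Same shift-and-OR packing as the context lines above.
state_change = (el3par << 8) | el4par
print([hex(int(v)) for v in state_change])  # ['0x12a', '0xff']
```
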
@@ -184,8 +610,78 @@ def get_science_acquisition_timestamps(
             event_timestamps.append(epoch)
             event_values.append(0)
 
+    logger.info(
+        f"Found science acquisition events: {event_logs} at times: {event_timestamps}"
+    )
     return (
         np.asarray(event_logs),
         np.asarray(event_timestamps),
         np.asarray(event_values),
     )
+
+
+def get_science_acquisition_on_percentage(evt_dataset: xr.Dataset) -> dict:
+    """
+    Calculate the percentage of time science acquisition was occurring for each day.
+
+    Parameters
+    ----------
+    evt_dataset : xarray.Dataset
+        Contains IDEX event message data.
+
+    Returns
+    -------
+    dict
+        Percentages of time the instrument was in science acquisition mode for each day
+        of year.
+    """
+    # Get science acquisition start and stop times
+    evt_logs, evt_time, evt_values = get_science_acquisition_timestamps(evt_dataset)
+    # Track total and 'on' durations per day
+    daily_totals: collections.defaultdict = defaultdict(timedelta)
+    daily_on: collections.defaultdict = defaultdict(timedelta)
+    # Convert epoch event times to datetime
+    dates = et_to_datetime64(ttj2000ns_to_et(evt_time)).astype(datetime)
+    # Simulate an event at the start of the first day.
+    start_of_first_day = dates[0].replace(hour=0, minute=0, second=0, microsecond=0)
+    # Assume that the state at the start of the day is the opposite of what the first
+    # state is.
+    state_at_start = 0 if evt_values[0] == 1 else 1
+    dates = np.insert(dates, 0, start_of_first_day)
+    evt_values = np.insert(evt_values, 0, state_at_start)
+    for i in range(len(dates)):
+        start = dates[i]
+        state = evt_values[i]
+        if i == len(dates) - 1:
+            # If this is the last event, set the "end" value the end of the day.
+            end = (start + timedelta(days=1)).replace(
+                hour=0, minute=0, second=0, microsecond=0
+            )
+        else:
+            # Otherwise, use the next event time as the end time.
+            end = dates[i + 1]
+
+        # Split time span by day boundaries
+        current = start
+        while current < end:
+            next_day = (current + timedelta(days=1)).replace(
+                hour=0, minute=0, second=0, microsecond=0
+            )
+            segment_end = min(end, next_day)
+            duration = segment_end - current
+            doy = current.timetuple().tm_yday
+            daily_totals[doy] += duration
+            # If the state is 1, add to the 'on' duration for that day
+            if state == 1:
+                daily_on[doy] += duration
+            current = segment_end
+
+    # Calculate the percentage of time science acquisition was on for each day
+    percent_on_times = {}
+    for doy in sorted(daily_totals.keys()):
+        total = daily_totals[doy].total_seconds()
+        on_time = daily_on[doy].total_seconds()
+        pct_on = (on_time / total) * 100 if total > 0 else 0
+        percent_on_times[doy] = pct_on
+
+    return percent_on_times
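
The new `get_science_acquisition_on_percentage` walks the ON/OFF event timeline, splits every interval at midnight, and accumulates per-day durations with `timedelta` before converting to percentages. A trimmed sketch of just the day-splitting loop, using a hypothetical ON interval that crosses midnight (not IDEX data):

```python
from collections import defaultdict
from datetime import datetime, timedelta

# Hypothetical acquisition-ON interval spanning midnight.
start = datetime(2026, 3, 1, 22, 0, 0)
end = datetime(2026, 3, 2, 6, 0, 0)

daily_on = defaultdict(timedelta)
current = start
while current < end:
    # Next midnight after `current`; each segment stays within a single day.
    next_day = (current + timedelta(days=1)).replace(
        hour=0, minute=0, second=0, microsecond=0
    )
    segment_end = min(end, next_day)
    daily_on[current.timetuple().tm_yday] += segment_end - current
    current = segment_end

# Day-of-year 60 accumulates 2 hours of ON time, day 61 accumulates 6 hours.
print({doy: td.total_seconds() / 3600 for doy, td in daily_on.items()})
```
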