imap-processing 0.14.0__py3-none-any.whl → 0.16.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of imap-processing might be problematic.
- imap_processing/_version.py +2 -2
- imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +60 -35
- imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +765 -287
- imap_processing/cdf/config/imap_codice_l1b_variable_attrs.yaml +1577 -288
- imap_processing/cdf/config/imap_codice_l2_variable_attrs.yaml +1004 -0
- imap_processing/cdf/config/imap_enamaps_l2-common_variable_attrs.yaml +28 -0
- imap_processing/cdf/config/imap_enamaps_l2-healpix_variable_attrs.yaml +1 -1
- imap_processing/cdf/config/imap_enamaps_l2-rectangular_variable_attrs.yaml +18 -0
- imap_processing/cdf/config/imap_glows_l2_variable_attrs.yaml +39 -3
- imap_processing/cdf/config/imap_ialirt_global_cdf_attrs.yaml +18 -0
- imap_processing/cdf/config/imap_ialirt_l1_variable_attrs.yaml +353 -0
- imap_processing/cdf/config/imap_idex_l1a_variable_attrs.yaml +7 -0
- imap_processing/cdf/config/imap_idex_l1b_variable_attrs.yaml +11 -0
- imap_processing/cdf/config/imap_idex_l2a_variable_attrs.yaml +4 -0
- imap_processing/cdf/config/imap_idex_l2c_variable_attrs.yaml +7 -3
- imap_processing/cdf/config/imap_lo_global_cdf_attrs.yaml +6 -0
- imap_processing/cdf/config/imap_mag_l2_variable_attrs.yaml +114 -0
- imap_processing/cdf/config/imap_swe_global_cdf_attrs.yaml +11 -5
- imap_processing/cdf/config/imap_swe_l1b_variable_attrs.yaml +23 -1
- imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +4 -0
- imap_processing/cdf/config/imap_ultra_l1c_variable_attrs.yaml +2 -2
- imap_processing/cli.py +145 -80
- imap_processing/codice/codice_l1a.py +140 -84
- imap_processing/codice/codice_l1b.py +91 -18
- imap_processing/codice/codice_l2.py +81 -0
- imap_processing/codice/constants.py +68 -0
- imap_processing/ena_maps/ena_maps.py +43 -1
- imap_processing/glows/l2/glows_l2_data.py +3 -6
- imap_processing/hi/hi_l1a.py +447 -0
- imap_processing/hi/{l1b/hi_l1b.py → hi_l1b.py} +1 -1
- imap_processing/hi/{l1c/hi_l1c.py → hi_l1c.py} +21 -21
- imap_processing/hi/{l2/hi_l2.py → hi_l2.py} +13 -13
- imap_processing/hi/utils.py +6 -6
- imap_processing/hit/l1b/hit_l1b.py +30 -11
- imap_processing/ialirt/constants.py +38 -0
- imap_processing/ialirt/l0/parse_mag.py +1 -1
- imap_processing/ialirt/l0/process_codice.py +91 -0
- imap_processing/ialirt/l0/process_hit.py +12 -21
- imap_processing/ialirt/l0/process_swapi.py +172 -23
- imap_processing/ialirt/l0/process_swe.py +3 -10
- imap_processing/ialirt/utils/constants.py +62 -0
- imap_processing/ialirt/utils/create_xarray.py +135 -0
- imap_processing/idex/idex_l2c.py +9 -9
- imap_processing/lo/l1b/lo_l1b.py +6 -1
- imap_processing/lo/l1c/lo_l1c.py +22 -13
- imap_processing/lo/l2/lo_l2.py +213 -0
- imap_processing/mag/l1c/mag_l1c.py +8 -1
- imap_processing/mag/l2/mag_l2.py +6 -2
- imap_processing/mag/l2/mag_l2_data.py +7 -5
- imap_processing/swe/l1a/swe_l1a.py +6 -6
- imap_processing/swe/l1b/swe_l1b.py +70 -11
- imap_processing/ultra/l0/decom_ultra.py +1 -1
- imap_processing/ultra/l0/ultra_utils.py +0 -4
- imap_processing/ultra/l1b/badtimes.py +7 -3
- imap_processing/ultra/l1b/cullingmask.py +7 -2
- imap_processing/ultra/l1b/de.py +26 -12
- imap_processing/ultra/l1b/lookup_utils.py +8 -7
- imap_processing/ultra/l1b/ultra_l1b.py +59 -48
- imap_processing/ultra/l1b/ultra_l1b_culling.py +50 -18
- imap_processing/ultra/l1b/ultra_l1b_extended.py +4 -4
- imap_processing/ultra/l1c/helio_pset.py +53 -0
- imap_processing/ultra/l1c/spacecraft_pset.py +20 -12
- imap_processing/ultra/l1c/ultra_l1c.py +49 -26
- imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +40 -2
- imap_processing/ultra/l2/ultra_l2.py +47 -2
- imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_RightSlit.csv +524 -526
- imap_processing/ultra/utils/ultra_l1_utils.py +51 -10
- {imap_processing-0.14.0.dist-info → imap_processing-0.16.0.dist-info}/METADATA +2 -2
- {imap_processing-0.14.0.dist-info → imap_processing-0.16.0.dist-info}/RECORD +72 -69
- imap_processing/hi/l1a/__init__.py +0 -0
- imap_processing/hi/l1a/hi_l1a.py +0 -98
- imap_processing/hi/l1a/histogram.py +0 -152
- imap_processing/hi/l1a/science_direct_event.py +0 -214
- imap_processing/hi/l1b/__init__.py +0 -0
- imap_processing/hi/l1c/__init__.py +0 -0
- imap_processing/hi/l2/__init__.py +0 -0
- imap_processing/ialirt/l0/process_codicehi.py +0 -156
- imap_processing/ialirt/l0/process_codicelo.py +0 -41
- {imap_processing-0.14.0.dist-info → imap_processing-0.16.0.dist-info}/LICENSE +0 -0
- {imap_processing-0.14.0.dist-info → imap_processing-0.16.0.dist-info}/WHEEL +0 -0
- {imap_processing-0.14.0.dist-info → imap_processing-0.16.0.dist-info}/entry_points.txt +0 -0
imap_processing/ialirt/utils/constants.py ADDED
@@ -0,0 +1,62 @@
+"""Keys for I-ALiRT data products."""
+
+IALIRT_KEYS = [
+    # H intensities in 15 energy ranges and binned into 4 azimuths and 4 spin angle bins
+    "codicehi_h",
+    # C/O abundance ratio
+    "codicelo_c_over_o_abundance",
+    # Mg/O abundance ratio
+    "codicelo_mg_over_o_abundance",
+    # Fe/O abundance ratio
+    "codicelo_fe_over_o_abundance",
+    # C+6/C+5 charge state ratio
+    "codicelo_c_plus_6_over_c_plus_5_ratio",
+    # O+7/O+6 charge state ratio
+    "codicelo_o_plus_7_over_o_plus_6_ratio",
+    # Fe low/Fe high charge state ratio
+    "codicelo_fe_low_over_fe_high_ratio",
+    # Low energy (~300 keV) electrons (A-side)
+    "hit_e_a_side_low_en",
+    # Medium energy (~3 MeV) electrons (A-side)
+    "hit_e_a_side_med_en",
+    # High energy (>3 MeV) electrons (A-side)
+    "hit_e_a_side_high_en",
+    # Low energy (~300 keV) electrons (B-side)
+    "hit_e_b_side_low_en",
+    # Medium energy (~3 MeV) electrons (B-side)
+    "hit_e_b_side_med_en",
+    # High energy (>3 MeV) electrons (B-side)
+    "hit_e_b_side_high_en",
+    # Medium energy (12 to 70 MeV) protons (Omnidirectional)
+    "hit_h_omni_med_en",
+    # High energy (>70 MeV) protons (A-side)
+    "hit_h_a_side_high_en",
+    # High energy (>70 MeV) protons (B-side)
+    "hit_h_b_side_high_en",
+    # Low energy (6 to 8 MeV/nuc) He (Omnidirectional)
+    "hit_he_omni_low_en",
+    # High energy (15 to 70 MeV/nuc) He (Omnidirectional)
+    "hit_he_omni_high_en",
+    # Magnetic field vector in GSE coordinates
+    "mag_4s_b_gse",
+    # Magnetic field vector in GSM coordinates
+    "mag_4s_b_gsm",
+    # Magnetic field vector in RTN coordinates
+    "mag_4s_b_rtn",
+    # Azimuth angle (φ) of the magnetic field in GSM coordinates
+    "mag_phi_4s_b_gsm",
+    # Elevation angle (θ) of the magnetic field in GSM coordinates
+    "mag_theta_4s_b_gsm",
+    # Pseudo density of solar wind protons
+    "swapi_pseudo_proton_density",
+    # Pseudo speed of solar wind protons in solar inertial frame
+    "swapi_pseudo_proton_speed",
+    # Pseudo temperature of solar wind protons in plasma frame
+    "swapi_pseudo_proton_temperature",
+    # SWE Normalized Counts - Half Cycle 1
+    "swe_normalized_counts_half_1",
+    # SWE Normalized Counts - Half Cycle 2
+    "swe_normalized_counts_half_2",
+    # SWE Counterstreaming flag
+    "swe_counterstreaming_electrons",
+]
imap_processing/ialirt/utils/create_xarray.py ADDED
@@ -0,0 +1,135 @@
+"""Creates xarray based on structure of queried DynamoDB."""
+
+import numpy as np
+import xarray as xr
+
+from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
+from imap_processing.ialirt.utils.constants import IALIRT_KEYS
+
+
+def create_xarray_from_records(records: list[dict]) -> xr.Dataset:  # noqa: PLR0912
+    """
+    Create dataset from a list of records.
+
+    Parameters
+    ----------
+    records : list of dict
+        Output of querying DynamoDB.
+
+    Returns
+    -------
+    dataset : xarray.Dataset
+        Dataset in standard format.
+    """
+    cdf_manager = ImapCdfAttributes()
+    cdf_manager.add_instrument_global_attrs("ialirt")
+    cdf_manager.add_instrument_variable_attrs("ialirt", "l1")
+
+    instrument_keys: set[str] = set(IALIRT_KEYS)
+    n = len(records)
+    attrs = cdf_manager.get_variable_attributes("default_int64_attrs")
+    fillval = attrs.get("FILLVAL")
+    ttj2000ns_values = np.full(n, fillval, dtype=np.int64)
+
+    # Collect all keys that start with the instrument prefixes.
+    for i, record in enumerate(records):
+        ttj2000ns_values[i] = record["ttj2000ns"]
+
+    epoch = xr.DataArray(
+        data=ttj2000ns_values,
+        name="epoch",
+        dims=["epoch"],
+        attrs=cdf_manager.get_variable_attributes("epoch", check_schema=False),
+    )
+    component = xr.DataArray(
+        ["x", "y", "z"],
+        name="component",
+        dims=["component"],
+        attrs=cdf_manager.get_variable_attributes("component", check_schema=False),
+    )
+
+    esa_step = xr.DataArray(
+        data=np.arange(8, dtype=np.uint8),
+        name="esa_step",
+        dims=["esa_step"],
+        attrs=cdf_manager.get_variable_attributes("esa_step", check_schema=False),
+    )
+
+    energy_ranges = xr.DataArray(
+        data=np.arange(15, dtype=np.uint8),
+        name="energy_ranges",
+        dims=["energy_ranges"],
+        attrs=cdf_manager.get_variable_attributes("energy_ranges", check_schema=False),
+    )
+
+    azimuth = xr.DataArray(
+        data=np.arange(4, dtype=np.uint8),
+        name="azimuth",
+        dims=["azimuth"],
+        attrs=cdf_manager.get_variable_attributes("azimuth", check_schema=False),
+    )
+
+    spin_angle_bin = xr.DataArray(
+        data=np.arange(4, dtype=np.uint8),
+        name="spin_angle_bin",
+        dims=["spin_angle_bin"],
+        attrs=cdf_manager.get_variable_attributes("spin_angle_bin", check_schema=False),
+    )
+
+    coords = {
+        "epoch": epoch,
+        "component": component,
+        "esa_step": esa_step,
+        "energy_ranges": energy_ranges,
+        "azimuth": azimuth,
+        "spin_angle_bin": spin_angle_bin,
+    }
+    dataset = xr.Dataset(
+        coords=coords,
+        attrs=cdf_manager.get_global_attributes("imap_ialirt_l1_realtime"),
+    )
+
+    # Create empty dataset for each key.
+    for key in instrument_keys:
+        attrs = cdf_manager.get_variable_attributes(key, check_schema=False)
+        fillval = attrs.get("FILLVAL")
+        if key.startswith("mag"):
+            data = np.full((n, 3), fillval, dtype=np.float32)
+            dims = ["epoch", "component"]
+            dataset[key] = xr.DataArray(data, dims=dims, attrs=attrs)
+        elif key.startswith("codicehi"):
+            data = np.full((n, 15, 4, 4), fillval, dtype=np.float32)
+            dims = ["epoch", "energy", "azimuth", "spin_angle_bin"]
+            dataset[key] = xr.DataArray(data, dims=dims, attrs=attrs)
+        elif key == "swe_counterstreaming_electrons":
+            data = np.full(n, fillval, dtype=np.uint8)
+            dims = ["epoch"]
+            dataset[key] = xr.DataArray(data, dims=dims, attrs=attrs)
+        elif key.startswith("swe"):
+            data = np.full((n, 8), fillval, dtype=np.uint32)
+            dims = ["epoch", "esa_step"]
+            dataset[key] = xr.DataArray(data, dims=dims, attrs=attrs)
+        elif key.startswith("hit"):
+            data = np.full(n, fillval, dtype=np.uint32)
+            dims = ["epoch"]
+            dataset[key] = xr.DataArray(data, dims=dims, attrs=attrs)
+        else:
+            data = np.full(n, fillval, dtype=np.float32)
+            dims = ["epoch"]
+            dataset[key] = xr.DataArray(data, dims=dims, attrs=attrs)
+
+    # Populate the dataset variables
+    for i, record in enumerate(records):
+        for key, val in record.items():
+            if key in ["apid", "met", "met_in_utc", "ttj2000ns"]:
+                continue
+            elif key.startswith("mag"):
+                dataset[key].data[i, :] = val
+            elif key.startswith("swe_normalized_counts"):
+                dataset[key].data[i, :] = val
+            elif key.startswith("codicehi"):
+                dataset[key].data[i, :, :, :] = val
+            else:
+                dataset[key].data[i] = val
+
+    return dataset
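A minimal usage sketch of the new create_xarray_from_records helper follows. It assumes imap_processing is installed with its CDF attribute YAML files available; the record contents below are invented for illustration and only a couple of the IALIRT_KEYS are filled in.

    # Sketch only: record values are invented; key names follow IALIRT_KEYS above.
    from imap_processing.ialirt.utils.create_xarray import create_xarray_from_records

    records = [
        {
            "apid": 478,  # bookkeeping keys (apid/met/met_in_utc/ttj2000ns) are not turned into variables
            "ttj2000ns": 796_600_000_000_000_000,
            "mag_4s_b_gse": [1.0, -2.5, 0.3],  # filled into the (epoch, component) variable
            "swapi_pseudo_proton_speed": 410.0,  # scalar per epoch
        },
    ]
    ds = create_xarray_from_records(records)
    print(ds["mag_4s_b_gse"].dims)  # ('epoch', 'component')

Any key not present in a record is left at its FILLVAL, so partial records are handled without special casing.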
imap_processing/idex/idex_l2c.py CHANGED
@@ -125,7 +125,7 @@ def idex_healpix_map(
     counts = np.histogram(hpix_idx, bins=n_pix, range=(0, n_pix))[0]
     # Add epoch dimension
     counts_da = xr.DataArray(
-        counts[np.newaxis, :].astype(np.
+        counts[np.newaxis, :].astype(np.int64),
         name="counts",
         dims=("epoch", CoordNames.HEALPIX_INDEX.value),
         attrs=idex_attrs.get_variable_attributes("healpix_counts"),
@@ -147,10 +147,10 @@ def idex_healpix_map(
     )
     map_attrs = {
         "Sky_tiling_type": SkyTilingType.HEALPIX.value,
-        "HEALPix_nside": nside,
-        "HEALPix_nest": nested,
-        "Spice_reference_frame": IDEX_EVENT_REFERENCE_FRAME,
-        "num_points": n_pix,
+        "HEALPix_nside": str(nside),
+        "HEALPix_nest": str(nested),
+        "Spice_reference_frame": IDEX_EVENT_REFERENCE_FRAME.name,
+        "num_points": str(n_pix),
     } | idex_attrs.get_global_attributes("imap_idex_l2c_sci-healpix")
     l2c_dataset.attrs.update(map_attrs)

@@ -193,7 +193,7 @@ def idex_rectangular_map(
         longitude_wrapped, latitude, bins=[grid.az_bin_edges, grid.el_bin_edges]
     )
     counts_da = xr.DataArray(
-        counts[np.newaxis, :, :].astype(np.
+        counts[np.newaxis, :, :].astype(np.int64),
         name="counts",
         dims=("epoch", "rectangular_lon_pixel", "rectangular_lat_pixel"),
         attrs=idex_attrs.get_variable_attributes("rectangular_counts"),
@@ -241,9 +241,9 @@ def idex_rectangular_map(
     )
     map_attrs = {
         "sky_tiling_type": SkyTilingType.RECTANGULAR.value,
-        "Spacing_degrees": spacing_deg,
-        "Spice_reference_frame": IDEX_EVENT_REFERENCE_FRAME,
-        "num_points": counts.size,
+        "Spacing_degrees": str(spacing_deg),
+        "Spice_reference_frame": IDEX_EVENT_REFERENCE_FRAME.name,
+        "num_points": str(counts.size),
     } | idex_attrs.get_global_attributes("imap_idex_l2c_sci-rectangular")

     l2c_dataset.attrs.update(map_attrs)
imap_processing/lo/l1b/lo_l1b.py CHANGED
@@ -1,5 +1,6 @@
 """IMAP-Lo L1B Data Processing."""

+import logging
 from dataclasses import Field
 from pathlib import Path
 from typing import Any, Union
@@ -17,6 +18,9 @@ from imap_processing.lo.l1b.tof_conversions import (
 from imap_processing.spice.geometry import SpiceFrame, instrument_pointing
 from imap_processing.spice.time import met_to_ttj2000ns, ttj2000ns_to_et

+logger = logging.getLogger(__name__)
+logger.setLevel(logging.INFO)
+

 def lo_l1b(dependencies: dict) -> list[Path]:
     """
@@ -39,9 +43,10 @@ def lo_l1b(dependencies: dict) -> list[Path]:
     # create the attribute manager to access L1A fillval attributes
     attr_mgr_l1a = ImapCdfAttributes()
     attr_mgr_l1a.add_instrument_variable_attrs(instrument="lo", level="l1a")
-
+    logger.info(f"\n Dependencies: {list(dependencies.keys())}\n")
     # if the dependencies are used to create Annotated Direct Events
     if "imap_lo_l1a_de" in dependencies and "imap_lo_l1a_spin" in dependencies:
+        logger.info("\nProcessing IMAP-Lo L1B Direct Events...")
         logical_source = "imap_lo_l1b_de"
         # get the dependency dataset for l1b direct events
         l1a_de = dependencies["imap_lo_l1a_de"]

imap_processing/lo/l1c/lo_l1c.py CHANGED
@@ -67,6 +67,19 @@ def lo_l1c(sci_dependencies: dict, anc_dependencies: list) -> list[xr.Dataset]:
     pset["exposure_time"] = calculate_exposure_times(
         full_counts, l1b_goodtimes_only
     )
+    pset.attrs = attr_mgr.get_global_attributes(logical_source)
+    # TODO: Temp fix before adding attribute variables.
+    # CDF won't open if DEPEND_0 is not deleted currently.
+    del pset["epoch"].attrs["DEPEND_0"]
+
+    pset = pset.assign_coords(
+        {
+            "energy": np.arange(1, 8),
+            "longitude": np.arange(3600),
+            "latitude": np.arange(40),
+        }
+    )
+
     return [pset]


@@ -127,12 +140,8 @@ def filter_goodtimes(l1b_de: xr.Dataset, anc_dependencies: list) -> xr.Dataset:
     l1b_de : xarray.Dataset
         Filtered L1B Direct Event dataset.
     """
-    #
-
-        (item for item in anc_dependencies if "goodtimes" in item), None
-    )
-    # sweep table is a dependency so this should always be in the list
-    goodtimes_table_df = pd.read_csv(goodtimes_table)
+    # the goodtimes are currently the only ancillary file needed for L1C processing
+    goodtimes_table_df = pd.read_csv(anc_dependencies[0])

     # convert goodtimes from MET to TTJ2000
     goodtimes_start = met_to_ttj2000ns(goodtimes_table_df["GoodTime_strt"])
@@ -220,9 +229,9 @@ def create_pset_counts(
     # stack the filtered data into the 3D array
     data = np.column_stack(
         (
+            de_filtered["esa_step"],
             de_filtered["pointing_bin_lon"],
             de_filtered["pointing_bin_lat"],
-            de_filtered["esa_step"],
         )
     )
     # Create the histogram with 3600 longitude bins, 40 latitude bins, and 7 energy bins
@@ -232,7 +241,7 @@

     hist, edges = np.histogramdd(
         data,
-        bins=[lon_edges, lat_edges
+        bins=[energy_edges, lon_edges, lat_edges],
     )

     # add a new axis of size 1 for the epoch
@@ -240,7 +249,7 @@

     counts = xr.DataArray(
         data=hist.astype(np.int16),
-        dims=["epoch", "
+        dims=["epoch", "energy", "longitude", "latitude"],
     )

     return counts
@@ -272,7 +281,7 @@ def calculate_exposure_times(counts: xr.DataArray, l1b_de: xr.Dataset) -> xr.Dat
     energy_edges = np.arange(8)

     data = np.column_stack(
-        (l1b_de["
+        (l1b_de["esa_step"], l1b_de["pointing_bin_lon"], l1b_de["pointing_bin_lat"])
     )

     result = binned_statistic_dd(
@@ -280,14 +289,14 @@ def calculate_exposure_times(counts: xr.DataArray, l1b_de: xr.Dataset) -> xr.Dat
         # exposure time equation from Lo Alg Document 10.1.1.4
         4 * l1b_de["avg_spin_durations"].to_numpy() / 3600,
         statistic="mean",
-        bins=[lon_edges, lat_edges
+        bins=[energy_edges, lon_edges, lat_edges],
     )

     stat = result.statistic[np.newaxis, :, :, :]

     exposure_time = xr.DataArray(
         data=stat.astype(np.float16),
-        dims=["epoch", "
+        dims=["epoch", "energy", "longitude", "latitude"],
     )

     return exposure_time
@@ -317,7 +326,7 @@ def create_datasets(
     # and relative L1A DE time to calculate the absolute DE time,
     # this epoch conversion will go away and the time in the DE dataclass
     # can be used direction
-    epoch_converted_time = [
+    epoch_converted_time = [1]

     # Create a data array for the epoch time
     # TODO: might need to update the attrs to use new YAML file
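The create_pset_counts and calculate_exposure_times changes above reorder the stacked columns and bin edges so that energy comes first. A small standalone sketch of the corrected np.histogramdd call; the sample events and the longitude/latitude edge arrays below are invented, with bin counts taken from the diff's comments (3600 longitude, 40 latitude, 7 energy bins):

    import numpy as np

    # Invented sample events: esa_step in 1..7, lon bin in 0..3599, lat bin in 0..39.
    rng = np.random.default_rng(0)
    data = np.column_stack(
        (
            rng.integers(1, 8, size=1000),     # esa_step (energy), now the first column
            rng.integers(0, 3600, size=1000),  # pointing_bin_lon
            rng.integers(0, 40, size=1000),    # pointing_bin_lat
        )
    )
    energy_edges = np.arange(8)   # 7 energy bins, as in the diff
    lon_edges = np.arange(3601)   # assumed: 3600 longitude bins
    lat_edges = np.arange(41)     # assumed: 40 latitude bins
    hist, edges = np.histogramdd(data, bins=[energy_edges, lon_edges, lat_edges])
    print(hist.shape)  # (7, 3600, 40), matching dims ["energy", "longitude", "latitude"]

Keeping the column order and the bin-edge order aligned is what makes the resulting axes match the new dims list.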
imap_processing/lo/l2/lo_l2.py ADDED
@@ -0,0 +1,213 @@
+"""IMAP-Lo L2 data processing."""
+
+import numpy as np
+import xarray as xr
+
+from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
+from imap_processing.ena_maps import ena_maps
+from imap_processing.ena_maps.ena_maps import RectangularSkyMap
+from imap_processing.spice import geometry
+from imap_processing.spice.geometry import SpiceFrame
+
+
+def lo_l2(sci_dependencies: dict, anc_dependencies: list) -> list[xr.Dataset]:
+    """
+    Will process IMAP-Lo L1C data into Le CDF data products.
+
+    Parameters
+    ----------
+    sci_dependencies : dict
+        Dictionary of datasets needed for L2 data product creation in xarray Datasets.
+    anc_dependencies : list
+        Ancillary files needed for L2 data product creation.
+
+    Returns
+    -------
+    created_file_paths : list[Path]
+        Location of created CDF files.
+    """
+    # create the attribute manager for this data level
+    attr_mgr = ImapCdfAttributes()
+    attr_mgr.add_instrument_global_attrs(instrument="lo")
+    attr_mgr.add_instrument_variable_attrs(instrument="enamaps", level="l2-common")
+    attr_mgr.add_instrument_variable_attrs(instrument="enamaps", level="l2-rectangular")
+
+    # if the dependencies are used to create Annotated Direct Events
+    if "imap_lo_l1c_pset" in sci_dependencies:
+        logical_source = "imap_lo_l2_l090-ena-h-sf-nsp-ram-hae-6deg-3mo"
+        psets = sci_dependencies["imap_lo_l1c_pset"]
+
+        # Create the rectangular sky map from the pointing set.
+        lo_rect_map = project_pset_to_rect_map(
+            psets, spacing_deg=6, spice_frame=geometry.SpiceFrame.ECLIPJ2000
+        )
+        # Add the hydrogen rates to the rectangular map dataset.
+        lo_rect_map.data_1d["h_rate"] = calculate_rates(
+            lo_rect_map.data_1d["h_counts"], lo_rect_map.data_1d["exposure_time"]
+        )
+        # Add the hydrogen flux to the rectangular map dataset.
+        lo_rect_map.data_1d["h_flux"] = calculate_fluxes(lo_rect_map.data_1d["h_rate"])
+        # Create the dataset from the rectangular map.
+        lo_rect_map_ds = lo_rect_map.to_dataset()
+        # Add the attributes to the dataset.
+        lo_rect_map_ds = add_attributes(
+            lo_rect_map_ds, attr_mgr, logical_source=logical_source
+        )
+
+        return [lo_rect_map_ds]
+
+
+def project_pset_to_rect_map(
+    psets: list[xr.Dataset], spacing_deg: int, spice_frame: SpiceFrame
+) -> RectangularSkyMap:
+    """
+    Project the pointing set to a rectangular sky map.
+
+    This function is used to create a rectangular sky map from the pointing set
+    data in the L1C dataset.
+
+    Parameters
+    ----------
+    psets : list[xr.Dataset]
+        List of pointing sets in xarray Dataset format.
+    spacing_deg : int
+        The spacing in degrees for the rectangular sky map.
+    spice_frame : SpiceFrame
+        The SPICE frame to use for the rectangular sky map projection.
+
+    Returns
+    -------
+    RectangularSkyMap
+        The rectangular sky map created from the pointing set data.
+    """
+    lo_rect_map = ena_maps.RectangularSkyMap(
+        spacing_deg=spacing_deg,
+        spice_frame=spice_frame,
+    )
+    for pset in psets:
+        lo_pset = ena_maps.LoPointingSet(pset)
+        lo_rect_map.project_pset_values_to_map(
+            pointing_set=lo_pset,
+            value_keys=["h_counts", "exposure_time"],
+            index_match_method=ena_maps.IndexMatchMethod.PUSH,
+        )
+    return lo_rect_map
+
+
+def calculate_rates(counts: xr.DataArray, exposure_time: xr.DataArray) -> xr.DataArray:
+    """
+    Calculate the hydrogen rates from the counts and exposure time.
+
+    Parameters
+    ----------
+    counts : xr.DataArray
+        The counts of hydrogen or oxygen ENAs.
+    exposure_time : xr.DataArray
+        The exposure time for the counts.
+
+    Returns
+    -------
+    xr.DataArray
+        The calculated hydrogen rates.
+    """
+    # Calculate the rates based on the h_counts and exposure_time
+    rate = counts / exposure_time
+    return rate
+
+
+def calculate_fluxes(rates: xr.DataArray) -> xr.DataArray:
+    """
+    Calculate the flux from the hydrogen rate.
+
+    Parameters
+    ----------
+    rates : xr.Dataset
+        The hydrogen or oxygen rates.
+
+    Returns
+    -------
+    xr.DataArray
+        The calculated flux.
+    """
+    # Temporary values. These will all come from ancillary data when
+    # the data is available and integrated.
+    geometric_factor = 1.0
+    efficiency_factor = 1.0
+    energy_dict = {1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7}
+    energies = np.array([energy_dict[i] for i in range(1, 8)])
+    energies = energies.reshape(1, 7, 1)
+
+    flux = rates / (geometric_factor * energies * efficiency_factor)
+    return flux
+
+
+def add_attributes(
+    lo_map: xr.Dataset, attr_mgr: ImapCdfAttributes, logical_source: str
+) -> xr.Dataset:
+    """
+    Add attributes to the map dataset.
+
+    Parameters
+    ----------
+    lo_map : xr.Dataset
+        The dataset to add attributes to.
+    attr_mgr : ImapCdfAttributes
+        The attribute manager to use for adding attributes.
+    logical_source : str
+        The logical source for the dataset.
+
+    Returns
+    -------
+    xr.Dataset
+        The dataset with added attributes.
+    """
+    # Add the global attributes to the dataset.
+    lo_map.attrs.update(attr_mgr.get_global_attributes(logical_source))
+
+    # TODO: Lo is using different field names than what's in the attributes.
+    # check if the Lo should use exposure factor instead of exposure time.
+    # check if hydrogen and oxygen specific ena intensities should be added
+    # to the attributes or if general ena intensities can be used or updated
+    # in the code. This dictionary is temporary solution for SIT-4
+    map_fields = {
+        "epoch": "epoch",
+        "h_flux": "ena_intensity",
+        "h_rate": "ena_rate",
+        "h_counts": "ena_count",
+        "exposure_time": "exposure_factor",
+        "energy": "energy",
+        "solid_angle": "solid_angle",
+        "longitude": "longitude",
+        "latitude": "latitude",
+    }
+
+    # TODO: The mapping utility is supposed to handle at least some of these
+    # attributes but is not working. Need to investigate this after SIT-4
+    # Add the attributes to the dataset variables.
+    for field, attr_name in map_fields.items():
+        if field in lo_map.data_vars or field in lo_map.coords:
+            lo_map[field].attrs.update(
+                attr_mgr.get_variable_attributes(attr_name, check_schema=False)
+            )
+
+    labels = {
+        "energy": np.arange(1, 8).astype(str),
+        "longitude": lo_map["longitude"].values.astype(str),
+        "latitude": lo_map["latitude"].values.astype(str),
+    }
+    # add the coordinate labels to the dataset
+    for dim, values in labels.items():
+        lo_map = lo_map.assign_coords(
+            {
+                f"{dim}_label": xr.DataArray(
+                    values,
+                    name=f"{dim}_label",
+                    dims=[dim],
+                    attrs=attr_mgr.get_variable_attributes(
+                        f"{dim}_label", check_schema=False
+                    ),
+                )
+            }
+        )
+
+    return lo_map
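The calculate_rates and calculate_fluxes helpers above reduce to plain array arithmetic. A self-contained sketch with invented numbers; the geometric factor, efficiency, and per-step energies are placeholders in the source as well:

    import numpy as np

    # Counts and exposure time per (epoch, energy, pixel); shapes are illustrative only.
    counts = np.full((1, 7, 10), 20.0)
    exposure_time = np.full((1, 7, 10), 4.0)

    rate = counts / exposure_time  # calculate_rates: 5.0 counts per unit exposure

    geometric_factor = 1.0   # placeholder, as in the diff
    efficiency_factor = 1.0  # placeholder, as in the diff
    energies = np.arange(1, 8, dtype=float).reshape(1, 7, 1)  # placeholder energies 1..7
    flux = rate / (geometric_factor * energies * efficiency_factor)
    print(rate[0, 0, 0], flux[0, 0, 0])  # 5.0 5.0 for the first energy step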
imap_processing/mag/l1c/mag_l1c.py CHANGED
@@ -415,6 +415,13 @@
             f"difference between gap start and burst start: "
             f"{gap_timeline[0] - burst_epochs[burst_start]}"
         )
+
+        short = (gap_timeline >= burst_epochs[burst_start]) & (
+            gap_timeline <= burst_epochs[burst_gap_end]
+        )
+        if len(gap_timeline) != (short).sum():
+            print(f"Chopping timeline from {len(gap_timeline)} to {short.sum()}")
+
         # Limit timestamps to only include the areas with burst data
         gap_timeline = gap_timeline[
             (
@@ -573,7 +580,7 @@ def find_gaps(timeline_data: np.ndarray, vectors_per_second: int) -> np.ndarray:
     diffs = abs(np.diff(timeline_data))
     # 3.5e7 == 7.5% of 0.5s in nanoseconds, a common gap. In the future, this number
     # will be calculated from the expected gap.
-    gap_index = np.asarray(diffs - expected_gap >
+    gap_index = np.asarray(diffs - expected_gap > expected_gap * 0.075).nonzero()[0]
     output: np.ndarray = np.zeros((len(gap_index), 3))

     for index, gap in enumerate(gap_index):

imap_processing/mag/l2/mag_l2.py CHANGED
@@ -15,6 +15,7 @@ def mag_l2(
     offsets_dataset: xr.Dataset,
     input_data: xr.Dataset,
     day_to_process: np.datetime64,
+    mode: DataMode = DataMode.NORM,
 ) -> list[xr.Dataset]:
     """
     Complete MAG L2 processing.
@@ -63,6 +64,9 @@ def mag_l2(
     day_to_process : numpy.datetime64['D']
         The 24 hour day to process. This should match the day of the input data and
         the offset file.
+    mode : DataMode
+        The data mode to process. Default is DataMode.NORM (normal mode).
+        Can also be DataMode.BURST for burst mode processing.

     Returns
     -------
@@ -96,14 +100,14 @@ def mag_l2(
         {},
         np.zeros(len(input_data["epoch"].data)),
         np.zeros(len(input_data["epoch"].data)),
-
+        mode,
         offsets=offsets_dataset["offsets"].data,
         timedelta=offsets_dataset["timedeltas"].data,
     )
     attributes = ImapCdfAttributes()
     attributes.add_instrument_global_attrs("mag")
     # temporarily point to l1c
-    attributes.add_instrument_variable_attrs("mag", "
+    attributes.add_instrument_variable_attrs("mag", "l2")
     return [input_data.generate_dataset(attributes, day)]


imap_processing/mag/l2/mag_l2_data.py CHANGED
@@ -222,7 +222,9 @@ class MagL2:
             self.epoch,
             name="epoch",
             dims=["epoch"],
-            attrs=attribute_manager.get_variable_attributes(
+            attrs=attribute_manager.get_variable_attributes(
+                "epoch", check_schema=False
+            ),
         )

         vectors = xr.DataArray(
@@ -236,14 +238,14 @@ class MagL2:
             self.quality_flags,
             name="quality_flags",
             dims=["epoch"],
-            attrs=attribute_manager.get_variable_attributes("
+            attrs=attribute_manager.get_variable_attributes("qf_bitmask"),
         )

         quality_bitmask = xr.DataArray(
             self.quality_flags,
             name="quality_flags",
             dims=["epoch"],
-            attrs=attribute_manager.get_variable_attributes("
+            attrs=attribute_manager.get_variable_attributes("qf"),
         )

         rng = xr.DataArray(
@@ -251,14 +253,14 @@ class MagL2:
             name="range",
             dims=["epoch"],
             # TODO temp attrs
-            attrs=attribute_manager.get_variable_attributes("
+            attrs=attribute_manager.get_variable_attributes("fill"),
         )

         magnitude = xr.DataArray(
             self.magnitude,
             name="magnitude",
             dims=["epoch"],
-            attrs=attribute_manager.get_variable_attributes("
+            attrs=attribute_manager.get_variable_attributes("fill"),
        )

         global_attributes = (