imap-processing 1.0.0-py3-none-any.whl → 1.0.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- imap_processing/_version.py +2 -2
- imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +13 -1
- imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +97 -254
- imap_processing/cdf/config/imap_codice_l2-hi-omni_variable_attrs.yaml +635 -0
- imap_processing/cdf/config/imap_codice_l2-hi-sectored_variable_attrs.yaml +422 -0
- imap_processing/cdf/config/imap_enamaps_l2-common_variable_attrs.yaml +29 -22
- imap_processing/cdf/config/imap_enamaps_l2-healpix_variable_attrs.yaml +2 -0
- imap_processing/cdf/config/imap_enamaps_l2-rectangular_variable_attrs.yaml +12 -2
- imap_processing/cdf/config/imap_swapi_variable_attrs.yaml +2 -13
- imap_processing/cdf/utils.py +2 -2
- imap_processing/cli.py +10 -27
- imap_processing/codice/codice_l1a_lo_angular.py +362 -0
- imap_processing/codice/codice_l1a_lo_species.py +282 -0
- imap_processing/codice/codice_l1b.py +62 -97
- imap_processing/codice/codice_l2.py +801 -174
- imap_processing/codice/codice_new_l1a.py +64 -0
- imap_processing/codice/constants.py +96 -0
- imap_processing/codice/utils.py +270 -0
- imap_processing/ena_maps/ena_maps.py +157 -95
- imap_processing/ena_maps/utils/coordinates.py +5 -0
- imap_processing/ena_maps/utils/corrections.py +450 -0
- imap_processing/ena_maps/utils/map_utils.py +143 -42
- imap_processing/ena_maps/utils/naming.py +3 -1
- imap_processing/hi/hi_l1c.py +34 -12
- imap_processing/hi/hi_l2.py +82 -44
- imap_processing/ialirt/constants.py +7 -1
- imap_processing/ialirt/generate_coverage.py +3 -1
- imap_processing/ialirt/l0/parse_mag.py +1 -0
- imap_processing/ialirt/l0/process_codice.py +66 -0
- imap_processing/ialirt/l0/process_hit.py +1 -0
- imap_processing/ialirt/l0/process_swapi.py +1 -0
- imap_processing/ialirt/l0/process_swe.py +2 -0
- imap_processing/ialirt/process_ephemeris.py +6 -2
- imap_processing/ialirt/utils/create_xarray.py +4 -2
- imap_processing/idex/idex_l2a.py +2 -2
- imap_processing/idex/idex_l2b.py +1 -1
- imap_processing/lo/l1c/lo_l1c.py +62 -4
- imap_processing/lo/l2/lo_l2.py +85 -15
- imap_processing/mag/l1a/mag_l1a.py +2 -2
- imap_processing/mag/l1a/mag_l1a_data.py +71 -13
- imap_processing/mag/l1c/interpolation_methods.py +34 -13
- imap_processing/mag/l1c/mag_l1c.py +117 -67
- imap_processing/mag/l1d/mag_l1d_data.py +3 -1
- imap_processing/quality_flags.py +1 -0
- imap_processing/spice/geometry.py +11 -9
- imap_processing/spice/pointing_frame.py +77 -50
- imap_processing/swapi/constants.py +4 -0
- imap_processing/swapi/l1/swapi_l1.py +59 -24
- imap_processing/swapi/l2/swapi_l2.py +17 -3
- imap_processing/swe/utils/swe_constants.py +7 -7
- imap_processing/ultra/l1a/ultra_l1a.py +121 -72
- imap_processing/ultra/l1b/de.py +57 -1
- imap_processing/ultra/l1b/extendedspin.py +1 -1
- imap_processing/ultra/l1b/ultra_l1b_annotated.py +0 -1
- imap_processing/ultra/l1b/ultra_l1b_culling.py +2 -2
- imap_processing/ultra/l1b/ultra_l1b_extended.py +25 -12
- imap_processing/ultra/l1c/helio_pset.py +29 -6
- imap_processing/ultra/l1c/l1c_lookup_utils.py +4 -2
- imap_processing/ultra/l1c/spacecraft_pset.py +10 -6
- imap_processing/ultra/l1c/ultra_l1c.py +6 -6
- imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +82 -20
- imap_processing/ultra/l2/ultra_l2.py +2 -2
- imap_processing-1.0.2.dist-info/METADATA +121 -0
- {imap_processing-1.0.0.dist-info → imap_processing-1.0.2.dist-info}/RECORD +67 -61
- imap_processing-1.0.0.dist-info/METADATA +0 -120
- {imap_processing-1.0.0.dist-info → imap_processing-1.0.2.dist-info}/LICENSE +0 -0
- {imap_processing-1.0.0.dist-info → imap_processing-1.0.2.dist-info}/WHEEL +0 -0
- {imap_processing-1.0.0.dist-info → imap_processing-1.0.2.dist-info}/entry_points.txt +0 -0
imap_processing/cli.py
CHANGED
@@ -49,7 +49,7 @@ from imap_processing.cdf.utils import load_cdf, write_cdf
 # from imap_processing import cdf
 # In code:
 # call cdf.utils.write_cdf
-from imap_processing.codice import
+from imap_processing.codice import codice_l1b, codice_l2, codice_new_l1a
 from imap_processing.glows.l1a.glows_l1a import glows_l1a
 from imap_processing.glows.l1b.glows_l1b import glows_l1b, glows_l1b_de
 from imap_processing.glows.l2.glows_l2 import glows_l2
@@ -612,14 +612,8 @@ class Codice(ProcessInstrument):
         datasets: list[xr.Dataset] = []

         if self.data_level == "l1a":
-            science_files = dependencies.get_file_paths(source="codice")
-            if len(science_files) != 1:
-                raise ValueError(
-                    f"CoDICE L1A requires exactly one input science file, received: "
-                    f"{science_files}."
-                )
             # process data
-            datasets =
+            datasets = codice_new_l1a.process_l1a(dependencies)

         if self.data_level == "l1b":
             science_files = dependencies.get_file_paths(source="codice")
@@ -632,13 +626,7 @@ class Codice(ProcessInstrument):
             datasets = [codice_l1b.process_codice_l1b(science_files[0])]

         if self.data_level == "l2":
-
-            if len(science_files) != 1:
-                raise ValueError(
-                    f"CoDICE L2 requires exactly one input science file, received: "
-                    f"{science_files}."
-                )
-            datasets = [codice_l2.process_codice_l2(science_files[0])]
+            datasets = [codice_l2.process_codice_l2(self.descriptor, dependencies)]

         return datasets

@@ -1212,8 +1200,9 @@ class Mag(ProcessInstrument):
             if "raw" not in ds.attrs["Logical_source"] and not np.all(
                 ds["epoch"].values[1:] > ds["epoch"].values[:-1]
             ):
-
-                    "Timestamps for output file are not
+                logger.warning(
+                    f"Timestamps for output file {ds.attrs['Logical_source']} are not "
+                    f"monotonically increasing."
                 )
         return datasets

@@ -1255,7 +1244,7 @@ class Spacecraft(ProcessInstrument):
             )
             ah_paths = [path for path in spice_inputs if ".ah" in path.suffixes]
             pointing_kernel_paths = pointing_frame.generate_pointing_attitude_kernel(
-                ah_paths
+                ah_paths
             )
             processed_dataset.extend(pointing_kernel_paths)
         else:
@@ -1305,7 +1294,7 @@ class Swapi(ProcessInstrument):
             )

             # process science or housekeeping data
-            datasets = swapi_l1(dependencies)
+            datasets = swapi_l1(dependencies, descriptor=self.descriptor)
         elif self.data_level == "l2":
             if len(dependency_list) != 3:
                 raise ValueError(
@@ -1440,7 +1429,7 @@ class Ultra(ProcessInstrument):
                     f"Unexpected science_files found for ULTRA L1A:"
                     f"{science_files}. Expected only one dependency."
                 )
-            datasets = ultra_l1a.ultra_l1a(science_files[0])
+            datasets = ultra_l1a.ultra_l1a(science_files[0], create_derived_l1b=True)
         elif self.data_level == "l1b":
             science_files = dependencies.get_file_paths(source="ultra", data_type="l1a")
             l1a_dict = {
@@ -1477,13 +1466,7 @@ class Ultra(ProcessInstrument):
             ancillary_files = {}
             for path in anc_paths:
                 ancillary_files[path.stem.split("_")[2]] = path
-
-            # Only the helio pset needs IMAP frames
-            if any("imap_frames" in path.as_posix() for path in spice_paths):
-                imap_frames = True
-            else:
-                imap_frames = False
-            datasets = ultra_l1c.ultra_l1c(combined, ancillary_files, imap_frames)
+            datasets = ultra_l1c.ultra_l1c(combined, ancillary_files, self.descriptor)
         elif self.data_level == "l2":
             all_pset_filepaths = dependencies.get_file_paths(
                 source="ultra", descriptor="pset"
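Read together, the CoDICE hunks above drop the per-level file-count validation from the CLI and instead hand the full dependency collection (and, for L2, the product descriptor) to the instrument code. A condensed, hypothetical sketch of the resulting dispatch follows; the wrapper function and its parameter names are illustrative only, while the three processing calls are the ones introduced in 1.0.2.

# Hypothetical condensed sketch of the CoDICE dispatch after this change.
# Only the three processing calls come from the diff above; the wrapper and
# its signature mirror the hunks but are not part of the package.
import xarray as xr

from imap_processing.codice import codice_l1b, codice_l2, codice_new_l1a


def dispatch_codice(data_level, descriptor, dependencies) -> list[xr.Dataset]:
    datasets: list[xr.Dataset] = []
    if data_level == "l1a":
        # L1A now receives every input via the dependency collection instead
        # of requiring exactly one science file resolved by the CLI.
        datasets = codice_new_l1a.process_l1a(dependencies)
    if data_level == "l1b":
        science_files = dependencies.get_file_paths(source="codice")
        datasets = [codice_l1b.process_codice_l1b(science_files[0])]
    if data_level == "l2":
        # L2 now takes the product descriptor plus the dependencies rather
        # than a single resolved file path.
        datasets = [codice_l2.process_codice_l2(descriptor, dependencies)]
    return datasets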
imap_processing/codice/codice_l1a_lo_angular.py
ADDED
@@ -0,0 +1,362 @@
+"""CoDICE Lo Angular L1A processing functions."""
+
+import logging
+from pathlib import Path
+
+import numpy as np
+import xarray as xr
+
+from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
+from imap_processing.codice import constants
+from imap_processing.codice.decompress import decompress
+from imap_processing.codice.utils import (
+    CODICEAPID,
+    ViewTabInfo,
+    calculate_acq_time_per_step,
+    get_codice_epoch_time,
+    get_collapse_pattern_shape,
+    get_view_tab_info,
+    index_to_position,
+    read_sci_lut,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def _despin_species_data(
+    species_data: np.ndarray, sci_lut_data: dict, view_tab_obj: ViewTabInfo
+) -> np.ndarray:
+    """
+    Apply despinning mapping for angular products.
+
+    Despinned data shape is (num_packets, num_species, 24, inst_az) where
+    we expand spin_sector to 24 by filling with zeros in 12 to 24 or 0 to 11
+    based on pixel orientation.
+
+    Parameters
+    ----------
+    species_data : np.ndarray
+        The species data array to be despun.
+    sci_lut_data : dict
+        The science LUT data used for despinning.
+    view_tab_obj : ViewTabInfo
+        The view table information object.
+
+    Returns
+    -------
+    np.ndarray
+        The despun species data array in
+        (num_packets, num_species, esa_steps, 24, inst_az).
+    """
+    # species_data shape: (num_packets, num_species, esa_steps, *collapsed_dims)
+    num_packets, num_species, esa_steps = species_data.shape[:3]
+    collapsed_dims = species_data.shape[3:]
+    inst_az_dim = collapsed_dims[-1]
+
+    # Prepare despinning output: (num_packets, num_species, esa_steps, 24, inst_az_dim)
+    # 24 is derived by multiplying spin sector dim from collapse table by 2
+    spin_sector_len = constants.LO_DESPIN_SPIN_SECTORS
+    despun_shape = (num_packets, num_species, esa_steps, spin_sector_len, inst_az_dim)
+    despun_data = np.full(despun_shape, 0)
+
+    # Pixel orientation array and mapping positions
+    pixel_orientation = np.array(
+        sci_lut_data["lo_stepping_tab"]["pixel_orientation"]["data"]
+    )
+    # index_to_position gets the position from collapse table. Eg.
+    # [1, 2, 3, 23, 24] for SW angular
+    angular_position = index_to_position(sci_lut_data, 0, view_tab_obj.collapse_table)
+    orientation_a = pixel_orientation == "A"
+    orientation_b = pixel_orientation == "B"
+
+    # Despin data based on orientation and angular position
+    for pos_idx, position in enumerate(angular_position):
+        if position <= 12:
+            # Case 1: position 0-12, orientation A, append to first half
+            despun_data[:, :, orientation_a, :12, pos_idx] = species_data[
+                :, :, orientation_a, :, pos_idx
+            ]
+            # Case 2: position 12-24, orientation B, append to second half
+            despun_data[:, :, orientation_b, 12:, pos_idx] = species_data[
+                :, :, orientation_b, :, pos_idx
+            ]
+        else:
+            # Case 3: position 12-24, orientation A, append to second half
+            despun_data[:, :, orientation_a, 12:, pos_idx] = species_data[
+                :, :, orientation_a, :, pos_idx
+            ]
+            # Case 4: position 0-12, orientation B, append to first half
+            despun_data[:, :, orientation_b, :12, pos_idx] = species_data[
+                :, :, orientation_b, :, pos_idx
+            ]
+
+    return despun_data
+
+
+def l1a_lo_angular(unpacked_dataset: xr.Dataset, lut_file: Path) -> xr.Dataset:
+    """
+    L1A processing code.
+
+    Parameters
+    ----------
+    unpacked_dataset : xarray.Dataset
+        The decompressed and unpacked data from the packet file.
+    lut_file : pathlib.Path
+        Path to the LUT (Lookup Table) file used for processing.
+
+    Returns
+    -------
+    xarray.Dataset
+        The processed L1A dataset for the given species product.
+    """
+    # Get these values from unpacked data. These are used to
+    # lookup in LUT table.
+    table_id = unpacked_dataset["table_id"].values[0]
+    view_id = unpacked_dataset["view_id"].values[0]
+    apid = unpacked_dataset["pkt_apid"].values[0]
+    plan_id = unpacked_dataset["plan_id"].values[0]
+    plan_step = unpacked_dataset["plan_step"].values[0]
+
+    logger.info(
+        f"Processing angular with - APID: 0x{apid:X}, View ID: {view_id}, "
+        f"Table ID: {table_id}, Plan ID: {plan_id}, Plan Step: {plan_step}"
+    )
+
+    # ========== Get LUT Data ===========
+    # Read information from LUT
+    sci_lut_data = read_sci_lut(lut_file, table_id)
+
+    view_tab_info = get_view_tab_info(sci_lut_data, view_id, apid)
+    view_tab_obj = ViewTabInfo(
+        apid=apid,
+        view_id=view_id,
+        sensor=view_tab_info["sensor"],
+        three_d_collapsed=view_tab_info["3d_collapse"],
+        collapse_table=view_tab_info["collapse_table"],
+    )
+
+    if view_tab_obj.sensor != 0:
+        raise ValueError("Unsupported sensor ID for Lo angular processing.")
+
+    # ========= Decompress and Reshape Data ===========
+    # Lookup SW or NSW species based on APID
+    if view_tab_obj.apid == CODICEAPID.COD_LO_SW_ANGULAR_COUNTS:
+        species_names = sci_lut_data["data_product_lo_tab"]["0"]["angular"]["sw"][
+            "species_names"
+        ]
+        logical_source_id = "imap_codice_l1a_lo-sw-angular"
+    elif view_tab_obj.apid == CODICEAPID.COD_LO_NSW_ANGULAR_COUNTS:
+        species_names = sci_lut_data["data_product_lo_tab"]["0"]["angular"]["nsw"][
+            "species_names"
+        ]
+        logical_source_id = "imap_codice_l1a_lo-nsw-angular"
+    else:
+        raise ValueError(f"Unknown apid {view_tab_obj.apid} in Lo species processing.")
+
+    compression_algorithm = constants.LO_COMPRESSION_ID_LOOKUP[view_tab_obj.view_id]
+    # Decompress data using byte count information from decommed data
+    binary_data_list = unpacked_dataset["data"].values
+    byte_count_list = unpacked_dataset["byte_count"].values
+
+    # The decompressed data in the shape of (epoch, n). Then reshape later.
+    decompressed_data = [
+        decompress(
+            packet_data[:byte_count],
+            compression_algorithm,
+        )
+        for (packet_data, byte_count) in zip(
+            binary_data_list, byte_count_list, strict=False
+        )
+    ]
+
+    # Look up collapse pattern using LUT table. This should return collapsed shape.
+    collapsed_shape = get_collapse_pattern_shape(
+        sci_lut_data, view_tab_obj.sensor, view_tab_obj.collapse_table
+    )
+
+    # Reshape decompressed data to:
+    # (num_packets, num_species, esa_steps, 12, 5)
+    # 24 includes despinning spin sector. Then at later steps,
+    # we handle despinning.
+    num_packets = len(binary_data_list)
+    esa_steps = constants.NUM_ESA_STEPS
+    num_species = len(species_names)
+    species_data = np.array(decompressed_data).reshape(
+        num_packets, num_species, esa_steps, *collapsed_shape
+    )
+
+    # Despinning
+    # ----------------
+    species_data = _despin_species_data(species_data, sci_lut_data, view_tab_obj)
+
+    # ========== Get Voltage Data from LUT ===========
+    # Use plan id and plan step to get voltage data's table_number in ESA sweep table.
+    # Voltage data is (128,)
+    esa_table_number = sci_lut_data["plan_tab"][f"({plan_id}, {plan_step})"][
+        "lo_stepping"
+    ]
+    voltage_data = sci_lut_data["esa_sweep_tab"][f"{esa_table_number}"]
+
+    # ========= Get Epoch Time Data ===========
+    # Epoch center time and delta
+    epoch_center, deltas = get_codice_epoch_time(
+        unpacked_dataset["acq_start_seconds"].values,
+        unpacked_dataset["acq_start_subseconds"].values,
+        unpacked_dataset["spin_period"].values,
+        view_tab_obj,
+    )
+
+    # ========== Create CDF Dataset with Metadata ===========
+    cdf_attrs = ImapCdfAttributes()
+    cdf_attrs.add_instrument_global_attrs("codice")
+    cdf_attrs.add_instrument_variable_attrs("codice", "l1a")
+
+    l1a_dataset = xr.Dataset(
+        coords={
+            "epoch": xr.DataArray(
+                epoch_center,
+                dims=("epoch",),
+                attrs=cdf_attrs.get_variable_attributes("epoch", check_schema=False),
+            ),
+            "epoch_delta_minus": xr.DataArray(
+                deltas,
+                dims=("epoch",),
+                attrs=cdf_attrs.get_variable_attributes(
+                    "epoch_delta_minus", check_schema=False
+                ),
+            ),
+            "epoch_delta_plus": xr.DataArray(
+                deltas,
+                dims=("epoch",),
+                attrs=cdf_attrs.get_variable_attributes(
+                    "epoch_delta_plus", check_schema=False
+                ),
+            ),
+            "esa_step": xr.DataArray(
+                np.arange(128),
+                dims=("esa_step",),
+                attrs=cdf_attrs.get_variable_attributes("esa_step", check_schema=False),
+            ),
+            "esa_step_label": xr.DataArray(
+                np.arange(128).astype(str),
+                dims=("esa_step",),
+                attrs=cdf_attrs.get_variable_attributes(
+                    "esa_step_label", check_schema=False
+                ),
+            ),
+            "inst_az": xr.DataArray(
+                index_to_position(sci_lut_data, 0, view_tab_obj.collapse_table),
+                dims=("inst_az",),
+                attrs=cdf_attrs.get_variable_attributes("inst_az", check_schema=False),
+            ),
+            "inst_az_label": xr.DataArray(
+                index_to_position(sci_lut_data, 0, view_tab_obj.collapse_table).astype(
+                    str
+                ),
+                dims=("inst_az",),
+                attrs=cdf_attrs.get_variable_attributes(
+                    "inst_az_label", check_schema=False
+                ),
+            ),
+            "k_factor": xr.DataArray(
+                np.array([constants.K_FACTOR]),
+                dims=("k_factor",),
+                attrs=cdf_attrs.get_variable_attributes("k_factor", check_schema=False),
+            ),
+            "spin_sector": xr.DataArray(
+                np.arange(24, dtype=np.uint8),
+                dims=("spin_sector",),
+                attrs=cdf_attrs.get_variable_attributes(
+                    "spin_sector", check_schema=False
+                ),
+            ),
+            "spin_sector_label": xr.DataArray(
+                np.arange(24).astype(str),
+                dims=("spin_sector",),
+                attrs=cdf_attrs.get_variable_attributes(
+                    "spin_sector_label", check_schema=False
+                ),
+            ),
+        },
+        attrs=cdf_attrs.get_global_attributes(logical_source_id),
+    )
+    # Add first few unique variables
+    l1a_dataset["k_factor"] = xr.DataArray(
+        np.array([constants.K_FACTOR]),
+        dims=("k_factor",),
+        attrs=cdf_attrs.get_variable_attributes("k_factor_attrs", check_schema=False),
+    )
+    l1a_dataset["spin_period"] = xr.DataArray(
+        unpacked_dataset["spin_period"].values * constants.SPIN_PERIOD_CONVERSION,
+        dims=("epoch",),
+        attrs=cdf_attrs.get_variable_attributes("spin_period"),
+    )
+    l1a_dataset["voltage_table"] = xr.DataArray(
+        np.array(voltage_data),
+        dims=("esa_step",),
+        attrs=cdf_attrs.get_variable_attributes("voltage_table", check_schema=False),
+    )
+    l1a_dataset["data_quality"] = xr.DataArray(
+        unpacked_dataset["suspect"].values,
+        dims=("epoch",),
+        attrs=cdf_attrs.get_variable_attributes("data_quality"),
+    )
+    l1a_dataset["acquisition_time_per_step"] = xr.DataArray(
+        calculate_acq_time_per_step(sci_lut_data["lo_stepping_tab"]),
+        dims=("esa_step",),
+        attrs=cdf_attrs.get_variable_attributes(
+            "acquisition_time_per_step", check_schema=False
+        ),
+    )
+
+    # Carry over these variables from unpacked data to l1a_dataset
+    l1a_carryover_vars = [
+        "sw_bias_gain_mode",
+        "st_bias_gain_mode",
+        "rgfo_half_spin",
+        "nso_half_spin",
+    ]
+    # Loop through them since we need to set their attrs too
+    for var in l1a_carryover_vars:
+        l1a_dataset[var] = xr.DataArray(
+            unpacked_dataset[var].values,
+            dims=("epoch",),
+            attrs=cdf_attrs.get_variable_attributes(var),
+        )
+
+    # Finally, add species data variables and their uncertainties
+    for species_data_idx, species in enumerate(species_names):
+        species_attrs = cdf_attrs.get_variable_attributes("lo-angular-attrs")
+        unc_attrs = cdf_attrs.get_variable_attributes("lo-angular-unc-attrs")
+
+        direction = (
+            "Sunward"
+            if view_tab_obj.apid == CODICEAPID.COD_LO_SW_ANGULAR_COUNTS
+            else "Non-Sunward"
+        )
+        # Replace {species} and {direction} in attrs
+        species_attrs["CATDESC"] = species_attrs["CATDESC"].format(
+            species=species, direction=direction
+        )
+        species_attrs["FIELDNAM"] = species_attrs["FIELDNAM"].format(
+            species=species, direction=direction
+        )
+        l1a_dataset[species] = xr.DataArray(
+            species_data[:, species_data_idx, :, :, :],
+            dims=("epoch", "esa_step", "spin_sector", "inst_az"),
+            attrs=species_attrs,
+        )
+        # Uncertainty data
+        unc_attrs["CATDESC"] = unc_attrs["CATDESC"].format(
+            species=species, direction=direction
+        )
+        unc_attrs["FIELDNAM"] = unc_attrs["FIELDNAM"].format(
+            species=species, direction=direction
+        )
+        l1a_dataset[f"unc_{species}"] = xr.DataArray(
+            np.sqrt(species_data[:, species_data_idx, :, :, :]),
+            dims=("epoch", "esa_step", "spin_sector", "inst_az"),
+            attrs=unc_attrs,
+        )
+
+    return l1a_dataset
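The despin step in _despin_species_data above is easiest to see on toy shapes: each collapsed 12-sector block lands in either the first half (sectors 0-11) or the second half (12-23) of the 24-sector output, chosen per ESA step by pixel orientation ("A" vs "B") and swapped when the LUT angular position exceeds 12. A minimal NumPy sketch of that index mapping, using invented shapes, orientations, and positions rather than flight LUT values:

import numpy as np

# Invented stand-ins: 2 packets, 1 species, 4 ESA steps, 12 collapsed spin
# sectors, 3 azimuth positions. Orientations and positions are illustrative
# only; the flight code reads them from the science LUT.
species_data = np.arange(2 * 1 * 4 * 12 * 3).reshape(2, 1, 4, 12, 3)
pixel_orientation = np.array(["A", "B", "A", "B"])  # one entry per ESA step
angular_position = np.array([3, 15, 20])  # one entry per inst_az index

despun = np.zeros((2, 1, 4, 24, 3), dtype=species_data.dtype)
orient_a = pixel_orientation == "A"
orient_b = pixel_orientation == "B"

for pos_idx, position in enumerate(angular_position):
    if position <= 12:
        # A-oriented ESA steps fill sectors 0-11, B-oriented fill 12-23.
        despun[:, :, orient_a, :12, pos_idx] = species_data[:, :, orient_a, :, pos_idx]
        despun[:, :, orient_b, 12:, pos_idx] = species_data[:, :, orient_b, :, pos_idx]
    else:
        # Positions above 12 swap the halves.
        despun[:, :, orient_a, 12:, pos_idx] = species_data[:, :, orient_a, :, pos_idx]
        despun[:, :, orient_b, :12, pos_idx] = species_data[:, :, orient_b, :, pos_idx]

# Every (packet, species, ESA step, azimuth) sample now occupies exactly one
# half of the 24-sector spin axis; the unused half stays zero-filled.

In the released module the same assignment pattern runs over the 128 ESA steps (constants.NUM_ESA_STEPS) and the azimuth positions returned by index_to_position, with constants.LO_DESPIN_SPIN_SECTORS fixing the 24-sector output.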