imap-processing 0.17.0__py3-none-any.whl → 0.19.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of imap-processing might be problematic. Click here for more details.
- imap_processing/_version.py +2 -2
- imap_processing/ancillary/ancillary_dataset_combiner.py +161 -1
- imap_processing/ccsds/excel_to_xtce.py +12 -0
- imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +6 -6
- imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +312 -274
- imap_processing/cdf/config/imap_codice_l1b_variable_attrs.yaml +39 -28
- imap_processing/cdf/config/imap_codice_l2_variable_attrs.yaml +1048 -183
- imap_processing/cdf/config/imap_constant_attrs.yaml +4 -2
- imap_processing/cdf/config/imap_glows_l1b_variable_attrs.yaml +12 -0
- imap_processing/cdf/config/imap_hi_global_cdf_attrs.yaml +5 -0
- imap_processing/cdf/config/imap_hit_global_cdf_attrs.yaml +10 -4
- imap_processing/cdf/config/imap_hit_l1a_variable_attrs.yaml +163 -100
- imap_processing/cdf/config/imap_hit_l2_variable_attrs.yaml +4 -4
- imap_processing/cdf/config/imap_ialirt_l1_variable_attrs.yaml +97 -54
- imap_processing/cdf/config/imap_idex_l2a_variable_attrs.yaml +33 -4
- imap_processing/cdf/config/imap_idex_l2b_variable_attrs.yaml +44 -44
- imap_processing/cdf/config/imap_idex_l2c_variable_attrs.yaml +77 -61
- imap_processing/cdf/config/imap_lo_global_cdf_attrs.yaml +30 -0
- imap_processing/cdf/config/imap_lo_l1a_variable_attrs.yaml +4 -15
- imap_processing/cdf/config/imap_lo_l1c_variable_attrs.yaml +189 -98
- imap_processing/cdf/config/imap_mag_global_cdf_attrs.yaml +99 -2
- imap_processing/cdf/config/imap_mag_l1c_variable_attrs.yaml +24 -1
- imap_processing/cdf/config/imap_ultra_global_cdf_attrs.yaml +60 -0
- imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +99 -11
- imap_processing/cdf/config/imap_ultra_l1c_variable_attrs.yaml +50 -7
- imap_processing/cli.py +121 -44
- imap_processing/codice/codice_l1a.py +165 -77
- imap_processing/codice/codice_l1b.py +1 -1
- imap_processing/codice/codice_l2.py +118 -19
- imap_processing/codice/constants.py +1217 -1089
- imap_processing/decom.py +1 -4
- imap_processing/ena_maps/ena_maps.py +32 -25
- imap_processing/ena_maps/utils/naming.py +8 -2
- imap_processing/glows/ancillary/imap_glows_exclusions-by-instr-team_20250923_v002.dat +10 -0
- imap_processing/glows/ancillary/imap_glows_map-of-excluded-regions_20250923_v002.dat +393 -0
- imap_processing/glows/ancillary/imap_glows_map-of-uv-sources_20250923_v002.dat +593 -0
- imap_processing/glows/ancillary/imap_glows_pipeline_settings_20250923_v002.json +54 -0
- imap_processing/glows/ancillary/imap_glows_suspected-transients_20250923_v002.dat +10 -0
- imap_processing/glows/l1b/glows_l1b.py +99 -9
- imap_processing/glows/l1b/glows_l1b_data.py +350 -38
- imap_processing/glows/l2/glows_l2.py +11 -0
- imap_processing/hi/hi_l1a.py +124 -3
- imap_processing/hi/hi_l1b.py +154 -71
- imap_processing/hi/hi_l2.py +84 -51
- imap_processing/hi/utils.py +153 -8
- imap_processing/hit/l0/constants.py +3 -0
- imap_processing/hit/l0/decom_hit.py +5 -8
- imap_processing/hit/l1a/hit_l1a.py +375 -45
- imap_processing/hit/l1b/constants.py +5 -0
- imap_processing/hit/l1b/hit_l1b.py +61 -131
- imap_processing/hit/l2/constants.py +1 -1
- imap_processing/hit/l2/hit_l2.py +10 -11
- imap_processing/ialirt/calculate_ingest.py +219 -0
- imap_processing/ialirt/constants.py +32 -1
- imap_processing/ialirt/generate_coverage.py +201 -0
- imap_processing/ialirt/l0/ialirt_spice.py +5 -2
- imap_processing/ialirt/l0/parse_mag.py +337 -29
- imap_processing/ialirt/l0/process_hit.py +5 -3
- imap_processing/ialirt/l0/process_swapi.py +41 -25
- imap_processing/ialirt/l0/process_swe.py +23 -7
- imap_processing/ialirt/process_ephemeris.py +70 -14
- imap_processing/ialirt/utils/constants.py +22 -16
- imap_processing/ialirt/utils/create_xarray.py +42 -19
- imap_processing/idex/idex_constants.py +1 -5
- imap_processing/idex/idex_l0.py +2 -2
- imap_processing/idex/idex_l1a.py +2 -3
- imap_processing/idex/idex_l1b.py +2 -3
- imap_processing/idex/idex_l2a.py +130 -4
- imap_processing/idex/idex_l2b.py +313 -119
- imap_processing/idex/idex_utils.py +1 -3
- imap_processing/lo/l0/lo_apid.py +1 -0
- imap_processing/lo/l0/lo_science.py +25 -24
- imap_processing/lo/l1a/lo_l1a.py +44 -0
- imap_processing/lo/l1b/lo_l1b.py +3 -3
- imap_processing/lo/l1c/lo_l1c.py +116 -50
- imap_processing/lo/l2/lo_l2.py +29 -29
- imap_processing/lo/lo_ancillary.py +55 -0
- imap_processing/lo/packet_definitions/lo_xtce.xml +5359 -106
- imap_processing/mag/constants.py +1 -0
- imap_processing/mag/l1a/mag_l1a.py +1 -0
- imap_processing/mag/l1a/mag_l1a_data.py +26 -0
- imap_processing/mag/l1b/mag_l1b.py +3 -2
- imap_processing/mag/l1c/interpolation_methods.py +14 -15
- imap_processing/mag/l1c/mag_l1c.py +23 -6
- imap_processing/mag/l1d/__init__.py +0 -0
- imap_processing/mag/l1d/mag_l1d.py +176 -0
- imap_processing/mag/l1d/mag_l1d_data.py +725 -0
- imap_processing/mag/l2/__init__.py +0 -0
- imap_processing/mag/l2/mag_l2.py +25 -20
- imap_processing/mag/l2/mag_l2_data.py +199 -130
- imap_processing/quality_flags.py +28 -2
- imap_processing/spice/geometry.py +101 -36
- imap_processing/spice/pointing_frame.py +1 -7
- imap_processing/spice/repoint.py +29 -2
- imap_processing/spice/spin.py +32 -8
- imap_processing/spice/time.py +60 -19
- imap_processing/swapi/l1/swapi_l1.py +10 -4
- imap_processing/swapi/l2/swapi_l2.py +66 -24
- imap_processing/swapi/swapi_utils.py +1 -1
- imap_processing/swe/l1b/swe_l1b.py +3 -6
- imap_processing/ultra/constants.py +28 -3
- imap_processing/ultra/l0/decom_tools.py +15 -8
- imap_processing/ultra/l0/decom_ultra.py +35 -11
- imap_processing/ultra/l0/ultra_utils.py +102 -12
- imap_processing/ultra/l1a/ultra_l1a.py +26 -6
- imap_processing/ultra/l1b/cullingmask.py +6 -3
- imap_processing/ultra/l1b/de.py +122 -26
- imap_processing/ultra/l1b/extendedspin.py +29 -2
- imap_processing/ultra/l1b/lookup_utils.py +424 -50
- imap_processing/ultra/l1b/quality_flag_filters.py +23 -0
- imap_processing/ultra/l1b/ultra_l1b_culling.py +356 -5
- imap_processing/ultra/l1b/ultra_l1b_extended.py +534 -90
- imap_processing/ultra/l1c/helio_pset.py +127 -7
- imap_processing/ultra/l1c/l1c_lookup_utils.py +256 -0
- imap_processing/ultra/l1c/spacecraft_pset.py +90 -15
- imap_processing/ultra/l1c/ultra_l1c.py +6 -0
- imap_processing/ultra/l1c/ultra_l1c_culling.py +85 -0
- imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +446 -341
- imap_processing/ultra/l2/ultra_l2.py +0 -1
- imap_processing/ultra/utils/ultra_l1_utils.py +40 -3
- imap_processing/utils.py +3 -4
- {imap_processing-0.17.0.dist-info → imap_processing-0.19.0.dist-info}/METADATA +3 -3
- {imap_processing-0.17.0.dist-info → imap_processing-0.19.0.dist-info}/RECORD +126 -126
- imap_processing/idex/idex_l2c.py +0 -250
- imap_processing/spice/kernels.py +0 -187
- imap_processing/ultra/lookup_tables/Angular_Profiles_FM45_LeftSlit.csv +0 -526
- imap_processing/ultra/lookup_tables/Angular_Profiles_FM45_RightSlit.csv +0 -526
- imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_LeftSlit.csv +0 -526
- imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_RightSlit.csv +0 -524
- imap_processing/ultra/lookup_tables/EgyNorm.mem.csv +0 -32769
- imap_processing/ultra/lookup_tables/FM45_Startup1_ULTRA_IMGPARAMS_20240719.csv +0 -2
- imap_processing/ultra/lookup_tables/FM90_Startup1_ULTRA_IMGPARAMS_20240719.csv +0 -2
- imap_processing/ultra/lookup_tables/dps_grid45_compressed.cdf +0 -0
- imap_processing/ultra/lookup_tables/ultra45_back-pos-luts.csv +0 -4097
- imap_processing/ultra/lookup_tables/ultra45_tdc_norm.csv +0 -2050
- imap_processing/ultra/lookup_tables/ultra90_back-pos-luts.csv +0 -4097
- imap_processing/ultra/lookup_tables/ultra90_tdc_norm.csv +0 -2050
- imap_processing/ultra/lookup_tables/yadjust.csv +0 -257
- {imap_processing-0.17.0.dist-info → imap_processing-0.19.0.dist-info}/LICENSE +0 -0
- {imap_processing-0.17.0.dist-info → imap_processing-0.19.0.dist-info}/WHEEL +0 -0
- {imap_processing-0.17.0.dist-info → imap_processing-0.19.0.dist-info}/entry_points.txt +0 -0
|
@@ -4,12 +4,227 @@ import dataclasses
|
|
|
4
4
|
import json
|
|
5
5
|
from dataclasses import InitVar, dataclass, field
|
|
6
6
|
from pathlib import Path
|
|
7
|
-
from typing import Optional
|
|
8
7
|
|
|
9
8
|
import numpy as np
|
|
9
|
+
import xarray as xr
|
|
10
|
+
from scipy.stats import circmean, circstd
|
|
10
11
|
|
|
11
12
|
from imap_processing.glows import FLAG_LENGTH
|
|
12
13
|
from imap_processing.glows.utils.constants import TimeTuple
|
|
14
|
+
from imap_processing.spice import geometry
|
|
15
|
+
from imap_processing.spice.geometry import SpiceBody, SpiceFrame
|
|
16
|
+
from imap_processing.spice.spin import (
|
|
17
|
+
get_instrument_spin_phase,
|
|
18
|
+
get_spin_angle,
|
|
19
|
+
get_spin_data,
|
|
20
|
+
)
|
|
21
|
+
from imap_processing.spice.time import met_to_datetime64, met_to_sclkticks, sct_to_et
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@dataclass
|
|
25
|
+
class PipelineSettings: # numpydoc ignore=PR02
|
|
26
|
+
"""
|
|
27
|
+
GLOWS L1B Pipeline Settings for controlling bad-angle and bad-time flag processing.
|
|
28
|
+
|
|
29
|
+
This class extracts pipeline settings from the JSON configuration file processed
|
|
30
|
+
through GlowsAncillaryCombiner.
|
|
31
|
+
|
|
32
|
+
Based on Section 3.12 of the GLOWS algorithm document, the pipeline settings
|
|
33
|
+
file contains parameters for the ground-processing pipeline including thresholds,
|
|
34
|
+
bad-time flags to be activated, bad-angle flags to be activated, and other
|
|
35
|
+
processing controls.
|
|
36
|
+
|
|
37
|
+
Parameters
|
|
38
|
+
----------
|
|
39
|
+
pipeline_dataset : xr.Dataset
|
|
40
|
+
Dataset from GlowsAncillaryCombiner.combined_dataset containing the
|
|
41
|
+
pipeline settings data extracted from the JSON file.
|
|
42
|
+
|
|
43
|
+
Attributes
|
|
44
|
+
----------
|
|
45
|
+
active_bad_angle_flags : list[bool]
|
|
46
|
+
Binary mask determining which of the 4 bad-angle flags are active:
|
|
47
|
+
[is_close_to_uv_source, is_inside_excluded_region,
|
|
48
|
+
is_excluded_by_instr_team, is_suspected_transient]
|
|
49
|
+
Default: All flags set to True (all active).
|
|
50
|
+
|
|
51
|
+
active_bad_time_flags : list[bool]
|
|
52
|
+
Binary mask determining which bad-time flags from onboard processing
|
|
53
|
+
should be used for quality control to identify "good time" L1B blocks.
|
|
54
|
+
|
|
55
|
+
sunrise_offset : float
|
|
56
|
+
Offset in hours to adjust sunrise time relative to onboard settings
|
|
57
|
+
for fine-tuning the day/night boundary determination.
|
|
58
|
+
|
|
59
|
+
sunset_offset : float
|
|
60
|
+
Offset in hours to adjust sunset time relative to onboard settings
|
|
61
|
+
for fine-tuning the day/night boundary determination.
|
|
62
|
+
|
|
63
|
+
processing_thresholds : dict
|
|
64
|
+
Various thresholds and parameters for ground processing pipeline
|
|
65
|
+
that control sensitivity and quality criteria for L1B data processing.
|
|
66
|
+
|
|
67
|
+
Notes
|
|
68
|
+
-----
|
|
69
|
+
Usage example:
|
|
70
|
+
|
|
71
|
+
.. code-block:: python
|
|
72
|
+
|
|
73
|
+
# Create combiner for pipeline settings file
|
|
74
|
+
pipeline_combiner = GlowsAncillaryCombiner(pipeline_settings_files, end_date)
|
|
75
|
+
|
|
76
|
+
# Create PipelineSettings object
|
|
77
|
+
pipeline_settings = PipelineSettings(pipeline_combiner.combined_dataset)
|
|
78
|
+
|
|
79
|
+
# Use the settings
|
|
80
|
+
if pipeline_settings.active_bad_angle_flags[0]: # is_close_to_uv_source
|
|
81
|
+
# Process UV source exclusions
|
|
82
|
+
pass
|
|
83
|
+
"""
|
|
84
|
+
|
|
85
|
+
pipeline_dataset: InitVar[xr.Dataset]
|
|
86
|
+
|
|
87
|
+
# Extracted pipeline settings attributes
|
|
88
|
+
active_bad_angle_flags: list[bool] = field(init=False)
|
|
89
|
+
active_bad_time_flags: list[bool] = field(init=False)
|
|
90
|
+
sunrise_offset: float = field(init=False)
|
|
91
|
+
sunset_offset: float = field(init=False)
|
|
92
|
+
processing_thresholds: dict = field(init=False)
|
|
93
|
+
|
|
94
|
+
def __post_init__(self, pipeline_dataset: xr.Dataset) -> None:
|
|
95
|
+
"""
|
|
96
|
+
Extract pipeline settings from the dataset.
|
|
97
|
+
|
|
98
|
+
Parameters
|
|
99
|
+
----------
|
|
100
|
+
pipeline_dataset : xr.Dataset
|
|
101
|
+
Dataset containing pipeline settings data variables.
|
|
102
|
+
"""
|
|
103
|
+
# Extract active bad-angle flags (default to all True if not present)
|
|
104
|
+
if "active_bad_angle_flags" in pipeline_dataset.data_vars:
|
|
105
|
+
self.active_bad_angle_flags = list(
|
|
106
|
+
pipeline_dataset["active_bad_angle_flags"].values
|
|
107
|
+
)
|
|
108
|
+
else:
|
|
109
|
+
# Default: all 4 bad-angle flags are active
|
|
110
|
+
self.active_bad_angle_flags = [True, True, True, True]
|
|
111
|
+
|
|
112
|
+
# Extract active bad-time flags (default to all True if not present)
|
|
113
|
+
if "active_bad_time_flags" in pipeline_dataset.data_vars:
|
|
114
|
+
self.active_bad_time_flags = list(
|
|
115
|
+
pipeline_dataset["active_bad_time_flags"].values
|
|
116
|
+
)
|
|
117
|
+
else:
|
|
118
|
+
# Default: assume all bad-time flags are active
|
|
119
|
+
self.active_bad_time_flags = [True] * 16 # Typical number of bad-time flags
|
|
120
|
+
|
|
121
|
+
# Extract sunrise/sunset offsets (default to 0.0 if not present)
|
|
122
|
+
self.sunrise_offset = float(pipeline_dataset.get("sunrise_offset", 0.0))
|
|
123
|
+
self.sunset_offset = float(pipeline_dataset.get("sunset_offset", 0.0))
|
|
124
|
+
|
|
125
|
+
# Extract processing thresholds (collect all threshold-related variables)
|
|
126
|
+
self.processing_thresholds = {}
|
|
127
|
+
for var_name in pipeline_dataset.data_vars:
|
|
128
|
+
if "threshold" in var_name.lower() or "limit" in var_name.lower():
|
|
129
|
+
self.processing_thresholds[var_name] = pipeline_dataset[var_name].item()
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
@dataclass
|
|
133
|
+
class AncillaryExclusions:
|
|
134
|
+
"""
|
|
135
|
+
Organize input ancillary files for GLOWS L1B bad-angle flag processing.
|
|
136
|
+
|
|
137
|
+
This class holds the four types of ancillary datasets required for computing
|
|
138
|
+
bad-angle flags in GLOWS L1B histogram processing. All datasets should be
|
|
139
|
+
obtained from the GlowsAncillaryCombiner.combined_dataset property after
|
|
140
|
+
processing the respective ancillary files.
|
|
141
|
+
|
|
142
|
+
Attributes
|
|
143
|
+
----------
|
|
144
|
+
excluded_regions : xr.Dataset
|
|
145
|
+
Dataset containing excluded sky regions with ecliptic coordinates.
|
|
146
|
+
Expected structure from GlowsAncillaryCombiner:
|
|
147
|
+
- 'ecliptic_longitude_deg': DataArray with dimension ('epoch', 'region')
|
|
148
|
+
- 'ecliptic_latitude_deg': DataArray with dimension ('epoch', 'region')
|
|
149
|
+
|
|
150
|
+
uv_sources : xr.Dataset
|
|
151
|
+
Dataset containing UV sources (stars) with coordinates and masking radii.
|
|
152
|
+
Expected structure from GlowsAncillaryCombiner:
|
|
153
|
+
- 'object_name': DataArray with dimension ('epoch', 'source')
|
|
154
|
+
- 'ecliptic_longitude_deg': DataArray with dimension ('epoch', 'source')
|
|
155
|
+
- 'ecliptic_latitude_deg': DataArray with dimension ('epoch', 'source')
|
|
156
|
+
- 'angular_radius_for_masking': DataArray with dimension ('epoch', 'source')
|
|
157
|
+
|
|
158
|
+
suspected_transients : xr.Dataset
|
|
159
|
+
Dataset containing suspected transient signals with time-based masks.
|
|
160
|
+
Expected structure from GlowsAncillaryCombiner:
|
|
161
|
+
- 'l1b_unique_block_identifier', dimensions ('epoch', 'time_block')
|
|
162
|
+
- 'histogram_mask_array', dimensions ('epoch', 'time_block')
|
|
163
|
+
|
|
164
|
+
exclusions_by_instr_team : xr.Dataset
|
|
165
|
+
Dataset containing manual exclusions by instrument team with time-based masks.
|
|
166
|
+
Expected structure from GlowsAncillaryCombiner:
|
|
167
|
+
- 'l1b_unique_block_identifier', dimensions ('epoch', 'time_block')
|
|
168
|
+
- 'histogram_mask_array', dimensions ('epoch', 'time_block')
|
|
169
|
+
|
|
170
|
+
Notes
|
|
171
|
+
-----
|
|
172
|
+
Usage example:
|
|
173
|
+
|
|
174
|
+
.. code-block:: python
|
|
175
|
+
|
|
176
|
+
# Create combiners for each ancillary file type
|
|
177
|
+
excluded_regions_combiner = GlowsAncillaryCombiner(
|
|
178
|
+
excluded_regions_files, end_date)
|
|
179
|
+
uv_sources_combiner = GlowsAncillaryCombiner(uv_sources_files, end_date)
|
|
180
|
+
suspected_transients_combiner = GlowsAncillaryCombiner(
|
|
181
|
+
suspected_transients_files, end_date)
|
|
182
|
+
exclusions_combiner = GlowsAncillaryCombiner(exclusions_files, end_date)
|
|
183
|
+
|
|
184
|
+
# Create AncillaryExclusions object
|
|
185
|
+
exclusions = AncillaryExclusions(
|
|
186
|
+
excluded_regions=excluded_regions_combiner.combined_dataset,
|
|
187
|
+
uv_sources=uv_sources_combiner.combined_dataset,
|
|
188
|
+
suspected_transients=suspected_transients_combiner.combined_dataset,
|
|
189
|
+
exclusions_by_instr_team=exclusions_combiner.combined_dataset
|
|
190
|
+
)
|
|
191
|
+
|
|
192
|
+
# Filter for a specific day using limit_by_day method
|
|
193
|
+
day_exclusions = exclusions.limit_by_day(np.datetime64('2025-09-23'))
|
|
194
|
+
"""
|
|
195
|
+
|
|
196
|
+
excluded_regions: xr.Dataset
|
|
197
|
+
uv_sources: xr.Dataset
|
|
198
|
+
suspected_transients: xr.Dataset
|
|
199
|
+
exclusions_by_instr_team: xr.Dataset
|
|
200
|
+
|
|
201
|
+
def limit_by_day(self, day: np.datetime64) -> "AncillaryExclusions":
|
|
202
|
+
"""
|
|
203
|
+
Return a new AncillaryExclusions object with data filtered for a specified day.
|
|
204
|
+
|
|
205
|
+
This method does not mutate the original object and can be called multiple times
|
|
206
|
+
with different days.
|
|
207
|
+
|
|
208
|
+
Parameters
|
|
209
|
+
----------
|
|
210
|
+
day : np.datetime64
|
|
211
|
+
The day to filter data for.
|
|
212
|
+
|
|
213
|
+
Returns
|
|
214
|
+
-------
|
|
215
|
+
AncillaryExclusions
|
|
216
|
+
New instance with data filtered for the specified day.
|
|
217
|
+
"""
|
|
218
|
+
return AncillaryExclusions(
|
|
219
|
+
excluded_regions=self.excluded_regions.sel(epoch=day, method="nearest"),
|
|
220
|
+
uv_sources=self.uv_sources.sel(epoch=day, method="nearest"),
|
|
221
|
+
suspected_transients=self.suspected_transients.sel(
|
|
222
|
+
epoch=day, method="nearest"
|
|
223
|
+
),
|
|
224
|
+
exclusions_by_instr_team=self.exclusions_by_instr_team.sel(
|
|
225
|
+
epoch=day, method="nearest"
|
|
226
|
+
),
|
|
227
|
+
)
|
|
13
228
|
|
|
14
229
|
|
|
15
230
|
class AncillaryParameters:
|
|
@@ -246,11 +461,11 @@ class DirectEventL1B:
|
|
|
246
461
|
pulse_test_in_progress: InitVar[np.double]
|
|
247
462
|
memory_error_detected: InitVar[np.double]
|
|
248
463
|
# The following variables are created from the InitVar data
|
|
249
|
-
de_flags:
|
|
464
|
+
de_flags: np.ndarray | None = field(init=False, default=None)
|
|
250
465
|
# TODO: First two values of DE are sec/subsec
|
|
251
|
-
direct_event_glows_times:
|
|
466
|
+
direct_event_glows_times: np.ndarray | None = field(init=False, default=None)
|
|
252
467
|
# 3rd value is pulse length
|
|
253
|
-
direct_event_pulse_lengths:
|
|
468
|
+
direct_event_pulse_lengths: np.ndarray | None = field(init=False, default=None)
|
|
254
469
|
# TODO: where does the multi-event flag go?
|
|
255
470
|
|
|
256
471
|
def __post_init__(
|
|
@@ -495,17 +710,20 @@ class HistogramL1B:
|
|
|
495
710
|
# ) # Could be datetime TODO: Can't put a string in data
|
|
496
711
|
imap_spin_angle_bin_cntr: np.ndarray = field(init=False) # Same size as bins
|
|
497
712
|
histogram_flag_array: np.ndarray = field(init=False)
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
713
|
+
# These two are retrieved from spin data
|
|
714
|
+
spin_period_ground_average: np.double = field(init=False)
|
|
715
|
+
spin_period_ground_std_dev: np.double = field(init=False)
|
|
716
|
+
position_angle_offset_average: np.double = field(init=False) # from SPICE
|
|
501
717
|
position_angle_offset_std_dev: np.double = field(init=False) # from SPICE
|
|
502
|
-
spin_axis_orientation_std_dev: np.
|
|
503
|
-
spin_axis_orientation_average: np.
|
|
504
|
-
spacecraft_location_average: np.ndarray = field(init=False) #
|
|
505
|
-
spacecraft_location_std_dev: np.ndarray = field(init=False) #
|
|
506
|
-
spacecraft_velocity_average: np.ndarray = field(init=False) #
|
|
507
|
-
spacecraft_velocity_std_dev: np.ndarray = field(init=False) #
|
|
718
|
+
spin_axis_orientation_std_dev: np.ndarray = field(init=False) # from SPICE
|
|
719
|
+
spin_axis_orientation_average: np.ndarray = field(init=False) # from SPICE
|
|
720
|
+
spacecraft_location_average: np.ndarray = field(init=False) # from SPICE
|
|
721
|
+
spacecraft_location_std_dev: np.ndarray = field(init=False) # from SPICE
|
|
722
|
+
spacecraft_velocity_average: np.ndarray = field(init=False) # from SPICE
|
|
723
|
+
spacecraft_velocity_std_dev: np.ndarray = field(init=False) # from SPICE
|
|
508
724
|
flags: np.ndarray = field(init=False)
|
|
725
|
+
ancillary_exclusions: InitVar[AncillaryExclusions]
|
|
726
|
+
ancillary_parameters: InitVar[AncillaryParameters]
|
|
509
727
|
# TODO:
|
|
510
728
|
# - Determine a good way to output flags as "human readable"
|
|
511
729
|
# - Add spice pieces
|
|
@@ -519,6 +737,8 @@ class HistogramL1B:
|
|
|
519
737
|
hv_voltage_variance: np.double,
|
|
520
738
|
spin_period_variance: np.double,
|
|
521
739
|
pulse_length_variance: np.double,
|
|
740
|
+
ancillary_exclusions: AncillaryExclusions,
|
|
741
|
+
ancillary_parameters: AncillaryParameters,
|
|
522
742
|
) -> None:
|
|
523
743
|
"""
|
|
524
744
|
Will process data.
|
|
@@ -535,63 +755,130 @@ class HistogramL1B:
|
|
|
535
755
|
Encoded spin period variance.
|
|
536
756
|
pulse_length_variance : numpy.double
|
|
537
757
|
Encoded pulse length variance.
|
|
758
|
+
ancillary_exclusions : AncillaryExclusions
|
|
759
|
+
Ancillary exclusions data for bad-angle flag processing.
|
|
760
|
+
ancillary_parameters : AncillaryParameters
|
|
761
|
+
Ancillary parameters for decoding histogram data.
|
|
538
762
|
"""
|
|
539
763
|
# self.histogram_flag_array = np.zeros((2,))
|
|
764
|
+
day = met_to_datetime64(self.imap_start_time)
|
|
540
765
|
|
|
541
|
-
#
|
|
542
|
-
|
|
543
|
-
self.spin_period_ground_average = np.double(-999.9)
|
|
544
|
-
self.spin_period_ground_std_dev = np.double(-999.9)
|
|
545
|
-
self.position_angle_offset_average = np.double(-999.9)
|
|
546
|
-
self.position_angle_offset_std_dev = np.double(-999.9)
|
|
547
|
-
self.spin_axis_orientation_std_dev = np.double(-999.9)
|
|
548
|
-
self.spin_axis_orientation_average = np.double(-999.9)
|
|
549
|
-
self.spacecraft_location_average = np.array([-999.9, -999.9, -999.9])
|
|
550
|
-
self.spacecraft_location_std_dev = np.array([-999.9, -999.9, -999.9])
|
|
551
|
-
self.spacecraft_velocity_average = np.array([-999.9, -999.9, -999.9])
|
|
552
|
-
self.spacecraft_velocity_std_dev = np.array([-999.9, -999.9, -999.9])
|
|
766
|
+
# Add SPICE related variables
|
|
767
|
+
self.update_spice_parameters()
|
|
553
768
|
# Will require some additional inputs
|
|
554
769
|
self.imap_spin_angle_bin_cntr = np.zeros((3600,))
|
|
555
770
|
|
|
556
771
|
# TODO: This should probably be an AWS file
|
|
557
772
|
# TODO Pass in AncillaryParameters object instead of reading here.
|
|
558
|
-
with open(
|
|
559
|
-
Path(__file__).parents[1] / "ancillary" / "l1b_conversion_table_v001.json"
|
|
560
|
-
) as f:
|
|
561
|
-
self.ancillary_parameters = AncillaryParameters(json.loads(f.read()))
|
|
562
773
|
|
|
563
|
-
self.filter_temperature_average =
|
|
774
|
+
self.filter_temperature_average = ancillary_parameters.decode(
|
|
564
775
|
"filter_temperature", self.filter_temperature_average
|
|
565
776
|
)
|
|
566
|
-
self.filter_temperature_std_dev =
|
|
777
|
+
self.filter_temperature_std_dev = ancillary_parameters.decode_std_dev(
|
|
567
778
|
"filter_temperature", filter_temperature_variance
|
|
568
779
|
)
|
|
569
780
|
|
|
570
|
-
self.hv_voltage_average =
|
|
781
|
+
self.hv_voltage_average = ancillary_parameters.decode(
|
|
571
782
|
"hv_voltage", self.hv_voltage_average
|
|
572
783
|
)
|
|
573
|
-
self.hv_voltage_std_dev =
|
|
784
|
+
self.hv_voltage_std_dev = ancillary_parameters.decode_std_dev(
|
|
574
785
|
"hv_voltage", hv_voltage_variance
|
|
575
786
|
)
|
|
576
|
-
self.spin_period_average =
|
|
787
|
+
self.spin_period_average = ancillary_parameters.decode(
|
|
577
788
|
"spin_period", self.spin_period_average
|
|
578
789
|
)
|
|
579
|
-
self.spin_period_std_dev =
|
|
790
|
+
self.spin_period_std_dev = ancillary_parameters.decode_std_dev(
|
|
580
791
|
"spin_period", spin_period_variance
|
|
581
792
|
)
|
|
582
|
-
self.pulse_length_average =
|
|
793
|
+
self.pulse_length_average = ancillary_parameters.decode(
|
|
583
794
|
"pulse_length", self.pulse_length_average
|
|
584
795
|
)
|
|
585
|
-
self.pulse_length_std_dev =
|
|
796
|
+
self.pulse_length_std_dev = ancillary_parameters.decode_std_dev(
|
|
586
797
|
"pulse_length", pulse_length_variance
|
|
587
798
|
)
|
|
588
799
|
|
|
589
|
-
|
|
800
|
+
# get the data for the correct day
|
|
801
|
+
day_exclusions = ancillary_exclusions.limit_by_day(day)
|
|
802
|
+
|
|
803
|
+
# Initialize histogram flag array: [is_close_to_uv_source,
|
|
804
|
+
# is_inside_excluded_region, is_excluded_by_instr_team,
|
|
805
|
+
# is_suspected_transient] x 3600 bins
|
|
806
|
+
self.histogram_flag_array = self._compute_histogram_flag_array(day_exclusions)
|
|
590
807
|
# self.unique_block_identifier = np.datetime_as_string(
|
|
591
808
|
# np.datetime64(int(self.imap_start_time), "ns"), "s"
|
|
592
809
|
# )
|
|
593
810
|
self.flags = np.ones((FLAG_LENGTH,), dtype=np.uint8)
|
|
594
811
|
|
|
812
|
+
def update_spice_parameters(self) -> None:
|
|
813
|
+
"""Update SPICE parameters based on the current state."""
|
|
814
|
+
data_start_met = self.imap_start_time
|
|
815
|
+
# use of imap_start_time and glows_time_offset is correct.
|
|
816
|
+
data_end_met = np.double(self.imap_start_time) + np.double(
|
|
817
|
+
self.glows_time_offset
|
|
818
|
+
)
|
|
819
|
+
data_start_time_et = sct_to_et(met_to_sclkticks(data_start_met))
|
|
820
|
+
data_end_time_et = sct_to_et(met_to_sclkticks(data_end_met))
|
|
821
|
+
|
|
822
|
+
time_range = np.arange(data_start_time_et, data_end_time_et)
|
|
823
|
+
|
|
824
|
+
# Calculate spin period
|
|
825
|
+
# ---------------------
|
|
826
|
+
spin_data = get_spin_data()
|
|
827
|
+
# select spin data within the range from data start time to end time
|
|
828
|
+
spin_data = spin_data[
|
|
829
|
+
(spin_data["spin_start_met"] >= data_start_met)
|
|
830
|
+
& (spin_data["spin_start_met"] <= data_end_met)
|
|
831
|
+
]
|
|
832
|
+
|
|
833
|
+
self.spin_period_ground_average = np.average(spin_data["spin_period_sec"])
|
|
834
|
+
self.spin_period_ground_std_dev = np.std(spin_data["spin_period_sec"])
|
|
835
|
+
|
|
836
|
+
# Calculate position angle offset
|
|
837
|
+
# --------------------------------
|
|
838
|
+
angle_offset = 360 - get_spin_angle(
|
|
839
|
+
get_instrument_spin_phase(
|
|
840
|
+
self.imap_start_time, instrument=geometry.SpiceFrame.IMAP_GLOWS
|
|
841
|
+
),
|
|
842
|
+
degrees=True,
|
|
843
|
+
)
|
|
844
|
+
self.position_angle_offset_average = np.double(angle_offset)
|
|
845
|
+
self.position_angle_offset_std_dev = np.double(
|
|
846
|
+
0.0
|
|
847
|
+
) # Set to zero per algorithm document
|
|
848
|
+
|
|
849
|
+
# Calculate spin axis orientation
|
|
850
|
+
|
|
851
|
+
spin_axis_all_times = geometry.cartesian_to_latitudinal(
|
|
852
|
+
geometry.frame_transform(
|
|
853
|
+
time_range,
|
|
854
|
+
np.array([0, 0, 1]),
|
|
855
|
+
SpiceFrame.IMAP_DPS,
|
|
856
|
+
SpiceFrame.ECLIPJ2000,
|
|
857
|
+
)
|
|
858
|
+
)
|
|
859
|
+
# Calculate circular statistics for longitude (wraps around)
|
|
860
|
+
lon_mean = circmean(spin_axis_all_times[..., 1], low=-np.pi, high=np.pi)
|
|
861
|
+
lon_std = circstd(spin_axis_all_times[..., 1], low=-np.pi, high=np.pi)
|
|
862
|
+
lat_mean = circmean(spin_axis_all_times[..., 2], low=-np.pi, high=np.pi)
|
|
863
|
+
lat_std = circstd(spin_axis_all_times[..., 2], low=-np.pi, high=np.pi)
|
|
864
|
+
self.spin_axis_orientation_average = np.array([lon_mean, lat_mean])
|
|
865
|
+
self.spin_axis_orientation_std_dev = np.array([lon_std, lat_std])
|
|
866
|
+
|
|
867
|
+
# Calculate spacecraft location and velocity
|
|
868
|
+
# ------------------------------------------
|
|
869
|
+
# imap_state returns [x, y, z, vx, vy, vz].
|
|
870
|
+
# First three columns for position and last three for velocity.
|
|
871
|
+
imap_state = geometry.imap_state(
|
|
872
|
+
et=time_range, ref_frame=SpiceFrame.ECLIPJ2000, observer=SpiceBody.SUN
|
|
873
|
+
)
|
|
874
|
+
position = imap_state[:, :3]
|
|
875
|
+
velocity = imap_state[:, 3:]
|
|
876
|
+
# averange and standard deviation over time (rows)
|
|
877
|
+
self.spacecraft_location_average = np.average(position, axis=0)
|
|
878
|
+
self.spacecraft_location_std_dev = np.std(position, axis=0)
|
|
879
|
+
self.spacecraft_velocity_average = np.average(velocity, axis=0)
|
|
880
|
+
self.spacecraft_velocity_std_dev = np.std(velocity, axis=0)
|
|
881
|
+
|
|
595
882
|
def output_data(self) -> tuple:
|
|
596
883
|
"""
|
|
597
884
|
Output the L1B DataArrays as a tuple.
|
|
@@ -628,3 +915,28 @@ class HistogramL1B:
|
|
|
628
915
|
)
|
|
629
916
|
|
|
630
917
|
return flags
|
|
918
|
+
|
|
919
|
+
def _compute_histogram_flag_array(
|
|
920
|
+
self, exclusions: AncillaryExclusions
|
|
921
|
+
) -> np.ndarray:
|
|
922
|
+
"""
|
|
923
|
+
Compute the histogram flag array for bad-angle flags.
|
|
924
|
+
|
|
925
|
+
Creates a (4, 3600) array where each row represents a different flag type:
|
|
926
|
+
- Row 0: is_close_to_uv_source
|
|
927
|
+
- Row 1: is_inside_excluded_region
|
|
928
|
+
- Row 2: is_excluded_by_instr_team
|
|
929
|
+
- Row 3: is_suspected_transient
|
|
930
|
+
|
|
931
|
+
Parameters
|
|
932
|
+
----------
|
|
933
|
+
exclusions : AncillaryExclusions
|
|
934
|
+
Ancillary exclusions data filtered for the current day.
|
|
935
|
+
|
|
936
|
+
Returns
|
|
937
|
+
-------
|
|
938
|
+
np.ndarray
|
|
939
|
+
Array of shape (4, 3600) with bad-angle flags for each bin.
|
|
940
|
+
"""
|
|
941
|
+
# TODO: fill out once spice data is available
|
|
942
|
+
return np.zeros((4, 3600), dtype=np.uint8)
|
|
@@ -218,6 +218,11 @@ def create_l2_dataset(
|
|
|
218
218
|
"spacecraft_velocity_std_dev",
|
|
219
219
|
]
|
|
220
220
|
|
|
221
|
+
longitudinal_variables = [
|
|
222
|
+
"spin_axis_orientation_average",
|
|
223
|
+
"spin_axis_orientation_std_dev",
|
|
224
|
+
]
|
|
225
|
+
|
|
221
226
|
for key, value in dataclasses.asdict(histogram_l2).items():
|
|
222
227
|
if key in ecliptic_variables:
|
|
223
228
|
output[key] = xr.DataArray(
|
|
@@ -225,6 +230,12 @@ def create_l2_dataset(
|
|
|
225
230
|
dims=["epoch", "ecliptic"],
|
|
226
231
|
attrs=attrs.get_variable_attributes(key),
|
|
227
232
|
)
|
|
233
|
+
elif key in longitudinal_variables:
|
|
234
|
+
output[key] = xr.DataArray(
|
|
235
|
+
value,
|
|
236
|
+
dims=["epoch", "latitudinal"],
|
|
237
|
+
attrs=attrs.get_variable_attributes(key),
|
|
238
|
+
)
|
|
228
239
|
elif key == "bad_time_flag_occurrences":
|
|
229
240
|
output[key] = xr.DataArray(
|
|
230
241
|
value,
|
imap_processing/hi/hi_l1a.py
CHANGED
|
@@ -3,7 +3,6 @@
|
|
|
3
3
|
import logging
|
|
4
4
|
from collections import defaultdict
|
|
5
5
|
from pathlib import Path
|
|
6
|
-
from typing import Union
|
|
7
6
|
|
|
8
7
|
import numpy as np
|
|
9
8
|
import xarray as xr
|
|
@@ -54,10 +53,52 @@ LONG_COUNTERS = (
|
|
|
54
53
|
)
|
|
55
54
|
TOTAL_COUNTERS = ("a_total", "b_total", "c_total", "fee_de_recd", "fee_de_sent")
|
|
56
55
|
|
|
56
|
+
# MEMDMP Packet definition of uint32 fields.
# This is a mapping of variable name to index when the dump_data in the
# HVSCI MEMDMP packet is interpreted as an array of uint32 values.
# Each value is the zero-based offset of that item within a single packet's
# dump_data chunk; `finish_memdmp_dataset` uses these offsets (with a stride
# of num_bytes // 4 across concatenated packets) to extract one value per
# packet for each named variable.
MEMDMP_DATA_INDS = {
    "lastbin_shorten": 9,
    "coinc_length": 60,
    "de_timetag": 65,
    # Coincidence-window min/max pairs for each detector combination
    # (AB, AC, BA, BC, CA, CB, CC).
    "ab_min": 67,
    "ab_max": 68,
    "ac_min": 69,
    "ac_max": 70,
    "ba_min": 71,
    "ba_max": 72,
    "bc_min": 73,
    "bc_max": 74,
    "ca_min": 75,
    "ca_max": 76,
    "cb_min": 77,
    "cb_max": 78,
    "cc_min": 79,
    "cc_max": 80,
    # Constant-fraction-discriminator DAC settings for detectors A-D.
    "cfd_dac_a": 82,
    "cfd_dac_b": 83,
    "cfd_dac_c": 84,
    "cfd_dac_d": 85,
    "de_mask": 87,
    # Rank settings for coincidence types.
    "ab_rnk": 89,
    "cc_rnk": 90,
    "ac_rnk": 91,
    "bc_rnk": 92,
    "abc_rnk": 93,
    "acc_rnk": 94,
    "bcc_rnk": 95,
    "abcc_rnk": 96,
    "esa_table": 100,
    "esa_steps": 101,
    "sci_cull": 106,
    "eng_cull": 107,
    "spins_per_step": 108,
    "spins_per_de": 109,
}
|
|
97
|
+
|
|
57
98
|
logger = logging.getLogger(__name__)
|
|
58
99
|
|
|
59
100
|
|
|
60
|
-
def hi_l1a(packet_file_path:
|
|
101
|
+
def hi_l1a(packet_file_path: str | Path) -> list[xr.Dataset]:
|
|
61
102
|
"""
|
|
62
103
|
Will process IMAP raw data to l1a.
|
|
63
104
|
|
|
@@ -95,6 +136,9 @@ def hi_l1a(packet_file_path: Union[str, Path]) -> list[xr.Dataset]:
|
|
|
95
136
|
elif apid_enum in [HIAPID.H45_DIAG_FEE, HIAPID.H90_DIAG_FEE]:
|
|
96
137
|
data = datasets_by_apid[apid]
|
|
97
138
|
gattr_key = "imap_hi_l1a_diagfee_attrs"
|
|
139
|
+
elif apid_enum in [HIAPID.H45_MEMDMP, HIAPID.H90_MEMDMP]:
|
|
140
|
+
data = finish_memdmp_dataset(datasets_by_apid[apid])
|
|
141
|
+
gattr_key = "imap_hi_l1a_memdmp_attrs"
|
|
98
142
|
|
|
99
143
|
# Update dataset global attributes
|
|
100
144
|
attr_mgr = ImapCdfAttributes()
|
|
@@ -111,7 +155,7 @@ def hi_l1a(packet_file_path: Union[str, Path]) -> list[xr.Dataset]:
|
|
|
111
155
|
|
|
112
156
|
|
|
113
157
|
def hi_packet_file_to_datasets(
|
|
114
|
-
packet_file_path:
|
|
158
|
+
packet_file_path: str | Path, use_derived_value: bool = False
|
|
115
159
|
) -> dict[int, xr.Dataset]:
|
|
116
160
|
"""
|
|
117
161
|
Extract hi datasets from packet file.
|
|
@@ -445,3 +489,80 @@ def unpack_hist_counter(counter_bytes: bytes) -> NDArray[np.uint16]:
|
|
|
445
489
|
odd_uint12 = ((split_unit8 & (2**4 - 1)) << 8) + lower_uint8
|
|
446
490
|
output_array = np.column_stack((even_uint12, odd_uint12)).reshape(-1, 90)
|
|
447
491
|
return output_array
|
|
492
|
+
|
|
493
|
+
|
|
494
|
+
def finish_memdmp_dataset(input_ds: xr.Dataset) -> xr.Dataset:
    """
    Create dataset for a number of Hi Memory Dump packets.

    Keeps only the MEMDMP packets generated upon entering HVSCI mode
    (identified by ``pkt_len == 521``), renames ``shcoarse`` to
    ``ccsds_met``, attaches CDF variable attributes, and parses specific
    memory items out of the raw binary dump into per-epoch variables.

    Parameters
    ----------
    input_ds : xarray.Dataset
        Dataset of Hi-45 or Hi-90 MEMDMP packets generated using the
        `imap_processing.utils.packet_file_to_datasets` function.

    Returns
    -------
    dataset : xarray.Dataset
        Dataset containing data from only MEMDMP packets generated upon entering
        HVSCI. Specific memory items have been parsed out of the chunk of dumped
        memory.
    """
    attr_mgr = ImapCdfAttributes()
    attr_mgr.add_instrument_global_attrs(instrument="hi")
    attr_mgr.add_instrument_variable_attrs(instrument="hi", level=None)

    # We only care about the MEMDMP packets that are generated upon
    # entry to HVSCI mode. This is very hacky, but the suggested way
    # to identify these MEMDMP packets is to check that pktlen == 521
    # Here, we remove packets where pktlen != 521
    dataset = input_ds.where(input_ds["pkt_len"] == 521, drop=True)
    # Lazy %-style args so the message is only formatted when DEBUG is on.
    # BUG FIX: the previous adjacent f-strings were concatenated without
    # separating spaces, producing run-together log text.
    logger.debug(
        "After trimming MEMDMP packets with pkt_len != 521, "
        "%d packets remain with a set of MEMORY_IDs = %s",
        dataset["epoch"].data.size,
        set(dataset["memory_id"].data),
    )

    # Rename shcoarse variable (do this first since it copies the input_ds)
    dataset = dataset.rename_vars({"shcoarse": "ccsds_met"})

    dataset.epoch.attrs.update(
        attr_mgr.get_variable_attributes("epoch"),
    )

    # Update existing variable attributes
    for var_name in [
        "version",
        "type",
        "sec_hdr_flg",
        "pkt_apid",
        "seq_flgs",
        "src_seq_ctr",
        "pkt_len",
        "ccsds_met",
        "cksum",
    ]:
        attrs = attr_mgr.get_variable_attributes(f"hi_hist_{var_name}")
        dataset.data_vars[var_name].attrs.update(attrs)

    new_vars = {}
    # Concatenate the dump_data from all packets into a single bytes string and
    # interpret that bytes string as an array of big-endian uint32 values.
    # b"".join is linear in total size, unlike repeated bytes addition.
    full_uint32_data = np.frombuffer(
        b"".join(dataset["dump_data"].data), dtype=">u4"
    )
    # index_stride is the stride to traverse from packet to packet for a given
    # item in the binary dump data (num_bytes per packet / 4 bytes per uint32).
    index_stride = int(dataset["num_bytes"].data[0] // 4)
    for new_var, offset in MEMDMP_DATA_INDS.items():
        # The indices for each variable in the dump_data is the starting
        # offset index with a stride of the number of bytes in the dump
        # data divided by 4 (32-bit values).
        new_vars[new_var] = xr.DataArray(
            data=full_uint32_data[offset::index_stride],
            dims=["epoch"],
        )

    # Remove binary memory dump data and add parsed variables.
    # Use drop_vars: Dataset.drop is deprecated/removed in modern xarray.
    dataset = dataset.drop_vars("dump_data")
    dataset.update(new_vars)

    return dataset
|