imap-processing 0.11.0__py3-none-any.whl → 0.13.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of imap-processing might be problematic. Click here for more details.
- imap_processing/__init__.py +11 -11
- imap_processing/_version.py +2 -2
- imap_processing/ccsds/ccsds_data.py +1 -2
- imap_processing/ccsds/excel_to_xtce.py +66 -18
- imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +24 -40
- imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +934 -42
- imap_processing/cdf/config/imap_codice_l1b_variable_attrs.yaml +1846 -128
- imap_processing/cdf/config/imap_glows_global_cdf_attrs.yaml +0 -5
- imap_processing/cdf/config/imap_hi_global_cdf_attrs.yaml +10 -11
- imap_processing/cdf/config/imap_hi_variable_attrs.yaml +17 -19
- imap_processing/cdf/config/imap_hit_global_cdf_attrs.yaml +27 -14
- imap_processing/cdf/config/imap_hit_l1a_variable_attrs.yaml +106 -116
- imap_processing/cdf/config/imap_hit_l1b_variable_attrs.yaml +120 -145
- imap_processing/cdf/config/imap_hit_l2_variable_attrs.yaml +14 -0
- imap_processing/cdf/config/imap_idex_global_cdf_attrs.yaml +25 -9
- imap_processing/cdf/config/imap_idex_l1a_variable_attrs.yaml +6 -4
- imap_processing/cdf/config/imap_idex_l1b_variable_attrs.yaml +3 -3
- imap_processing/cdf/config/imap_lo_global_cdf_attrs.yaml +0 -12
- imap_processing/cdf/config/imap_lo_l1a_variable_attrs.yaml +1 -1
- imap_processing/cdf/config/imap_mag_global_cdf_attrs.yaml +23 -20
- imap_processing/cdf/config/imap_mag_l1a_variable_attrs.yaml +361 -0
- imap_processing/cdf/config/imap_mag_l1b_variable_attrs.yaml +160 -0
- imap_processing/cdf/config/imap_mag_l1c_variable_attrs.yaml +160 -0
- imap_processing/cdf/config/imap_spacecraft_global_cdf_attrs.yaml +18 -0
- imap_processing/cdf/config/imap_spacecraft_variable_attrs.yaml +40 -0
- imap_processing/cdf/config/imap_swapi_global_cdf_attrs.yaml +1 -5
- imap_processing/cdf/config/imap_swapi_variable_attrs.yaml +22 -0
- imap_processing/cdf/config/imap_swe_global_cdf_attrs.yaml +12 -4
- imap_processing/cdf/config/imap_swe_l1a_variable_attrs.yaml +16 -2
- imap_processing/cdf/config/imap_swe_l1b_variable_attrs.yaml +64 -52
- imap_processing/cdf/config/imap_swe_l2_variable_attrs.yaml +71 -47
- imap_processing/cdf/config/imap_ultra_global_cdf_attrs.yaml +180 -19
- imap_processing/cdf/config/imap_ultra_l1a_variable_attrs.yaml +5045 -41
- imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +80 -17
- imap_processing/cdf/config/imap_ultra_l1c_variable_attrs.yaml +32 -57
- imap_processing/cdf/utils.py +52 -38
- imap_processing/cli.py +477 -233
- imap_processing/codice/codice_l1a.py +466 -131
- imap_processing/codice/codice_l1b.py +51 -152
- imap_processing/codice/constants.py +1360 -569
- imap_processing/codice/decompress.py +2 -6
- imap_processing/ena_maps/ena_maps.py +1103 -146
- imap_processing/ena_maps/utils/coordinates.py +19 -0
- imap_processing/ena_maps/utils/map_utils.py +14 -17
- imap_processing/ena_maps/utils/spatial_utils.py +55 -52
- imap_processing/glows/l1a/glows_l1a.py +28 -99
- imap_processing/glows/l1a/glows_l1a_data.py +2 -2
- imap_processing/glows/l1b/glows_l1b.py +1 -4
- imap_processing/glows/l1b/glows_l1b_data.py +1 -3
- imap_processing/glows/l2/glows_l2.py +2 -5
- imap_processing/hi/l1a/hi_l1a.py +54 -29
- imap_processing/hi/l1a/histogram.py +0 -1
- imap_processing/hi/l1a/science_direct_event.py +6 -8
- imap_processing/hi/l1b/hi_l1b.py +111 -82
- imap_processing/hi/l1c/hi_l1c.py +416 -32
- imap_processing/hi/utils.py +58 -12
- imap_processing/hit/ancillary/imap_hit_l1b-to-l2-sector-dt0-factors_20250219_v002.csv +81 -0
- imap_processing/hit/ancillary/imap_hit_l1b-to-l2-standard-dt0-factors_20250219_v002.csv +205 -0
- imap_processing/hit/ancillary/imap_hit_l1b-to-l2-standard-dt1-factors_20250219_v002.csv +205 -0
- imap_processing/hit/ancillary/imap_hit_l1b-to-l2-standard-dt2-factors_20250219_v002.csv +205 -0
- imap_processing/hit/ancillary/imap_hit_l1b-to-l2-standard-dt3-factors_20250219_v002.csv +205 -0
- imap_processing/hit/ancillary/imap_hit_l1b-to-l2-summed-dt0-factors_20250219_v002.csv +68 -0
- imap_processing/hit/hit_utils.py +235 -5
- imap_processing/hit/l0/constants.py +20 -11
- imap_processing/hit/l0/decom_hit.py +21 -5
- imap_processing/hit/l1a/hit_l1a.py +71 -75
- imap_processing/hit/l1b/constants.py +321 -0
- imap_processing/hit/l1b/hit_l1b.py +377 -67
- imap_processing/hit/l2/constants.py +318 -0
- imap_processing/hit/l2/hit_l2.py +723 -0
- imap_processing/hit/packet_definitions/hit_packet_definitions.xml +1323 -71
- imap_processing/ialirt/l0/mag_l0_ialirt_data.py +155 -0
- imap_processing/ialirt/l0/parse_mag.py +374 -0
- imap_processing/ialirt/l0/process_swapi.py +69 -0
- imap_processing/ialirt/l0/process_swe.py +548 -0
- imap_processing/ialirt/packet_definitions/ialirt.xml +216 -208
- imap_processing/ialirt/packet_definitions/ialirt_codicehi.xml +1 -1
- imap_processing/ialirt/packet_definitions/ialirt_codicelo.xml +1 -1
- imap_processing/ialirt/packet_definitions/ialirt_mag.xml +115 -0
- imap_processing/ialirt/packet_definitions/ialirt_swapi.xml +14 -14
- imap_processing/ialirt/utils/grouping.py +114 -0
- imap_processing/ialirt/utils/time.py +29 -0
- imap_processing/idex/atomic_masses.csv +22 -0
- imap_processing/idex/decode.py +2 -2
- imap_processing/idex/idex_constants.py +33 -0
- imap_processing/idex/idex_l0.py +22 -8
- imap_processing/idex/idex_l1a.py +81 -51
- imap_processing/idex/idex_l1b.py +13 -39
- imap_processing/idex/idex_l2a.py +823 -0
- imap_processing/idex/idex_l2b.py +120 -0
- imap_processing/idex/idex_variable_unpacking_and_eu_conversion.csv +11 -11
- imap_processing/idex/packet_definitions/idex_housekeeping_packet_definition.xml +9130 -0
- imap_processing/lo/l0/lo_science.py +7 -2
- imap_processing/lo/l1a/lo_l1a.py +1 -5
- imap_processing/lo/l1b/lo_l1b.py +702 -29
- imap_processing/lo/l1b/tof_conversions.py +11 -0
- imap_processing/lo/l1c/lo_l1c.py +1 -4
- imap_processing/mag/constants.py +51 -0
- imap_processing/mag/imap_mag_sdc_configuration_v001.py +8 -0
- imap_processing/mag/l0/decom_mag.py +10 -3
- imap_processing/mag/l1a/mag_l1a.py +23 -19
- imap_processing/mag/l1a/mag_l1a_data.py +35 -10
- imap_processing/mag/l1b/mag_l1b.py +259 -50
- imap_processing/mag/l1c/interpolation_methods.py +388 -0
- imap_processing/mag/l1c/mag_l1c.py +621 -17
- imap_processing/mag/l2/mag_l2.py +140 -0
- imap_processing/mag/l2/mag_l2_data.py +288 -0
- imap_processing/quality_flags.py +1 -0
- imap_processing/spacecraft/packet_definitions/scid_x252.xml +538 -0
- imap_processing/spacecraft/quaternions.py +121 -0
- imap_processing/spice/geometry.py +19 -22
- imap_processing/spice/kernels.py +0 -276
- imap_processing/spice/pointing_frame.py +257 -0
- imap_processing/spice/repoint.py +149 -0
- imap_processing/spice/spin.py +38 -33
- imap_processing/spice/time.py +24 -0
- imap_processing/swapi/l1/swapi_l1.py +20 -12
- imap_processing/swapi/l2/swapi_l2.py +116 -5
- imap_processing/swapi/swapi_utils.py +32 -0
- imap_processing/swe/l1a/swe_l1a.py +44 -12
- imap_processing/swe/l1a/swe_science.py +13 -13
- imap_processing/swe/l1b/swe_l1b.py +898 -23
- imap_processing/swe/l2/swe_l2.py +75 -136
- imap_processing/swe/packet_definitions/swe_packet_definition.xml +1121 -1
- imap_processing/swe/utils/swe_constants.py +64 -0
- imap_processing/swe/utils/swe_utils.py +85 -28
- imap_processing/tests/ccsds/test_data/expected_output.xml +40 -1
- imap_processing/tests/ccsds/test_excel_to_xtce.py +24 -21
- imap_processing/tests/cdf/test_data/imap_instrument2_global_cdf_attrs.yaml +0 -2
- imap_processing/tests/cdf/test_utils.py +14 -16
- imap_processing/tests/codice/conftest.py +44 -33
- imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-counters-aggregated_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-counters-singles_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-ialirt_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-omni_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-pha_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-priorities_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-sectored_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-counters-aggregated_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-counters-singles_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-ialirt_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-nsw-angular_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-nsw-priority_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-nsw-species_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-pha_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-sw-angular_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-sw-priority_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-sw-species_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/test_codice_l1a.py +126 -53
- imap_processing/tests/codice/test_codice_l1b.py +6 -7
- imap_processing/tests/codice/test_decompress.py +4 -4
- imap_processing/tests/conftest.py +239 -27
- imap_processing/tests/ena_maps/conftest.py +51 -0
- imap_processing/tests/ena_maps/test_ena_maps.py +1068 -110
- imap_processing/tests/ena_maps/test_map_utils.py +66 -43
- imap_processing/tests/ena_maps/test_spatial_utils.py +17 -21
- imap_processing/tests/glows/conftest.py +10 -14
- imap_processing/tests/glows/test_glows_decom.py +4 -4
- imap_processing/tests/glows/test_glows_l1a_cdf.py +6 -27
- imap_processing/tests/glows/test_glows_l1a_data.py +6 -8
- imap_processing/tests/glows/test_glows_l1b.py +11 -11
- imap_processing/tests/glows/test_glows_l1b_data.py +5 -5
- imap_processing/tests/glows/test_glows_l2.py +2 -8
- imap_processing/tests/hi/conftest.py +1 -1
- imap_processing/tests/hi/data/l0/H45_diag_fee_20250208.bin +0 -0
- imap_processing/tests/hi/data/l0/H45_diag_fee_20250208_verify.csv +205 -0
- imap_processing/tests/hi/test_hi_l1b.py +22 -27
- imap_processing/tests/hi/test_hi_l1c.py +249 -18
- imap_processing/tests/hi/test_l1a.py +35 -7
- imap_processing/tests/hi/test_science_direct_event.py +3 -3
- imap_processing/tests/hi/test_utils.py +24 -2
- imap_processing/tests/hit/helpers/l1_validation.py +74 -73
- imap_processing/tests/hit/test_data/hskp_sample.ccsds +0 -0
- imap_processing/tests/hit/test_data/imap_hit_l0_raw_20100105_v001.pkts +0 -0
- imap_processing/tests/hit/test_decom_hit.py +5 -1
- imap_processing/tests/hit/test_hit_l1a.py +32 -36
- imap_processing/tests/hit/test_hit_l1b.py +300 -81
- imap_processing/tests/hit/test_hit_l2.py +716 -0
- imap_processing/tests/hit/test_hit_utils.py +184 -7
- imap_processing/tests/hit/validation_data/hit_l1b_standard_sample2_nsrl_v4_3decimals.csv +62 -62
- imap_processing/tests/hit/validation_data/hskp_sample_eu_3_6_2025.csv +89 -0
- imap_processing/tests/hit/validation_data/hskp_sample_raw.csv +89 -88
- imap_processing/tests/hit/validation_data/sci_sample_raw.csv +1 -1
- imap_processing/tests/ialirt/data/l0/461971383-404.bin +0 -0
- imap_processing/tests/ialirt/data/l0/461971384-405.bin +0 -0
- imap_processing/tests/ialirt/data/l0/461971385-406.bin +0 -0
- imap_processing/tests/ialirt/data/l0/461971386-407.bin +0 -0
- imap_processing/tests/ialirt/data/l0/461971387-408.bin +0 -0
- imap_processing/tests/ialirt/data/l0/461971388-409.bin +0 -0
- imap_processing/tests/ialirt/data/l0/461971389-410.bin +0 -0
- imap_processing/tests/ialirt/data/l0/461971390-411.bin +0 -0
- imap_processing/tests/ialirt/data/l0/461971391-412.bin +0 -0
- imap_processing/tests/ialirt/data/l0/sample_decoded_i-alirt_data.csv +383 -0
- imap_processing/tests/ialirt/unit/test_decom_ialirt.py +16 -81
- imap_processing/tests/ialirt/unit/test_grouping.py +81 -0
- imap_processing/tests/ialirt/unit/test_parse_mag.py +223 -0
- imap_processing/tests/ialirt/unit/test_process_codicehi.py +3 -3
- imap_processing/tests/ialirt/unit/test_process_codicelo.py +3 -10
- imap_processing/tests/ialirt/unit/test_process_ephemeris.py +4 -4
- imap_processing/tests/ialirt/unit/test_process_hit.py +3 -3
- imap_processing/tests/ialirt/unit/test_process_swapi.py +24 -16
- imap_processing/tests/ialirt/unit/test_process_swe.py +319 -6
- imap_processing/tests/ialirt/unit/test_time.py +16 -0
- imap_processing/tests/idex/conftest.py +127 -6
- imap_processing/tests/idex/test_data/imap_idex_l0_raw_20231218_v001.pkts +0 -0
- imap_processing/tests/idex/test_data/imap_idex_l0_raw_20241206_v001.pkts +0 -0
- imap_processing/tests/idex/test_data/imap_idex_l0_raw_20250108_v001.pkts +0 -0
- imap_processing/tests/idex/test_data/impact_14_tof_high_data.txt +4508 -4508
- imap_processing/tests/idex/test_idex_l0.py +33 -11
- imap_processing/tests/idex/test_idex_l1a.py +92 -21
- imap_processing/tests/idex/test_idex_l1b.py +106 -27
- imap_processing/tests/idex/test_idex_l2a.py +399 -0
- imap_processing/tests/idex/test_idex_l2b.py +93 -0
- imap_processing/tests/lo/test_cdfs/imap_lo_l1a_de_20241022_v002.cdf +0 -0
- imap_processing/tests/lo/test_cdfs/imap_lo_l1a_spin_20241022_v002.cdf +0 -0
- imap_processing/tests/lo/test_lo_l1a.py +3 -3
- imap_processing/tests/lo/test_lo_l1b.py +515 -6
- imap_processing/tests/lo/test_lo_l1c.py +1 -1
- imap_processing/tests/lo/test_lo_science.py +7 -7
- imap_processing/tests/lo/test_star_sensor.py +1 -1
- imap_processing/tests/mag/conftest.py +120 -2
- imap_processing/tests/mag/test_mag_decom.py +5 -4
- imap_processing/tests/mag/test_mag_l1a.py +51 -7
- imap_processing/tests/mag/test_mag_l1b.py +40 -59
- imap_processing/tests/mag/test_mag_l1c.py +354 -19
- imap_processing/tests/mag/test_mag_l2.py +130 -0
- imap_processing/tests/mag/test_mag_validation.py +247 -26
- imap_processing/tests/mag/validation/L1b/T009/MAGScience-normal-(2,2)-8s-20250204-16h39.csv +17 -0
- imap_processing/tests/mag/validation/L1b/T009/mag-l1a-l1b-t009-magi-out.csv +16 -16
- imap_processing/tests/mag/validation/L1b/T009/mag-l1a-l1b-t009-mago-out.csv +16 -16
- imap_processing/tests/mag/validation/L1b/T010/MAGScience-normal-(2,2)-8s-20250206-12h05.csv +17 -0
- imap_processing/tests/mag/validation/L1b/T011/MAGScience-normal-(2,2)-8s-20250204-16h08.csv +17 -0
- imap_processing/tests/mag/validation/L1b/T011/mag-l1a-l1b-t011-magi-out.csv +16 -16
- imap_processing/tests/mag/validation/L1b/T011/mag-l1a-l1b-t011-mago-out.csv +16 -16
- imap_processing/tests/mag/validation/L1b/T012/MAGScience-normal-(2,2)-8s-20250204-16h08.csv +17 -0
- imap_processing/tests/mag/validation/L1b/T012/data.bin +0 -0
- imap_processing/tests/mag/validation/L1b/T012/field_like_all_ranges.txt +19200 -0
- imap_processing/tests/mag/validation/L1b/T012/mag-l1a-l1b-t012-cal.cdf +0 -0
- imap_processing/tests/mag/validation/L1b/T012/mag-l1a-l1b-t012-in.csv +17 -0
- imap_processing/tests/mag/validation/L1b/T012/mag-l1a-l1b-t012-magi-out.csv +17 -0
- imap_processing/tests/mag/validation/L1b/T012/mag-l1a-l1b-t012-mago-out.csv +17 -0
- imap_processing/tests/mag/validation/L1c/T013/mag-l1b-l1c-t013-magi-normal-in.csv +1217 -0
- imap_processing/tests/mag/validation/L1c/T013/mag-l1b-l1c-t013-magi-normal-out.csv +1857 -0
- imap_processing/tests/mag/validation/L1c/T013/mag-l1b-l1c-t013-mago-normal-in.csv +1217 -0
- imap_processing/tests/mag/validation/L1c/T013/mag-l1b-l1c-t013-mago-normal-out.csv +1857 -0
- imap_processing/tests/mag/validation/L1c/T014/mag-l1b-l1c-t014-magi-normal-in.csv +1217 -0
- imap_processing/tests/mag/validation/L1c/T014/mag-l1b-l1c-t014-magi-normal-out.csv +1793 -0
- imap_processing/tests/mag/validation/L1c/T014/mag-l1b-l1c-t014-mago-normal-in.csv +1217 -0
- imap_processing/tests/mag/validation/L1c/T014/mag-l1b-l1c-t014-mago-normal-out.csv +1793 -0
- imap_processing/tests/mag/validation/L1c/T015/mag-l1b-l1c-t015-magi-burst-in.csv +2561 -0
- imap_processing/tests/mag/validation/L1c/T015/mag-l1b-l1c-t015-magi-normal-in.csv +961 -0
- imap_processing/tests/mag/validation/L1c/T015/mag-l1b-l1c-t015-magi-normal-out.csv +1539 -0
- imap_processing/tests/mag/validation/L1c/T015/mag-l1b-l1c-t015-mago-normal-in.csv +1921 -0
- imap_processing/tests/mag/validation/L1c/T015/mag-l1b-l1c-t015-mago-normal-out.csv +2499 -0
- imap_processing/tests/mag/validation/L1c/T016/mag-l1b-l1c-t016-magi-normal-in.csv +865 -0
- imap_processing/tests/mag/validation/L1c/T016/mag-l1b-l1c-t016-magi-normal-out.csv +1196 -0
- imap_processing/tests/mag/validation/L1c/T016/mag-l1b-l1c-t016-mago-normal-in.csv +1729 -0
- imap_processing/tests/mag/validation/L1c/T016/mag-l1b-l1c-t016-mago-normal-out.csv +3053 -0
- imap_processing/tests/mag/validation/L2/imap_mag_l1b_norm-mago_20251017_v002.cdf +0 -0
- imap_processing/tests/mag/validation/calibration/imap_mag_l1b-calibration_20240229_v001.cdf +0 -0
- imap_processing/tests/mag/validation/calibration/imap_mag_l2-calibration-matrices_20251017_v004.cdf +0 -0
- imap_processing/tests/mag/validation/calibration/imap_mag_l2-offsets-norm_20251017_20251017_v001.cdf +0 -0
- imap_processing/tests/spacecraft/data/SSR_2024_190_20_08_12_0483851794_2_DA_apid0594_1packet.pkts +0 -0
- imap_processing/tests/spacecraft/test_quaternions.py +71 -0
- imap_processing/tests/spice/test_data/fake_repoint_data.csv +5 -0
- imap_processing/tests/spice/test_data/fake_spin_data.csv +11 -11
- imap_processing/tests/spice/test_geometry.py +9 -12
- imap_processing/tests/spice/test_kernels.py +1 -200
- imap_processing/tests/spice/test_pointing_frame.py +185 -0
- imap_processing/tests/spice/test_repoint.py +121 -0
- imap_processing/tests/spice/test_spin.py +50 -9
- imap_processing/tests/spice/test_time.py +14 -0
- imap_processing/tests/swapi/lut/imap_swapi_esa-unit-conversion_20250211_v000.csv +73 -0
- imap_processing/tests/swapi/lut/imap_swapi_lut-notes_20250211_v000.csv +1025 -0
- imap_processing/tests/swapi/test_swapi_l1.py +13 -11
- imap_processing/tests/swapi/test_swapi_l2.py +180 -8
- imap_processing/tests/swe/l0_data/2024051010_SWE_HK_packet.bin +0 -0
- imap_processing/tests/swe/l0_data/2024051011_SWE_CEM_RAW_packet.bin +0 -0
- imap_processing/tests/swe/l0_validation_data/idle_export_eu.SWE_APP_HK_20240510_092742.csv +49 -0
- imap_processing/tests/swe/l0_validation_data/idle_export_eu.SWE_CEM_RAW_20240510_092742.csv +593 -0
- imap_processing/tests/swe/lut/checker-board-indices.csv +24 -0
- imap_processing/tests/swe/lut/imap_swe_esa-lut_20250301_v000.csv +385 -0
- imap_processing/tests/swe/lut/imap_swe_l1b-in-flight-cal_20240510_20260716_v000.csv +3 -0
- imap_processing/tests/swe/test_swe_l1a.py +20 -2
- imap_processing/tests/swe/test_swe_l1a_cem_raw.py +52 -0
- imap_processing/tests/swe/test_swe_l1a_hk.py +68 -0
- imap_processing/tests/swe/test_swe_l1a_science.py +3 -3
- imap_processing/tests/swe/test_swe_l1b.py +162 -24
- imap_processing/tests/swe/test_swe_l2.py +153 -91
- imap_processing/tests/test_cli.py +171 -88
- imap_processing/tests/test_utils.py +140 -17
- imap_processing/tests/ultra/data/l0/FM45_UltraFM45_Functional_2024-01-22T0105_20240122T010548.CCSDS +0 -0
- imap_processing/tests/ultra/data/l0/ultra45_raw_sc_ultraimgrates_20220530_00.csv +164 -0
- imap_processing/tests/ultra/{test_data → data}/l0/ultra45_raw_sc_ultrarawimg_withFSWcalcs_FM45_40P_Phi28p5_BeamCal_LinearScan_phi2850_theta-000_20240207T102740.csv +3243 -3243
- imap_processing/tests/ultra/data/mock_data.py +369 -0
- imap_processing/tests/ultra/unit/conftest.py +115 -89
- imap_processing/tests/ultra/unit/test_badtimes.py +4 -4
- imap_processing/tests/ultra/unit/test_cullingmask.py +8 -6
- imap_processing/tests/ultra/unit/test_de.py +14 -13
- imap_processing/tests/ultra/unit/test_decom_apid_880.py +27 -76
- imap_processing/tests/ultra/unit/test_decom_apid_881.py +54 -11
- imap_processing/tests/ultra/unit/test_decom_apid_883.py +12 -10
- imap_processing/tests/ultra/unit/test_decom_apid_896.py +202 -55
- imap_processing/tests/ultra/unit/test_lookup_utils.py +23 -1
- imap_processing/tests/ultra/unit/test_spacecraft_pset.py +77 -0
- imap_processing/tests/ultra/unit/test_ultra_l1a.py +98 -305
- imap_processing/tests/ultra/unit/test_ultra_l1b.py +60 -14
- imap_processing/tests/ultra/unit/test_ultra_l1b_annotated.py +2 -2
- imap_processing/tests/ultra/unit/test_ultra_l1b_culling.py +26 -27
- imap_processing/tests/ultra/unit/test_ultra_l1b_extended.py +239 -70
- imap_processing/tests/ultra/unit/test_ultra_l1c.py +5 -5
- imap_processing/tests/ultra/unit/test_ultra_l1c_pset_bins.py +114 -83
- imap_processing/tests/ultra/unit/test_ultra_l2.py +230 -0
- imap_processing/ultra/constants.py +1 -1
- imap_processing/ultra/l0/decom_tools.py +27 -39
- imap_processing/ultra/l0/decom_ultra.py +168 -204
- imap_processing/ultra/l0/ultra_utils.py +152 -136
- imap_processing/ultra/l1a/ultra_l1a.py +55 -271
- imap_processing/ultra/l1b/badtimes.py +1 -4
- imap_processing/ultra/l1b/cullingmask.py +2 -6
- imap_processing/ultra/l1b/de.py +116 -57
- imap_processing/ultra/l1b/extendedspin.py +20 -18
- imap_processing/ultra/l1b/lookup_utils.py +72 -9
- imap_processing/ultra/l1b/ultra_l1b.py +36 -16
- imap_processing/ultra/l1b/ultra_l1b_culling.py +66 -30
- imap_processing/ultra/l1b/ultra_l1b_extended.py +297 -94
- imap_processing/ultra/l1c/histogram.py +2 -6
- imap_processing/ultra/l1c/spacecraft_pset.py +84 -0
- imap_processing/ultra/l1c/ultra_l1c.py +8 -9
- imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +206 -108
- imap_processing/ultra/l2/ultra_l2.py +299 -0
- imap_processing/ultra/lookup_tables/Angular_Profiles_FM45_LeftSlit.csv +526 -0
- imap_processing/ultra/lookup_tables/Angular_Profiles_FM45_RightSlit.csv +526 -0
- imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_LeftSlit.csv +526 -0
- imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_RightSlit.csv +526 -0
- imap_processing/ultra/lookup_tables/FM45_Startup1_ULTRA_IMGPARAMS_20240719.csv +2 -2
- imap_processing/ultra/lookup_tables/FM90_Startup1_ULTRA_IMGPARAMS_20240719.csv +2 -0
- imap_processing/ultra/packet_definitions/README.md +38 -0
- imap_processing/ultra/packet_definitions/ULTRA_SCI_COMBINED.xml +15302 -482
- imap_processing/ultra/utils/ultra_l1_utils.py +31 -12
- imap_processing/utils.py +69 -29
- {imap_processing-0.11.0.dist-info → imap_processing-0.13.0.dist-info}/METADATA +10 -6
- imap_processing-0.13.0.dist-info/RECORD +578 -0
- imap_processing/cdf/config/imap_mag_l1_variable_attrs.yaml +0 -237
- imap_processing/hi/l1a/housekeeping.py +0 -27
- imap_processing/hi/l1b/hi_eng_unit_convert_table.csv +0 -154
- imap_processing/swe/l1b/swe_esa_lookup_table.csv +0 -1441
- imap_processing/swe/l1b/swe_l1b_science.py +0 -652
- imap_processing/tests/codice/data/imap_codice_l1a_hi-counters-aggregated_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_hi-counters-singles_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_hi-omni_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_hi-sectored_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_hskp_20100101_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-counters-aggregated_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-counters-singles_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-nsw-angular_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-nsw-priority_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-nsw-species_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-sw-angular_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-sw-priority_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-sw-species_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_hi-counters-aggregated_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_hi-counters-singles_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_hi-omni_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_hi-sectored_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_hskp_20100101_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-counters-aggregated_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-counters-singles_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-nsw-angular_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-nsw-priority_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-nsw-species_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-sw-angular_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-sw-priority_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-sw-species_20240429_v001.cdf +0 -0
- imap_processing/tests/hi/data/l1/imap_hi_l1b_45sensor-de_20250415_v999.cdf +0 -0
- imap_processing/tests/hit/PREFLIGHT_raw_record_2023_256_15_59_04_apid1251.pkts +0 -0
- imap_processing/tests/hit/PREFLIGHT_raw_record_2023_256_15_59_04_apid1252.pkts +0 -0
- imap_processing/tests/hit/validation_data/hskp_sample_eu.csv +0 -89
- imap_processing/tests/hit/validation_data/sci_sample_raw1.csv +0 -29
- imap_processing/tests/idex/test_data/imap_idex_l0_raw_20231214_v001.pkts +0 -0
- imap_processing/tests/lo/test_cdfs/imap_lo_l1a_de_20100101_v001.cdf +0 -0
- imap_processing/tests/lo/test_cdfs/imap_lo_l1a_spin_20100101_v001.cdf +0 -0
- imap_processing/tests/swe/test_swe_l1b_science.py +0 -84
- imap_processing/tests/ultra/test_data/mock_data.py +0 -161
- imap_processing/ultra/l1c/pset.py +0 -40
- imap_processing/ultra/lookup_tables/dps_sensitivity45.cdf +0 -0
- imap_processing-0.11.0.dist-info/RECORD +0 -488
- /imap_processing/idex/packet_definitions/{idex_packet_definition.xml → idex_science_packet_definition.xml} +0 -0
- /imap_processing/tests/ialirt/{test_data → data}/l0/20240827095047_SWE_IALIRT_packet.bin +0 -0
- /imap_processing/tests/ialirt/{test_data → data}/l0/BinLog CCSDS_FRAG_TLM_20240826_152323Z_IALIRT_data_for_SDC.bin +0 -0
- /imap_processing/tests/ialirt/{test_data → data}/l0/IALiRT Raw Packet Telemetry.txt +0 -0
- /imap_processing/tests/ialirt/{test_data → data}/l0/apid01152.tlm +0 -0
- /imap_processing/tests/ialirt/{test_data → data}/l0/eu_SWP_IAL_20240826_152033.csv +0 -0
- /imap_processing/tests/ialirt/{test_data → data}/l0/hi_fsw_view_1_ccsds.bin +0 -0
- /imap_processing/tests/ialirt/{test_data → data}/l0/hit_ialirt_sample.ccsds +0 -0
- /imap_processing/tests/ialirt/{test_data → data}/l0/hit_ialirt_sample.csv +0 -0
- /imap_processing/tests/ialirt/{test_data → data}/l0/idle_export_eu.SWE_IALIRT_20240827_093852.csv +0 -0
- /imap_processing/tests/ialirt/{test_data → data}/l0/imap_codice_l1a_hi-ialirt_20240523200000_v0.0.0.cdf +0 -0
- /imap_processing/tests/ialirt/{test_data → data}/l0/imap_codice_l1a_lo-ialirt_20241110193700_v0.0.0.cdf +0 -0
- /imap_processing/{mag/l1b → tests/spacecraft}/__init__.py +0 -0
- /imap_processing/{swe/l1b/engineering_unit_convert_table.csv → tests/swe/lut/imap_swe_eu-conversion_20240510_v000.csv} +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l0/FM45_40P_Phi28p5_BeamCal_LinearScan_phi28.50_theta-0.00_20240207T102740.CCSDS +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l0/FM45_7P_Phi0.0_BeamCal_LinearScan_phi0.04_theta-0.01_20230821T121304.CCSDS +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l0/FM45_TV_Cycle6_Hot_Ops_Front212_20240124T063837.CCSDS +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l0/Ultra45_EM_SwRI_Cal_Run7_ThetaScan_20220530T225054.CCSDS +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l0/ultra45_raw_sc_auxdata_Ultra45_EM_SwRI_Cal_Run7_ThetaScan_20220530T225054.csv +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l0/ultra45_raw_sc_enaphxtofhangimg_FM45_TV_Cycle6_Hot_Ops_Front212_20240124T063837.csv +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l0/ultra45_raw_sc_ultraimgrates_Ultra45_EM_SwRI_Cal_Run7_ThetaScan_20220530T225054.csv +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l0/ultra45_raw_sc_ultrarawimgevent_FM45_7P_Phi00_BeamCal_LinearScan_phi004_theta-001_20230821T121304.csv +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l1/dps_exposure_helio_45_E1.cdf +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l1/dps_exposure_helio_45_E12.cdf +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l1/dps_exposure_helio_45_E24.cdf +0 -0
- {imap_processing-0.11.0.dist-info → imap_processing-0.13.0.dist-info}/LICENSE +0 -0
- {imap_processing-0.11.0.dist-info → imap_processing-0.13.0.dist-info}/WHEEL +0 -0
- {imap_processing-0.11.0.dist-info → imap_processing-0.13.0.dist-info}/entry_points.txt +0 -0
|
@@ -1,12 +1,22 @@
|
|
|
1
1
|
"""MAG L1C processing module."""
|
|
2
2
|
|
|
3
|
+
import logging
|
|
4
|
+
from typing import Optional
|
|
5
|
+
|
|
6
|
+
import numpy as np
|
|
3
7
|
import xarray as xr
|
|
4
8
|
|
|
5
9
|
from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
|
|
10
|
+
from imap_processing.mag import imap_mag_sdc_configuration_v001 as configuration
|
|
11
|
+
from imap_processing.mag.constants import ModeFlags, VecSec
|
|
12
|
+
from imap_processing.mag.l1c.interpolation_methods import InterpolationFunction
|
|
13
|
+
|
|
14
|
+
logger = logging.getLogger(__name__)
|
|
6
15
|
|
|
7
16
|
|
|
8
17
|
def mag_l1c(
    first_input_dataset: xr.Dataset,
    second_input_dataset: Optional[xr.Dataset] = None,
) -> xr.Dataset:
    """
    Will process MAG L1C data from L1B data.

    Parameters
    ----------
    first_input_dataset : xr.Dataset
        The first input dataset to process. This can be either burst or norm data, for
        mago or magi.
    second_input_dataset : xr.Dataset, optional
        The second input dataset to process. This should be burst if first_input_dataset
        was norm, or norm if first_input_dataset was burst. It should match the
        instrument - both inputs should be mago or magi.

    Returns
    -------
    output_dataset : xr.Dataset
        L1C data set.

    Raises
    ------
    NotImplementedError
        If only burst data is provided (norm-only and norm+burst are supported).
    """
    # TODO:
    # find missing sequences and output them
    # Fix gaps at the beginning of the day by going to previous day's file
    # Fix gaps at the end of the day
    # Allow for one input to be missing
    # Missing burst file - just pass through norm file
    # Missing norm file - go back to previous L1C file to find timestamps, then
    # interpolate the entire day from burst

    # Logical_source may be a plain string or a list of strings.
    input_logical_source_1 = first_input_dataset.attrs["Logical_source"]
    if isinstance(first_input_dataset.attrs["Logical_source"], list):
        input_logical_source_1 = first_input_dataset.attrs["Logical_source"][0]

    # The sensor suffix ("o" for mago, "i" for magi) is the last character of
    # the logical source, e.g. "imap_mag_l1b_norm-mago".
    sensor = input_logical_source_1[-1:]
    output_logical_source = f"imap_mag_l1c_norm-mag{sensor}"

    normal_mode_dataset, burst_mode_dataset = select_datasets(
        first_input_dataset, second_input_dataset
    )

    interp_function = InterpolationFunction[configuration.L1C_INTERPOLATION_METHOD]
    # select_datasets uses None as the "missing" sentinel; compare against None
    # explicitly instead of relying on xr.Dataset truthiness (which reflects
    # the number of data variables, not presence of the input).
    if normal_mode_dataset is not None and burst_mode_dataset is not None:
        full_interpolated_timeline = process_mag_l1c(
            normal_mode_dataset, burst_mode_dataset, interp_function
        )
    elif normal_mode_dataset is not None:
        # Norm-only: keep the existing timeline; gaps remain flagged missing.
        full_interpolated_timeline = fill_normal_data(
            normal_mode_dataset, normal_mode_dataset["epoch"].data
        )
    else:
        # TODO: With only burst data, downsample by retrieving the timeline
        raise NotImplementedError

    completed_timeline = remove_missing_data(full_interpolated_timeline)

    attribute_manager = ImapCdfAttributes()
    attribute_manager.add_instrument_global_attrs("mag")
    attribute_manager.add_instrument_variable_attrs("mag", "l1c")
    compression = xr.DataArray(
        np.arange(2),
        name="compression",
        dims=["compression"],
        attrs=attribute_manager.get_variable_attributes(
            "compression_attrs", check_schema=False
        ),
    )

    direction = xr.DataArray(
        np.arange(4),
        name="direction",
        dims=["direction"],
        attrs=attribute_manager.get_variable_attributes(
            "direction_attrs", check_schema=False
        ),
    )

    epoch_time = xr.DataArray(
        completed_timeline[:, 0],
        name="epoch",
        dims=["epoch"],
        attrs=attribute_manager.get_variable_attributes("epoch"),
    )

    direction_label = xr.DataArray(
        direction.values.astype(str),
        name="direction_label",
        dims=["direction_label"],
        attrs=attribute_manager.get_variable_attributes(
            "direction_label", check_schema=False
        ),
    )

    compression_label = xr.DataArray(
        compression.values.astype(str),
        name="compression_label",
        dims=["compression_label"],
        attrs=attribute_manager.get_variable_attributes(
            "compression_label", check_schema=False
        ),
    )
    global_attributes = attribute_manager.get_global_attributes(output_logical_source)
    # TODO merge missing sequences? replace?
    global_attributes["missing_sequences"] = ""

    try:
        global_attributes["is_mago"] = normal_mode_dataset.attrs["is_mago"]
        global_attributes["is_active"] = normal_mode_dataset.attrs["is_active"]
        global_attributes["missing_sequences"] = normal_mode_dataset.attrs[
            "missing_sequences"
        ]
    except KeyError as e:
        logger.info(
            f"Key error when assigning global attributes, attribute not found in "
            f"L1B file with logical source "
            f"{normal_mode_dataset.attrs['Logical_source']}: {e}"
        )

    global_attributes["interpolation_method"] = interp_function.name

    output_dataset = xr.Dataset(
        coords={
            "epoch": epoch_time,
            "direction": direction,
            "direction_label": direction_label,
            "compression": compression,
            "compression_label": compression_label,
        },
        attrs=global_attributes,
    )

    output_dataset["vectors"] = xr.DataArray(
        completed_timeline[:, 1:5],
        name="vectors",
        dims=["epoch", "direction"],
        attrs=attribute_manager.get_variable_attributes("vector_attrs"),
    )

    # NOTE(review): x[:4] spans all four direction components, i.e. the range
    # column is included in the norm — confirm x[:3] (x, y, z only) was not
    # intended here.
    output_dataset["vector_magnitude"] = xr.apply_ufunc(
        lambda x: np.linalg.norm(x[:4]),
        output_dataset["vectors"],
        input_core_dims=[["direction"]],
        output_core_dims=[[]],
        vectorize=True,
    )
    # output_dataset['vector_magnitude'].attrs =
    # attribute_manager.get_variable_attributes("vector_magnitude_attrs")

    output_dataset["compression_flags"] = xr.DataArray(
        completed_timeline[:, 6:8],
        name="compression_flags",
        dims=["epoch", "compression"],
        attrs=attribute_manager.get_variable_attributes("compression_flags_attrs"),
    )

    output_dataset["generated_flag"] = xr.DataArray(
        completed_timeline[:, 5],
        name="generated_flag",
        dims=["epoch"],
        # attrs=attribute_manager.get_variable_attributes("generated_flag_attrs"),
    )

    return output_dataset
|
183
|
+
|
|
184
|
+
|
|
185
|
+
def select_datasets(
    first_input_dataset: xr.Dataset, second_input_dataset: Optional[xr.Dataset] = None
) -> tuple[xr.Dataset, xr.Dataset]:
    """
    Sort one or two input datasets into (normal mode, burst mode).

    With a single input, the unmatched slot is returned as None. With two
    inputs, exactly one must be normal mode and one must be burst mode,
    otherwise an error is raised.

    Parameters
    ----------
    first_input_dataset : xr.Dataset
        The first input dataset.
    second_input_dataset : xr.Dataset, optional
        The second input dataset.

    Returns
    -------
    tuple
        Tuple containing norm_mode_dataset, burst_mode_dataset.
    """

    def _logical_source(dataset: xr.Dataset) -> str:
        # Logical_source may be a plain string or a list of strings.
        source = dataset.attrs["Logical_source"]
        if isinstance(source, list):
            return source[0]
        return source

    norm_dataset = None
    burst_dataset = None

    source_1 = _logical_source(first_input_dataset)
    if "norm" in source_1:
        norm_dataset = first_input_dataset
    if "burst" in source_1:
        burst_dataset = first_input_dataset

    if second_input_dataset is None:
        logger.info(
            f"Only one input dataset provided with logical source "
            f"{source_1}"
        )
        return norm_dataset, burst_dataset

    source_2 = _logical_source(second_input_dataset)
    if "burst" in source_2:
        burst_dataset = second_input_dataset
    elif "norm" in source_2:
        norm_dataset = second_input_dataset

    # If there are two inputs, one should be norm and one should be burst
    if norm_dataset is None or burst_dataset is None:
        raise RuntimeError(
            "L1C requires one normal mode and one burst mode input file."
        )

    return norm_dataset, burst_dataset
|
243
|
+
|
|
244
|
+
|
|
245
|
+
def process_mag_l1c(
    normal_mode_dataset: xr.Dataset,
    burst_mode_dataset: xr.Dataset,
    interpolation_function: InterpolationFunction,
) -> np.ndarray:
    """
    Create MAG L1C data from L1B datasets.

    This function starts from the normal mode dataset and completes the following steps:
    1. find all the gaps in the dataset
    2. generate a new timeline with the gaps filled
    3. fill the timeline with normal mode data (so, all the non-gap timestamps)
    4. interpolate the gaps using the burst mode data and the method specified in
    interpolation_function.

    It returns an (n, 8) shaped array:
    0 - epoch (timestamp)
    1-4 - vector x, y, z, and range
    5 - generated flag (0 for normal data, 1 for interpolated data, -1 for missing data)
    6-7 - compression flags (is_compressed, compression_width)

    Parameters
    ----------
    normal_mode_dataset : xarray.Dataset
        The normal mode dataset, which acts as a base for the output.
    burst_mode_dataset : xarray.Dataset
        The burst mode dataset, which is used to fill in the gaps in the normal mode.
    interpolation_function : InterpolationFunction
        The interpolation function to use to fill in the gaps.

    Returns
    -------
    np.ndarray
        An (n, 8) shaped array containing the completed timeline.
    """
    norm_epoch = normal_mode_dataset["epoch"].data
    # The per-segment vector rate is carried as a serialized string attribute;
    # None means find_all_gaps falls back to its default cadence.
    if "vectors_per_second" in normal_mode_dataset.attrs:
        normal_vecsec_dict = vectors_per_second_from_string(
            normal_mode_dataset.attrs["vectors_per_second"]
        )
    else:
        normal_vecsec_dict = None

    # Removed dead code: a deep copy of the normal dataset with a
    # "sample_interpolated" variable was created here but never used. (It also
    # sized the array with len(Dataset), which counts data variables, not
    # samples.)

    gaps = find_all_gaps(norm_epoch, normal_vecsec_dict)

    new_timeline = generate_timeline(norm_epoch, gaps)
    norm_filled = fill_normal_data(normal_mode_dataset, new_timeline)
    interpolated = interpolate_gaps(
        burst_mode_dataset, gaps, norm_filled, interpolation_function
    )

    return interpolated
|
302
|
+
|
|
303
|
+
|
|
304
|
+
def fill_normal_data(
    normal_dataset: xr.Dataset, new_timeline: np.ndarray
) -> np.ndarray:
    """
    Fill the new timeline with the normal mode data.

    If the timestamp exists in the normal mode data, it will be filled in the output.

    Parameters
    ----------
    normal_dataset : xr.Dataset
        The normal mode dataset.
    new_timeline : np.ndarray
        A 1D array of timestamps to fill.

    Returns
    -------
    np.ndarray
        An (n, 8) shaped array containing the timeline filled with normal mode data.
        Gaps are marked as -1 in the generated flag column at index 5.
        Indices: 0 - epoch, 1-4 - vector x, y, z, and range, 5 - generated flag,
        6-7 - compression flags.
    """
    # TODO: fill with FILLVAL?
    filled: np.ndarray = np.zeros((len(new_timeline), 8))
    filled[:, 0] = new_timeline
    # Every row starts out flagged as missing; rows matched below are
    # overwritten, so the flag column also reveals any unmatched timestamps.
    filled[:, 5] = ModeFlags.MISSING.value

    epochs = normal_dataset["epoch"].data
    vectors = normal_dataset["vectors"].data
    compression_flags = normal_dataset["compression_flags"].data

    for timestamp, vector, flags in zip(epochs, vectors, compression_flags):
        row = np.searchsorted(new_timeline, timestamp)
        filled[row, 1:5] = vector
        filled[row, 5] = ModeFlags.NORM.value
        filled[row, 6:8] = flags

    return filled
|
342
|
+
|
|
343
|
+
|
|
344
|
+
def interpolate_gaps(
    burst_dataset: xr.Dataset,
    gaps: np.ndarray,
    filled_norm_timeline: np.ndarray,
    interpolation_function: InterpolationFunction,
) -> np.ndarray:
    """
    Interpolate the gaps in the filled timeline using the burst mode data.

    Returns an array that matches the format of filled_norm_timeline, with gaps filled
    using interpolated burst data.

    Parameters
    ----------
    burst_dataset : xarray.Dataset
        The L1B burst mode dataset.
    gaps : numpy.ndarray
        An array of gaps to fill, with shape (n, 2) where n is the number of gaps.
    filled_norm_timeline : numpy.ndarray
        Timeline filled with normal mode data in the shape (n, 8).
    interpolation_function : InterpolationFunction
        The interpolation function to use to fill in the gaps.

    Returns
    -------
    numpy.ndarray
        An array of shape (n, 8) containing the fully filled timeline.
        Indices: 0 - epoch, 1-4 - vector x, y, z, and range, 5 - generated flag,
        6-7 - compression flags.
    """
    burst_epochs = burst_dataset["epoch"].data
    # Exclude range values
    burst_vectors = burst_dataset["vectors"].data
    # Default to two vectors per second
    burst_vecsec_dict = {0: VecSec.TWO_VECS_PER_S.value}
    if "vectors_per_second" in burst_dataset.attrs:
        burst_vecsec_dict = vectors_per_second_from_string(
            burst_dataset.attrs["vectors_per_second"]
        )

    for gap in gaps:
        # Nearest burst samples to the gap's start and end timestamps.
        # TODO: we might need a few inputs before or after start/end
        burst_gap_start = (np.abs(burst_epochs - gap[0])).argmin()
        burst_gap_end = (np.abs(burst_epochs - gap[1])).argmin()

        # for the CIC filter, we need 2x normal mode cadence seconds

        # gap[2] is the expected normal-mode vector rate for this segment.
        norm_rate = VecSec(int(gap[2]))

        # Input rate
        # Find where burst_start is after the start of the timeline
        # NOTE(review): this relies on dict insertion order being sorted by
        # start time (as produced by vectors_per_second_from_string) — confirm.
        burst_vecsec_index = (
            np.searchsorted(
                list(burst_vecsec_dict.keys()),
                burst_epochs[burst_gap_start],
                side="right",
            )
            - 1
        )
        burst_rate = VecSec(list(burst_vecsec_dict.values())[burst_vecsec_index])

        # Pad the burst window by 2x the normal-mode cadence on each side,
        # converted to a number of burst samples.
        required_seconds = (1 / norm_rate.value) * 2
        burst_buffer = int(required_seconds * burst_rate.value)

        burst_start = max(0, burst_gap_start - burst_buffer)
        burst_end = min(len(burst_epochs) - 1, burst_gap_end + burst_buffer)

        # NOTE(review): this boolean mask is applied to the full (n, 8) array,
        # so it flattens and selects individual elements from every column, not
        # rows by epoch — presumably filled_norm_timeline[:, 0] was intended
        # for the comparison; confirm.
        gap_timeline = filled_norm_timeline[
            (filled_norm_timeline > gap[0]) & (filled_norm_timeline < gap[1])
        ]
        logger.info(
            f"difference between gap start and burst start: "
            f"{gap_timeline[0] - burst_epochs[burst_start]}"
        )
        # Limit timestamps to only include the areas with burst data
        gap_timeline = gap_timeline[
            (
                (gap_timeline >= burst_epochs[burst_start])
                & (gap_timeline <= burst_epochs[burst_gap_end])
            )
        ]
        # do not include range
        gap_fill = interpolation_function(
            burst_vectors[burst_start:burst_end, :3],
            burst_epochs[burst_start:burst_end],
            gap_timeline,
            input_rate=burst_rate,
            output_rate=norm_rate,
        )

        # gaps should not have data in timeline, still check it
        for index, timestamp in enumerate(gap_timeline):
            timeline_index = np.searchsorted(filled_norm_timeline[:, 0], timestamp)
            # Only fill rows that are still empty (x, y, z all zero).
            if sum(
                filled_norm_timeline[timeline_index, 1:4]
            ) == 0 and burst_gap_start + index < len(burst_vectors):
                filled_norm_timeline[timeline_index, 1:4] = gap_fill[index]

                # NOTE(review): range and compression flags are read at
                # burst_gap_start + index, which indexes burst samples by the
                # position of the timestamp in gap_timeline — these two
                # sequences have different cadences, so this pairing looks
                # approximate; confirm intent.
                filled_norm_timeline[timeline_index, 4] = burst_vectors[
                    burst_gap_start + index, 3
                ]
                filled_norm_timeline[timeline_index, 5] = ModeFlags.BURST.value
                filled_norm_timeline[timeline_index, 6:8] = burst_dataset[
                    "compression_flags"
                ].data[burst_gap_start + index]

    return filled_norm_timeline
|
451
|
+
|
|
452
|
+
|
|
453
|
+
def generate_timeline(epoch_data: np.ndarray, gaps: np.ndarray) -> np.ndarray:
    """
    Generate a new timeline from existing, gap-filled timeline and gaps.

    The gaps are generated at a .5 second cadence, regardless of the cadence of the
    existing data.

    Parameters
    ----------
    epoch_data : numpy.ndarray
        The existing timeline data, in the shape (n,).
    gaps : numpy.ndarray
        An array of gaps to fill, with shape (n, 2) where n is the number of gaps.
        The gap is specified as (start, end) where start and end both exist in the
        timeline already.

    Returns
    -------
    numpy.ndarray
        The new timeline, filled with the existing data and the generated gaps.
    """
    # Build the output as a list of segments: for each gap, the existing data
    # up to the gap start, then generated timestamps covering the gap. The
    # leading float array preserves the original float64 output dtype.
    segments = [np.zeros(0)]
    resume_index = 0

    for gap in gaps:
        start_matches = np.where(epoch_data == gap[0])[0]
        end_matches = np.where(epoch_data == gap[1])[0]
        # Both endpoints must appear exactly once in the existing timeline.
        if start_matches.size != 1 or end_matches.size != 1:
            raise ValueError("Gap start or end not found in input timeline")

        segments.append(epoch_data[resume_index : start_matches[0]])
        segments.append(generate_missing_timestamps(gap))
        resume_index = end_matches[0]

    # Remaining data after the final gap (or everything, if there were none).
    segments.append(epoch_data[resume_index:])

    return np.concatenate(segments)
|
496
|
+
|
|
497
|
+
|
|
498
|
+
def find_all_gaps(
    epoch_data: np.ndarray, vecsec_dict: Optional[dict] = None
) -> np.ndarray:
    """
    Find all the gaps in the epoch data.

    If vectors_per_second_attr is provided, it will be used to find the gaps. Otherwise,
    it will assume a nominal 1/2 second gap. A gap is defined as missing data from the
    expected sequence as defined by vectors_per_second_attr.

    Parameters
    ----------
    epoch_data : numpy.ndarray
        The epoch data to find gaps in.
    vecsec_dict : dict, optional
        A dictionary of the form {start: vecsec, start: vecsec} where start is the time
        in nanoseconds and vecsec is the number of vectors per second. This will be
        used to find the gaps. If not provided, a 1/2 second gap is assumed.

    Returns
    -------
    numpy.ndarray
        An array of gaps with shape (n, 3) where n is the number of gaps. The gaps are
        specified as (start, end, vector_rate) where start and end both exist in the
        timeline.
    """
    all_gaps: np.ndarray = np.zeros((0, 3))
    if vecsec_dict is None:
        # TODO: when we go back to the previous file, also retrieve expected
        # vectors per second
        # If no vecsec is provided, assume 2 vectors per second
        vecsec_dict = {0: VecSec.TWO_VECS_PER_S.value}

    # Walk the rate segments from the latest start time backwards, scanning
    # each segment (plus one overlapping sample) at its own expected cadence.
    segment_end = epoch_data.shape[0]
    for segment_start_time in sorted(vecsec_dict, reverse=True):
        segment_start = np.where(segment_start_time == epoch_data)[0][0]
        segment_gaps = find_gaps(
            epoch_data[segment_start : segment_end + 1],
            vecsec_dict[segment_start_time],
        )
        # Prepend so earlier segments end up first in the output.
        all_gaps = np.concatenate((segment_gaps, all_gaps))
        segment_end = segment_start

    return all_gaps
|
545
|
+
|
|
546
|
+
|
|
547
|
+
def find_gaps(timeline_data: np.ndarray, vectors_per_second: int) -> np.ndarray:
    """
    Find gaps in timeline_data that are larger than 1/vectors_per_second.

    Returns timestamps (start_gap, end_gap, vectors_per_second) where startgap and
    endgap both exist in timeline data.

    Parameters
    ----------
    timeline_data : numpy.ndarray
        Array of timestamps.
    vectors_per_second : int
        Number of vectors expected per second.

    Returns
    -------
    numpy.ndarray
        Array of timestamps of shape (n, 3) containing n gaps with start_gap and
        end_gap, as well as vectors_per_second. Start_gap and end_gap both correspond
        to points in timeline_data.
    """
    # Nominal spacing between consecutive timestamps, in nanoseconds.
    expected_gap = 1 / vectors_per_second * 1e9

    # TODO: timestamps can vary by a few ms. Per Alastair, this can be around 7.5% of
    # cadence without counting as a "gap".
    step_sizes = np.abs(np.diff(timeline_data))
    # 3.5e7 == 7.5% of 0.5s in nanoseconds, a common gap. In the future, this number
    # will be calculated from the expected gap.
    oversized = np.nonzero(step_sizes - expected_gap > 3.5e7)[0]

    # One row per oversized step: (timestamp before the gap, timestamp after
    # the gap, expected rate).
    gap_table: np.ndarray = np.zeros((len(oversized), 3))
    gap_table[:, 0] = timeline_data[oversized]
    gap_table[:, 1] = timeline_data[oversized + 1]
    gap_table[:, 2] = vectors_per_second

    # TODO: How should I handle/find gaps at the end?
    return gap_table
|
588
|
+
|
|
589
|
+
|
|
590
|
+
def generate_missing_timestamps(gap: np.ndarray) -> np.ndarray:
    """
    Generate a new timeline from input gaps.

    Any gaps specified in gaps will be filled with timestamps that are 0.5 seconds
    apart.

    Parameters
    ----------
    gap : numpy.ndarray
        Array of timestamps of shape (2,) containing n gaps with start_gap and
        end_gap. Start_gap and end_gap both correspond to points in timeline_data.

    Returns
    -------
    full_timeline: numpy.ndarray
        Completed timeline.
    """
    # Generated samples are always 0.5 seconds (5e8 ns) apart, regardless of
    # the cadence of the surrounding data.
    # TODO: is this in the configuration file?
    half_second_ns = 0.5 * 1e9

    # Includes the gap start, excludes the gap end (both already exist in the
    # surrounding timeline).
    start_ns, stop_ns = gap[0], gap[1]
    generated: np.ndarray = np.arange(start_ns, stop_ns, half_second_ns)
    return generated
|
614
|
+
|
|
615
|
+
|
|
616
|
+
def vectors_per_second_from_string(vecsec_string: str) -> dict:
    """
    Extract the vectors per second from a string into a dictionary.

    Dictionary format: {start_time: vecsec, start_time: vecsec}.

    Parameters
    ----------
    vecsec_string : str
        A string of the form "start:vecsec,start:vecsec" where start is the time in
        nanoseconds and vecsec is the number of vectors per second.

    Returns
    -------
    dict
        A dictionary of the form {start_time: vecsec, start_time: vecsec}.
    """
    # Each comma-separated segment is "start:vecsec"; both halves parse as int.
    return {
        int(start_time): int(vecsec)
        for start_time, vecsec in (
            segment.split(":") for segment in vecsec_string.split(",")
        )
    }
|
640
|
+
|
|
641
|
+
|
|
642
|
+
def remove_missing_data(filled_timeline: np.ndarray) -> np.ndarray:
    """
    Remove timestamps with no data from the filled timeline.

    Anywhere that the generated flag is equal to -1, the data will be removed.

    Parameters
    ----------
    filled_timeline : numpy.ndarray
        An (n, 8) shaped array containing the filled timeline.
        Indices: 0 - epoch, 1-4 - vector x, y, z, and range, 5 - generated flag,
        6-7 - compression flags.

    Returns
    -------
    cleaned_array : numpy.ndarray
        The filled timeline with missing data removed.
    """
    # Column 5 is the generated flag; -1 marks rows that never received data.
    keep_rows = filled_timeline[:, 5] != -1
    return filled_timeline[keep_rows]
|