imap-processing 0.11.0__py3-none-any.whl → 0.12.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- imap_processing/__init__.py +10 -11
- imap_processing/_version.py +2 -2
- imap_processing/ccsds/excel_to_xtce.py +65 -16
- imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +6 -28
- imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +365 -42
- imap_processing/cdf/config/imap_glows_global_cdf_attrs.yaml +0 -5
- imap_processing/cdf/config/imap_hi_global_cdf_attrs.yaml +10 -11
- imap_processing/cdf/config/imap_hi_variable_attrs.yaml +17 -19
- imap_processing/cdf/config/imap_hit_global_cdf_attrs.yaml +26 -13
- imap_processing/cdf/config/imap_hit_l1a_variable_attrs.yaml +106 -116
- imap_processing/cdf/config/imap_hit_l1b_variable_attrs.yaml +120 -145
- imap_processing/cdf/config/imap_hit_l2_variable_attrs.yaml +14 -0
- imap_processing/cdf/config/imap_idex_global_cdf_attrs.yaml +6 -9
- imap_processing/cdf/config/imap_idex_l1a_variable_attrs.yaml +1 -1
- imap_processing/cdf/config/imap_lo_global_cdf_attrs.yaml +0 -12
- imap_processing/cdf/config/imap_lo_l1a_variable_attrs.yaml +1 -1
- imap_processing/cdf/config/imap_mag_global_cdf_attrs.yaml +9 -21
- imap_processing/cdf/config/imap_mag_l1a_variable_attrs.yaml +361 -0
- imap_processing/cdf/config/imap_mag_l1b_variable_attrs.yaml +160 -0
- imap_processing/cdf/config/imap_mag_l1c_variable_attrs.yaml +160 -0
- imap_processing/cdf/config/imap_spacecraft_global_cdf_attrs.yaml +18 -0
- imap_processing/cdf/config/imap_spacecraft_variable_attrs.yaml +40 -0
- imap_processing/cdf/config/imap_swapi_global_cdf_attrs.yaml +1 -5
- imap_processing/cdf/config/imap_swe_global_cdf_attrs.yaml +12 -4
- imap_processing/cdf/config/imap_swe_l1a_variable_attrs.yaml +16 -2
- imap_processing/cdf/config/imap_swe_l1b_variable_attrs.yaml +48 -52
- imap_processing/cdf/config/imap_swe_l2_variable_attrs.yaml +71 -47
- imap_processing/cdf/config/imap_ultra_global_cdf_attrs.yaml +2 -14
- imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +51 -2
- imap_processing/cdf/config/imap_ultra_l1c_variable_attrs.yaml +29 -14
- imap_processing/cdf/utils.py +13 -7
- imap_processing/cli.py +23 -8
- imap_processing/codice/codice_l1a.py +207 -85
- imap_processing/codice/constants.py +1322 -568
- imap_processing/codice/decompress.py +2 -6
- imap_processing/ena_maps/ena_maps.py +480 -116
- imap_processing/ena_maps/utils/coordinates.py +19 -0
- imap_processing/ena_maps/utils/map_utils.py +14 -17
- imap_processing/ena_maps/utils/spatial_utils.py +45 -47
- imap_processing/hi/l1a/hi_l1a.py +24 -18
- imap_processing/hi/l1a/histogram.py +0 -1
- imap_processing/hi/l1a/science_direct_event.py +6 -8
- imap_processing/hi/l1b/hi_l1b.py +31 -39
- imap_processing/hi/l1c/hi_l1c.py +405 -17
- imap_processing/hi/utils.py +58 -12
- imap_processing/hit/ancillary/imap_hit_l1b-to-l2-standard-dt0-factors_20250219_v002.csv +205 -0
- imap_processing/hit/ancillary/imap_hit_l1b-to-l2-standard-dt1-factors_20250219_v002.csv +205 -0
- imap_processing/hit/ancillary/imap_hit_l1b-to-l2-standard-dt2-factors_20250219_v002.csv +205 -0
- imap_processing/hit/ancillary/imap_hit_l1b-to-l2-standard-dt3-factors_20250219_v002.csv +205 -0
- imap_processing/hit/ancillary/imap_hit_l1b-to-l2-summed-dt0-factors_20250219_v002.csv +68 -0
- imap_processing/hit/hit_utils.py +173 -1
- imap_processing/hit/l0/constants.py +20 -11
- imap_processing/hit/l0/decom_hit.py +18 -4
- imap_processing/hit/l1a/hit_l1a.py +45 -54
- imap_processing/hit/l1b/constants.py +317 -0
- imap_processing/hit/l1b/hit_l1b.py +367 -18
- imap_processing/hit/l2/constants.py +281 -0
- imap_processing/hit/l2/hit_l2.py +614 -0
- imap_processing/hit/packet_definitions/hit_packet_definitions.xml +1323 -71
- imap_processing/ialirt/l0/mag_l0_ialirt_data.py +155 -0
- imap_processing/ialirt/l0/parse_mag.py +246 -0
- imap_processing/ialirt/l0/process_swe.py +252 -0
- imap_processing/ialirt/packet_definitions/ialirt.xml +7 -3
- imap_processing/ialirt/packet_definitions/ialirt_mag.xml +115 -0
- imap_processing/ialirt/utils/grouping.py +114 -0
- imap_processing/ialirt/utils/time.py +29 -0
- imap_processing/idex/atomic_masses.csv +22 -0
- imap_processing/idex/decode.py +2 -2
- imap_processing/idex/idex_constants.py +25 -0
- imap_processing/idex/idex_l1a.py +6 -7
- imap_processing/idex/idex_l1b.py +4 -31
- imap_processing/idex/idex_l2a.py +789 -0
- imap_processing/idex/idex_variable_unpacking_and_eu_conversion.csv +39 -33
- imap_processing/lo/l0/lo_science.py +6 -0
- imap_processing/lo/l1a/lo_l1a.py +0 -1
- imap_processing/lo/l1b/lo_l1b.py +177 -25
- imap_processing/mag/constants.py +8 -0
- imap_processing/mag/imap_mag_sdc-configuration_v001.yaml +6 -0
- imap_processing/mag/l0/decom_mag.py +10 -3
- imap_processing/mag/l1a/mag_l1a.py +22 -11
- imap_processing/mag/l1a/mag_l1a_data.py +28 -3
- imap_processing/mag/l1b/mag_l1b.py +190 -48
- imap_processing/mag/l1c/interpolation_methods.py +211 -0
- imap_processing/mag/l1c/mag_l1c.py +447 -9
- imap_processing/quality_flags.py +1 -0
- imap_processing/spacecraft/packet_definitions/scid_x252.xml +538 -0
- imap_processing/spacecraft/quaternions.py +123 -0
- imap_processing/spice/geometry.py +16 -19
- imap_processing/spice/repoint.py +120 -0
- imap_processing/swapi/l1/swapi_l1.py +4 -0
- imap_processing/swapi/l2/swapi_l2.py +0 -1
- imap_processing/swe/l1a/swe_l1a.py +47 -8
- imap_processing/swe/l1a/swe_science.py +5 -2
- imap_processing/swe/l1b/swe_l1b_science.py +103 -56
- imap_processing/swe/l2/swe_l2.py +60 -65
- imap_processing/swe/packet_definitions/swe_packet_definition.xml +1121 -1
- imap_processing/swe/utils/swe_constants.py +63 -0
- imap_processing/swe/utils/swe_utils.py +85 -28
- imap_processing/tests/ccsds/test_data/expected_output.xml +40 -1
- imap_processing/tests/ccsds/test_excel_to_xtce.py +23 -20
- imap_processing/tests/cdf/test_data/imap_instrument2_global_cdf_attrs.yaml +0 -2
- imap_processing/tests/codice/conftest.py +1 -1
- imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-counters-aggregated_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-counters-singles_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-ialirt_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-omni_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-pha_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-priorities_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_hi-sectored_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-counters-aggregated_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-counters-singles_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-ialirt_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-nsw-angular_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-nsw-priority_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-nsw-species_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-pha_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-sw-angular_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-sw-priority_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/data/validation/imap_codice_l1a_lo-sw-species_20241110193700_v0.0.0.cdf +0 -0
- imap_processing/tests/codice/test_codice_l1a.py +110 -46
- imap_processing/tests/codice/test_decompress.py +4 -4
- imap_processing/tests/conftest.py +166 -10
- imap_processing/tests/ena_maps/conftest.py +51 -0
- imap_processing/tests/ena_maps/test_ena_maps.py +638 -109
- imap_processing/tests/ena_maps/test_map_utils.py +66 -43
- imap_processing/tests/ena_maps/test_spatial_utils.py +16 -20
- imap_processing/tests/hi/data/l0/H45_diag_fee_20250208.bin +0 -0
- imap_processing/tests/hi/data/l0/H45_diag_fee_20250208_verify.csv +205 -0
- imap_processing/tests/hi/test_hi_l1b.py +12 -15
- imap_processing/tests/hi/test_hi_l1c.py +234 -6
- imap_processing/tests/hi/test_l1a.py +30 -0
- imap_processing/tests/hi/test_science_direct_event.py +1 -1
- imap_processing/tests/hi/test_utils.py +24 -2
- imap_processing/tests/hit/helpers/l1_validation.py +39 -39
- imap_processing/tests/hit/test_data/hskp_sample.ccsds +0 -0
- imap_processing/tests/hit/test_data/imap_hit_l0_raw_20100105_v001.pkts +0 -0
- imap_processing/tests/hit/test_decom_hit.py +4 -0
- imap_processing/tests/hit/test_hit_l1a.py +24 -28
- imap_processing/tests/hit/test_hit_l1b.py +304 -40
- imap_processing/tests/hit/test_hit_l2.py +454 -0
- imap_processing/tests/hit/test_hit_utils.py +112 -2
- imap_processing/tests/hit/validation_data/hskp_sample_eu_3_6_2025.csv +89 -0
- imap_processing/tests/hit/validation_data/hskp_sample_raw.csv +89 -88
- imap_processing/tests/ialirt/test_data/l0/461971383-404.bin +0 -0
- imap_processing/tests/ialirt/test_data/l0/461971384-405.bin +0 -0
- imap_processing/tests/ialirt/test_data/l0/461971385-406.bin +0 -0
- imap_processing/tests/ialirt/test_data/l0/461971386-407.bin +0 -0
- imap_processing/tests/ialirt/test_data/l0/461971387-408.bin +0 -0
- imap_processing/tests/ialirt/test_data/l0/461971388-409.bin +0 -0
- imap_processing/tests/ialirt/test_data/l0/461971389-410.bin +0 -0
- imap_processing/tests/ialirt/test_data/l0/461971390-411.bin +0 -0
- imap_processing/tests/ialirt/test_data/l0/461971391-412.bin +0 -0
- imap_processing/tests/ialirt/test_data/l0/sample_decoded_i-alirt_data.csv +383 -0
- imap_processing/tests/ialirt/unit/test_grouping.py +81 -0
- imap_processing/tests/ialirt/unit/test_parse_mag.py +168 -0
- imap_processing/tests/ialirt/unit/test_process_swe.py +208 -3
- imap_processing/tests/ialirt/unit/test_time.py +16 -0
- imap_processing/tests/idex/conftest.py +62 -6
- imap_processing/tests/idex/test_data/imap_idex_l0_raw_20231218_v001.pkts +0 -0
- imap_processing/tests/idex/test_data/impact_14_tof_high_data.txt +4508 -4508
- imap_processing/tests/idex/test_idex_l1a.py +48 -4
- imap_processing/tests/idex/test_idex_l1b.py +3 -3
- imap_processing/tests/idex/test_idex_l2a.py +383 -0
- imap_processing/tests/lo/test_cdfs/imap_lo_l1a_de_20241022_v002.cdf +0 -0
- imap_processing/tests/lo/test_cdfs/imap_lo_l1a_spin_20241022_v002.cdf +0 -0
- imap_processing/tests/lo/test_lo_l1b.py +148 -4
- imap_processing/tests/lo/test_lo_science.py +1 -0
- imap_processing/tests/mag/conftest.py +69 -0
- imap_processing/tests/mag/test_mag_decom.py +1 -1
- imap_processing/tests/mag/test_mag_l1a.py +38 -0
- imap_processing/tests/mag/test_mag_l1b.py +34 -53
- imap_processing/tests/mag/test_mag_l1c.py +251 -20
- imap_processing/tests/mag/test_mag_validation.py +109 -25
- imap_processing/tests/mag/validation/L1b/T009/MAGScience-normal-(2,2)-8s-20250204-16h39.csv +17 -0
- imap_processing/tests/mag/validation/L1b/T009/mag-l1a-l1b-t009-magi-out.csv +16 -16
- imap_processing/tests/mag/validation/L1b/T009/mag-l1a-l1b-t009-mago-out.csv +16 -16
- imap_processing/tests/mag/validation/L1b/T010/MAGScience-normal-(2,2)-8s-20250206-12h05.csv +17 -0
- imap_processing/tests/mag/validation/L1b/T011/MAGScience-normal-(2,2)-8s-20250204-16h08.csv +17 -0
- imap_processing/tests/mag/validation/L1b/T011/mag-l1a-l1b-t011-magi-out.csv +16 -16
- imap_processing/tests/mag/validation/L1b/T011/mag-l1a-l1b-t011-mago-out.csv +16 -16
- imap_processing/tests/mag/validation/L1b/T012/MAGScience-normal-(2,2)-8s-20250204-16h08.csv +17 -0
- imap_processing/tests/mag/validation/L1b/T012/data.bin +0 -0
- imap_processing/tests/mag/validation/L1b/T012/field_like_all_ranges.txt +19200 -0
- imap_processing/tests/mag/validation/L1b/T012/mag-l1a-l1b-t012-cal.cdf +0 -0
- imap_processing/tests/mag/validation/L1b/T012/mag-l1a-l1b-t012-in.csv +17 -0
- imap_processing/tests/mag/validation/L1b/T012/mag-l1a-l1b-t012-magi-out.csv +17 -0
- imap_processing/tests/mag/validation/L1b/T012/mag-l1a-l1b-t012-mago-out.csv +17 -0
- imap_processing/tests/mag/validation/imap_calibration_mag_20240229_v01.cdf +0 -0
- imap_processing/tests/spacecraft/__init__.py +0 -0
- imap_processing/tests/spacecraft/data/SSR_2024_190_20_08_12_0483851794_2_DA_apid0594_1packet.pkts +0 -0
- imap_processing/tests/spacecraft/test_quaternions.py +71 -0
- imap_processing/tests/spice/test_data/fake_repoint_data.csv +5 -0
- imap_processing/tests/spice/test_geometry.py +6 -9
- imap_processing/tests/spice/test_repoint.py +111 -0
- imap_processing/tests/swapi/test_swapi_l1.py +7 -3
- imap_processing/tests/swe/l0_data/2024051010_SWE_HK_packet.bin +0 -0
- imap_processing/tests/swe/l0_data/2024051011_SWE_CEM_RAW_packet.bin +0 -0
- imap_processing/tests/swe/l0_validation_data/idle_export_eu.SWE_APP_HK_20240510_092742.csv +49 -0
- imap_processing/tests/swe/l0_validation_data/idle_export_eu.SWE_CEM_RAW_20240510_092742.csv +593 -0
- imap_processing/tests/swe/test_swe_l1a.py +18 -0
- imap_processing/tests/swe/test_swe_l1a_cem_raw.py +52 -0
- imap_processing/tests/swe/test_swe_l1a_hk.py +68 -0
- imap_processing/tests/swe/test_swe_l1b_science.py +23 -4
- imap_processing/tests/swe/test_swe_l2.py +112 -30
- imap_processing/tests/test_cli.py +2 -2
- imap_processing/tests/test_utils.py +138 -16
- imap_processing/tests/ultra/data/l0/FM45_UltraFM45_Functional_2024-01-22T0105_20240122T010548.CCSDS +0 -0
- imap_processing/tests/ultra/data/l0/ultra45_raw_sc_ultraimgrates_20220530_00.csv +164 -0
- imap_processing/tests/ultra/{test_data → data}/l0/ultra45_raw_sc_ultrarawimg_withFSWcalcs_FM45_40P_Phi28p5_BeamCal_LinearScan_phi2850_theta-000_20240207T102740.csv +3243 -3243
- imap_processing/tests/ultra/data/mock_data.py +341 -0
- imap_processing/tests/ultra/unit/conftest.py +69 -26
- imap_processing/tests/ultra/unit/test_badtimes.py +2 -0
- imap_processing/tests/ultra/unit/test_cullingmask.py +4 -0
- imap_processing/tests/ultra/unit/test_de.py +12 -4
- imap_processing/tests/ultra/unit/test_decom_apid_881.py +44 -0
- imap_processing/tests/ultra/unit/test_spacecraft_pset.py +78 -0
- imap_processing/tests/ultra/unit/test_ultra_l1a.py +28 -12
- imap_processing/tests/ultra/unit/test_ultra_l1b.py +34 -6
- imap_processing/tests/ultra/unit/test_ultra_l1b_culling.py +22 -26
- imap_processing/tests/ultra/unit/test_ultra_l1b_extended.py +86 -51
- imap_processing/tests/ultra/unit/test_ultra_l1c_pset_bins.py +94 -52
- imap_processing/ultra/l0/decom_tools.py +6 -5
- imap_processing/ultra/l1a/ultra_l1a.py +28 -56
- imap_processing/ultra/l1b/de.py +72 -28
- imap_processing/ultra/l1b/extendedspin.py +12 -14
- imap_processing/ultra/l1b/ultra_l1b.py +34 -9
- imap_processing/ultra/l1b/ultra_l1b_culling.py +65 -29
- imap_processing/ultra/l1b/ultra_l1b_extended.py +64 -19
- imap_processing/ultra/l1c/spacecraft_pset.py +86 -0
- imap_processing/ultra/l1c/ultra_l1c.py +7 -4
- imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +112 -61
- imap_processing/ultra/lookup_tables/ultra_90_dps_exposure_compressed.cdf +0 -0
- imap_processing/ultra/utils/ultra_l1_utils.py +20 -2
- imap_processing/utils.py +68 -28
- {imap_processing-0.11.0.dist-info → imap_processing-0.12.0.dist-info}/METADATA +8 -5
- {imap_processing-0.11.0.dist-info → imap_processing-0.12.0.dist-info}/RECORD +250 -199
- imap_processing/cdf/config/imap_mag_l1_variable_attrs.yaml +0 -237
- imap_processing/hi/l1a/housekeeping.py +0 -27
- imap_processing/tests/codice/data/imap_codice_l1a_hi-counters-aggregated_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_hi-counters-singles_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_hi-omni_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_hi-sectored_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_hskp_20100101_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-counters-aggregated_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-counters-singles_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-nsw-angular_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-nsw-priority_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-nsw-species_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-sw-angular_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-sw-priority_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1a_lo-sw-species_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_hi-counters-aggregated_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_hi-counters-singles_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_hi-omni_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_hi-sectored_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_hskp_20100101_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-counters-aggregated_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-counters-singles_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-nsw-angular_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-nsw-priority_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-nsw-species_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-sw-angular_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-sw-priority_20240429_v001.cdf +0 -0
- imap_processing/tests/codice/data/imap_codice_l1b_lo-sw-species_20240429_v001.cdf +0 -0
- imap_processing/tests/hi/data/l1/imap_hi_l1b_45sensor-de_20250415_v999.cdf +0 -0
- imap_processing/tests/hit/PREFLIGHT_raw_record_2023_256_15_59_04_apid1251.pkts +0 -0
- imap_processing/tests/hit/PREFLIGHT_raw_record_2023_256_15_59_04_apid1252.pkts +0 -0
- imap_processing/tests/hit/validation_data/hskp_sample_eu.csv +0 -89
- imap_processing/tests/hit/validation_data/sci_sample_raw1.csv +0 -29
- imap_processing/tests/idex/test_data/imap_idex_l0_raw_20231214_v001.pkts +0 -0
- imap_processing/tests/lo/test_cdfs/imap_lo_l1a_de_20100101_v001.cdf +0 -0
- imap_processing/tests/lo/test_cdfs/imap_lo_l1a_spin_20100101_v001.cdf +0 -0
- imap_processing/tests/ultra/test_data/mock_data.py +0 -161
- imap_processing/ultra/l1c/pset.py +0 -40
- /imap_processing/tests/ultra/{test_data → data}/l0/FM45_40P_Phi28p5_BeamCal_LinearScan_phi28.50_theta-0.00_20240207T102740.CCSDS +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l0/FM45_7P_Phi0.0_BeamCal_LinearScan_phi0.04_theta-0.01_20230821T121304.CCSDS +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l0/FM45_TV_Cycle6_Hot_Ops_Front212_20240124T063837.CCSDS +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l0/Ultra45_EM_SwRI_Cal_Run7_ThetaScan_20220530T225054.CCSDS +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l0/ultra45_raw_sc_auxdata_Ultra45_EM_SwRI_Cal_Run7_ThetaScan_20220530T225054.csv +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l0/ultra45_raw_sc_enaphxtofhangimg_FM45_TV_Cycle6_Hot_Ops_Front212_20240124T063837.csv +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l0/ultra45_raw_sc_ultraimgrates_Ultra45_EM_SwRI_Cal_Run7_ThetaScan_20220530T225054.csv +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l0/ultra45_raw_sc_ultrarawimgevent_FM45_7P_Phi00_BeamCal_LinearScan_phi004_theta-001_20230821T121304.csv +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l1/dps_exposure_helio_45_E1.cdf +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l1/dps_exposure_helio_45_E12.cdf +0 -0
- /imap_processing/tests/ultra/{test_data → data}/l1/dps_exposure_helio_45_E24.cdf +0 -0
- {imap_processing-0.11.0.dist-info → imap_processing-0.12.0.dist-info}/LICENSE +0 -0
- {imap_processing-0.11.0.dist-info → imap_processing-0.12.0.dist-info}/WHEEL +0 -0
- {imap_processing-0.11.0.dist-info → imap_processing-0.12.0.dist-info}/entry_points.txt +0 -0
imap_processing/mag/l1c/mag_l1c.py
CHANGED

@@ -1,8 +1,18 @@
 """MAG L1C processing module."""
 
+import logging
+from pathlib import Path
+from typing import Optional
+
+import numpy as np
 import xarray as xr
+import yaml
 
 from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
+from imap_processing.mag.constants import ModeFlags
+from imap_processing.mag.l1c.interpolation_methods import InterpolationFunction
+
+logger = logging.getLogger(__name__)
 
 
 def mag_l1c(
@@ -30,7 +40,10 @@ def mag_l1c(
     output_dataset : xr.Dataset
         L1C data set.
     """
-    # TODO:
+    # TODO:
+    # find missing sequences and output them
+    # add missing interpolation methods
+
     input_logical_source_1 = first_input_dataset.attrs["Logical_source"]
     if isinstance(first_input_dataset.attrs["Logical_source"], list):
         input_logical_source_1 = first_input_dataset.attrs["Logical_source"][0]
@@ -39,19 +52,444 @@ def mag_l1c(
     if isinstance(second_input_dataset.attrs["Logical_source"], list):
         input_logical_source_2 = second_input_dataset.attrs["Logical_source"][0]
 
-    if "norm" in input_logical_source_1:
-
-
-
-
-
+    if "norm" in input_logical_source_1 and "burst" in input_logical_source_2:
+        normal_mode_dataset = first_input_dataset
+        burst_mode_dataset = second_input_dataset
+        output_logical_source = input_logical_source_1.replace("l1b", "l1c")
+    elif "norm" in input_logical_source_2 and "burst" in input_logical_source_1:
+        normal_mode_dataset = second_input_dataset
+        burst_mode_dataset = first_input_dataset
+        output_logical_source = input_logical_source_2.replace("l1b", "l1c")
+
     else:
-        raise RuntimeError(
+        raise RuntimeError(
+            "L1C requires one normal mode and one burst mode input " "file."
+        )
+
+    with open(
+        Path(__file__).parent.parent / "imap_mag_sdc-configuration_v001.yaml"
+    ) as f:
+        configuration = yaml.safe_load(f)
+
+    interp_function = InterpolationFunction[configuration["L1C_interpolation_method"]]
+    completed_timeline = process_mag_l1c(
+        normal_mode_dataset, burst_mode_dataset, interp_function
+    )
 
     attribute_manager = ImapCdfAttributes()
     attribute_manager.add_instrument_global_attrs("mag")
     attribute_manager.add_global_attribute("Data_version", version)
+    attribute_manager.add_instrument_variable_attrs("mag", "l1c")
+    compression = xr.DataArray(
+        np.arange(2),
+        name="compression",
+        dims=["compression"],
+        attrs=attribute_manager.get_variable_attributes(
+            "compression_attrs", check_schema=False
+        ),
+    )
+
+    direction = xr.DataArray(
+        np.arange(4),
+        name="direction",
+        dims=["direction"],
+        attrs=attribute_manager.get_variable_attributes(
+            "direction_attrs", check_schema=False
+        ),
+    )
+
+    epoch_time = xr.DataArray(
+        completed_timeline[:, 0],
+        name="epoch",
+        dims=["epoch"],
+        attrs=attribute_manager.get_variable_attributes("epoch"),
+    )
+
+    direction_label = xr.DataArray(
+        direction.values.astype(str),
+        name="direction_label",
+        dims=["direction_label"],
+        attrs=attribute_manager.get_variable_attributes(
+            "direction_label", check_schema=False
+        ),
+    )
+
+    compression_label = xr.DataArray(
+        compression.values.astype(str),
+        name="compression_label",
+        dims=["compression_label"],
+        attrs=attribute_manager.get_variable_attributes(
+            "compression_label", check_schema=False
+        ),
+    )
+    global_attributes = attribute_manager.get_global_attributes(output_logical_source)
+    # TODO merge missing sequences? replace?
+    global_attributes["missing_sequences"] = ""
+
+    try:
+        global_attributes["is_mago"] = normal_mode_dataset.attrs["is_mago"]
+        global_attributes["is_active"] = normal_mode_dataset.attrs["is_active"]
+        global_attributes["missing_sequences"] = normal_mode_dataset.attrs[
+            "missing_sequences"
+        ]
+    except KeyError as e:
+        logger.info(
+            f"Key error when assigning global attributes, attribute not found in "
+            f"L1B file with logical source "
+            f"{normal_mode_dataset.attrs['Logical_source']}: {e}"
+        )
+
+    global_attributes["interpolation_method"] = interp_function.name
+
+    output_dataset = xr.Dataset(
+        coords={
+            "epoch": epoch_time,
+            "direction": direction,
+            "direction_label": direction_label,
+            "compression": compression,
+            "compression_label": compression_label,
+        },
+        attrs=global_attributes,
+    )
+
+    output_dataset["vectors"] = xr.DataArray(
+        completed_timeline[:, 1:5],
+        name="vectors",
+        dims=["epoch", "direction"],
+        attrs=attribute_manager.get_variable_attributes("vector_attrs"),
+    )
+
+    output_dataset["vector_magnitude"] = xr.apply_ufunc(
+        lambda x: np.linalg.norm(x[:4]),
+        output_dataset["vectors"],
+        input_core_dims=[["direction"]],
+        output_core_dims=[[]],
+        vectorize=True,
+    )
+    # output_dataset['vector_magnitude'].attrs =
+    # attribute_manager.get_variable_attributes("vector_magnitude_attrs")
 
-    output_dataset
+    output_dataset["compression_flags"] = xr.DataArray(
+        completed_timeline[:, 6:8],
+        name="compression_flags",
+        dims=["epoch", "compression"],
+        attrs=attribute_manager.get_variable_attributes("compression_flags_attrs"),
+    )
+
+    output_dataset["generated_flag"] = xr.DataArray(
+        completed_timeline[:, 5],
+        name="generated_flag",
+        dims=["epoch"],
+        # attrs=attribute_manager.get_variable_attributes("generated_flag_attrs"),
+    )
 
     return output_dataset
+
+
+def process_mag_l1c(
+    normal_mode_dataset: xr.Dataset,
+    burst_mode_dataset: xr.Dataset,
+    interpolation_function: InterpolationFunction,
+) -> np.ndarray:
+    """
+    Create MAG L1C data from L1B datasets.
+
+    This function starts from the normal mode dataset and completes the following steps:
+    1. find all the gaps in the dataset
+    2. generate a new timeline with the gaps filled
+    3. fill the timeline with normal mode data (so, all the non-gap timestamps)
+    4. interpolate the gaps using the burst mode data and the method specified in
+    interpolation_function.
+
+    It returns an (n, 8) shaped array:
+    0 - epoch (timestamp)
+    1-4 - vector x, y, z, and range
+    5 - generated flag (0 for normal data, 1 for interpolated data, -1 for missing data)
+    6-7 - compression flags (is_compressed, compression_width)
+
+    Parameters
+    ----------
+    normal_mode_dataset : xarray.Dataset
+        The normal mode dataset, which acts as a base for the output.
+    burst_mode_dataset : xarray.Dataset
+        The burst mode dataset, which is used to fill in the gaps in the normal mode.
+    interpolation_function : InterpolationFunction
+        The interpolation function to use to fill in the gaps.
+
+    Returns
+    -------
+    np.ndarray
+        An (n, 8) shaped array containing the completed timeline.
+    """
+    norm_epoch = normal_mode_dataset["epoch"].data
+    vecsec_attr = normal_mode_dataset.attrs["vectors_per_second"]
+
+    output_dataset = normal_mode_dataset.copy(deep=True)
+    output_dataset["sample_interpolated"] = xr.DataArray(
+        np.zeros(len(normal_mode_dataset))
+    )
+
+    gaps = find_all_gaps(norm_epoch, vecsec_attr)
+
+    new_timeline = generate_timeline(norm_epoch, gaps)
+    norm_filled = fill_normal_data(normal_mode_dataset, new_timeline)
+    interpolated = interpolate_gaps(
+        burst_mode_dataset, gaps, norm_filled, interpolation_function
+    )
+
+    return interpolated
+
+
+def fill_normal_data(
+    normal_dataset: xr.Dataset, new_timeline: np.ndarray
+) -> np.ndarray:
+    """
+    Fill the new timeline with the normal mode data.
+
+    If the timestamp exists in the normal mode data, it will be filled in the output.
+
+    Parameters
+    ----------
+    normal_dataset : xr.Dataset
+        The normal mode dataset.
+    new_timeline : np.ndarray
+        A 1D array of timestamps to fill.
+
+    Returns
+    -------
+    np.ndarray
+        An (n, 8) shaped array containing the timeline filled with normal mode data.
+        Gaps are marked as -1 in the generated flag column at index 5.
+        Indices: 0 - epoch, 1-4 - vector x, y, z, and range, 5 - generated flag,
+        6-7 - compression flags.
+    """
+    # TODO: fill with FILLVAL?
+    filled_timeline: np.ndarray = np.zeros((len(new_timeline), 8))
+    filled_timeline[:, 0] = new_timeline
+    # Flags, will also indicate any missed timestamps
+    filled_timeline[:, 5] = ModeFlags.MISSING.value
+
+    for index, timestamp in enumerate(normal_dataset["epoch"].data):
+        timeline_index = np.searchsorted(new_timeline, timestamp)
+        filled_timeline[timeline_index, 1:5] = normal_dataset["vectors"].data[index]
+        filled_timeline[timeline_index, 5] = ModeFlags.NORM.value
+        filled_timeline[timeline_index, 6:8] = normal_dataset["compression_flags"].data[
+            index
+        ]
+
+    return filled_timeline
+
+
+def interpolate_gaps(
+    burst_dataset: xr.Dataset,
+    gaps: np.ndarray,
+    filled_norm_timeline: np.ndarray,
+    interpolation_function: InterpolationFunction,
+) -> np.ndarray:
+    """
+    Interpolate the gaps in the filled timeline using the burst mode data.
+
+    Returns an array that matches the format of filled_norm_timeline, with gaps filled
+    using interpolated burst data.
+
+    Parameters
+    ----------
+    burst_dataset : xarray.Dataset
+        The L1B burst mode dataset.
+    gaps : numpy.ndarray
+        An array of gaps to fill, with shape (n, 2) where n is the number of gaps.
+    filled_norm_timeline : numpy.ndarray
+        Timeline filled with normal mode data in the shape (n, 8).
+    interpolation_function : InterpolationFunction
+        The interpolation function to use to fill in the gaps.
+
+    Returns
+    -------
+    numpy.ndarray
+        An array of shape (n, 8) containing the fully filled timeline.
+        Indices: 0 - epoch, 1-4 - vector x, y, z, and range, 5 - generated flag,
+        6-7 - compression flags.
+    """
+    burst_epochs = burst_dataset["epoch"].data
+    # Exclude range values
+    burst_vectors = burst_dataset["vectors"].data
+
+    for gap in gaps:
+        # TODO: we might need a few inputs before or after start/end
+        burst_start = (np.abs(burst_epochs - gap[0])).argmin()
+        burst_end = (np.abs(burst_epochs - gap[1])).argmin()
+        gap_timeline = filled_norm_timeline[
+            np.nonzero(
+                (filled_norm_timeline > gap[0]) & (filled_norm_timeline < gap[1])
+            )
+        ]
+        # do not include range
+        gap_fill = interpolation_function(
+            burst_vectors[burst_start:burst_end, :3],
+            burst_epochs[burst_start:burst_end],
+            gap_timeline,
+        )
+
+        # gaps should not have data in timeline, still check it
+        for index, timestamp in enumerate(gap_timeline):
+            timeline_index = np.searchsorted(filled_norm_timeline[:, 0], timestamp)
+            if sum(filled_norm_timeline[timeline_index, 1:4]) == 0:
+                filled_norm_timeline[timeline_index, 1:4] = gap_fill[index]
+                filled_norm_timeline[timeline_index, 4] = burst_vectors[
+                    burst_start + index, 3
+                ]
+                filled_norm_timeline[timeline_index, 5] = ModeFlags.BURST.value
+                filled_norm_timeline[timeline_index, 6:8] = burst_dataset[
+                    "compression_flags"
+                ].data[burst_start + index]
+
+    return filled_norm_timeline
+
+
+def generate_timeline(epoch_data: np.ndarray, gaps: np.ndarray) -> np.ndarray:
+    """
+    Generate a new timeline from existing, gap-filled timeline and gaps.
+
+    The gaps are generated at a .5 second cadence, regardless of the cadence of the
+    existing data.
+
+    Parameters
+    ----------
+    epoch_data : numpy.ndarray
+        The existing timeline data, in the shape (n,).
+    gaps : numpy.ndarray
+        An array of gaps to fill, with shape (n, 2) where n is the number of gaps.
+        The gap is specified as (start, end) where start and end both exist in the
+        timeline already.
+
+    Returns
+    -------
+    numpy.ndarray
+        The new timeline, filled with the existing data and the generated gaps.
+    """
+    full_timeline: np.ndarray = np.zeros(0)
+
+    # When we have our gaps, generate the full timeline
+    last_gap = 0
+    for gap in gaps:
+        gap_start_index = np.where(epoch_data == gap[0])[0]
+        gap_end_index = np.where(epoch_data == gap[1])[0]
+        if gap_start_index.size != 1 or gap_end_index.size != 1:
+            raise ValueError("Gap start or end not found in input timeline")
+
+        full_timeline = np.concatenate(
+            (
+                full_timeline,
+                epoch_data[last_gap : gap_start_index[0]],
+                generate_missing_timestamps(gap),
+            )
+        )
+        last_gap = gap_end_index[0]
+
+    full_timeline = np.concatenate((full_timeline, epoch_data[last_gap:]))
+
+    return full_timeline
+
+
+def find_all_gaps(
+    epoch_data: np.ndarray, vectors_per_second_attr: Optional[str] = None
+) -> np.ndarray:
+    """
+    Find all the gaps in the epoch data.
+
+    If vectors_per_second_attr is provided, it will be used to find the gaps. Otherwise,
+    it will assume a nominal 1/2 second gap. A gap is defined as missing data from the
+    expected sequence as defined by vectors_per_second_attr.
+
+    Parameters
+    ----------
+    epoch_data : numpy.ndarray
+        The epoch data to find gaps in.
+    vectors_per_second_attr : str, optional
+        A string of the form "start:vecsec,start:vecsec" where start is the time in
+        seconds and vecsec is the number of vectors per second. This will be used to
+        find the gaps. If not provided, a 1/2 second gap is assumed.
+
+    Returns
+    -------
+    numpy.ndarray
+        An array of gaps with shape (n, 2) where n is the number of gaps. The gaps are
+        specified as (start, end) where start and end both exist in the timeline.
+    """
+    gaps: np.ndarray = np.zeros((0, 2))
+    if vectors_per_second_attr is not None and vectors_per_second_attr != "":
+        vecsec_segments = vectors_per_second_attr.split(",")
+        end_index = epoch_data.shape[0]
+        for vecsec_segment in reversed(vecsec_segments):
+            start_time, vecsec = vecsec_segment.split(":")
+            start_index = np.where(int(start_time) == epoch_data)[0][0]
+            gaps = np.concatenate(
+                (find_gaps(epoch_data[start_index : end_index + 1], int(vecsec)), gaps)
+            )
+            end_index = start_index
+    else:
+        # TODO: How to handle this case
+        gaps = find_gaps(epoch_data, 2)  # Assume half second gaps
+        # alternatively, I could try and find the average time between vectors
+
+    return gaps
+
+
+def find_gaps(timeline_data: np.ndarray, vectors_per_second: int) -> np.ndarray:
+    """
+    Find gaps in timeline_data that are larger than 1/vectors_per_second.
+
+    Returns timestamps (start_gap, end_gap) where startgap and endgap both
+    exist in timeline data.
+
+    Parameters
+    ----------
+    timeline_data : numpy.ndarray
+        Array of timestamps.
+    vectors_per_second : int
+        Number of vectors expected per second.
+
+    Returns
+    -------
+    numpy.ndarray
+        Array of timestamps of shape (n, 2) containing n gaps with start_gap and
+        end_gap. Start_gap and end_gap both correspond to points in timeline_data.
+    """
+    # Expected difference between timestamps in nanoseconds.
+    expected_gap = 1 / vectors_per_second * 1e9
+
+    diffs = abs(timeline_data[:-1] - np.roll(timeline_data, -1)[:-1])
+    gap_index = np.where(diffs != expected_gap)[0]
+    output: np.ndarray = np.zeros((len(gap_index), 2))
+
+    for index, gap in enumerate(gap_index):
+        output[index, :] = [timeline_data[gap], timeline_data[gap + 1]]
+
+    # TODO: How should I handle/find gaps at the end?
+    return output
+
+
+def generate_missing_timestamps(gap: np.ndarray) -> np.ndarray:
+    """
+    Generate a new timeline from input gaps.
+
+    Any gaps specified in gaps will be filled with timestamps that are 0.5 seconds
+    apart.
+
+    Parameters
+    ----------
+    gap : numpy.ndarray
+        Array of timestamps of shape (2,) containing n gaps with start_gap and
+        end_gap. Start_gap and end_gap both correspond to points in timeline_data.
+
+    Returns
+    -------
+    full_timeline: numpy.ndarray
+        Completed timeline.
+    """
+    # Generated timestamps should always be 0.5 seconds apart
+    # TODO: is this in the configuration file?
+    difference_ns = 0.5 * 1e9
+
+    output: np.ndarray = np.arange(gap[0], gap[1], difference_ns)
+    return output
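The core of the new L1C module is its gap handling: find_gaps flags any step in the epoch array that deviates from the expected 1/vectors_per_second cadence, and generate_missing_timestamps fills each (start, end) pair at a fixed 0.5 s (5e8 ns) spacing before burst data is interpolated onto those timestamps. Below is a minimal standalone sketch of that logic on a toy nanosecond timeline; the _sketch functions only mirror the behavior shown in this diff and are illustrative, not the packaged implementation.

import numpy as np


def find_gaps_sketch(timeline_ns: np.ndarray, vectors_per_second: int) -> np.ndarray:
    # Expected spacing between consecutive timestamps, in nanoseconds.
    expected_gap = 1 / vectors_per_second * 1e9
    diffs = np.abs(np.diff(timeline_ns))
    gap_index = np.where(diffs != expected_gap)[0]
    # Report each gap as the (start, end) pair of existing timestamps around it.
    return np.array([[timeline_ns[i], timeline_ns[i + 1]] for i in gap_index])


def generate_missing_timestamps_sketch(gap: np.ndarray) -> np.ndarray:
    # Generated timestamps are always 0.5 s apart, matching the module's fixed cadence.
    return np.arange(gap[0], gap[1], 0.5 * 1e9)


# Toy timeline at 2 vectors/second (0.5 s cadence) with one dropped second of data.
epochs = np.array([0.0, 0.5e9, 1.0e9, 2.5e9, 3.0e9])
gaps = find_gaps_sketch(epochs, vectors_per_second=2)
print(gaps)  # one gap, bracketed by existing timestamps: [1.0e9, 2.5e9]
print(generate_missing_timestamps_sketch(gaps[0]))  # fills it at 0.5 s steps: 1.0e9, 1.5e9, 2.0e9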
imap_processing/quality_flags.py
CHANGED