imap-processing 0.19.3-py3-none-any.whl → 0.19.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of imap-processing might be problematic.

Files changed (33)
  1. imap_processing/_version.py +2 -2
  2. imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +90 -91
  3. imap_processing/cdf/config/imap_codice_l1b_variable_attrs.yaml +6 -6
  4. imap_processing/cdf/config/imap_enamaps_l2-common_variable_attrs.yaml +18 -23
  5. imap_processing/cdf/config/imap_hi_global_cdf_attrs.yaml +1 -2
  6. imap_processing/cdf/config/imap_hi_variable_attrs.yaml +1 -0
  7. imap_processing/cdf/config/imap_ultra_l1c_variable_attrs.yaml +8 -6
  8. imap_processing/cdf/utils.py +5 -0
  9. imap_processing/cli.py +72 -54
  10. imap_processing/codice/codice_l1a.py +44 -6
  11. imap_processing/codice/codice_l1b.py +35 -6
  12. imap_processing/codice/constants.py +10 -6
  13. imap_processing/ena_maps/ena_maps.py +2 -7
  14. imap_processing/glows/l1b/glows_l1b.py +29 -21
  15. imap_processing/hi/hi_l1a.py +49 -29
  16. imap_processing/hi/hi_l1b.py +34 -0
  17. imap_processing/hi/hi_l1c.py +23 -17
  18. imap_processing/hi/hi_l2.py +225 -81
  19. imap_processing/ialirt/utils/create_xarray.py +11 -1
  20. imap_processing/lo/l1b/lo_l1b.py +111 -77
  21. imap_processing/lo/l1c/lo_l1c.py +10 -11
  22. imap_processing/lo/l2/lo_l2.py +43 -22
  23. imap_processing/mag/l1c/interpolation_methods.py +9 -1
  24. imap_processing/mag/l1c/mag_l1c.py +99 -45
  25. imap_processing/ultra/l1c/helio_pset.py +2 -2
  26. imap_processing/ultra/l1c/spacecraft_pset.py +7 -4
  27. imap_processing/ultra/l2/ultra_l2.py +51 -24
  28. imap_processing/ultra/utils/ultra_l1_utils.py +4 -4
  29. {imap_processing-0.19.3.dist-info → imap_processing-0.19.4.dist-info}/METADATA +1 -1
  30. {imap_processing-0.19.3.dist-info → imap_processing-0.19.4.dist-info}/RECORD +33 -33
  31. {imap_processing-0.19.3.dist-info → imap_processing-0.19.4.dist-info}/LICENSE +0 -0
  32. {imap_processing-0.19.3.dist-info → imap_processing-0.19.4.dist-info}/WHEEL +0 -0
  33. {imap_processing-0.19.3.dist-info → imap_processing-0.19.4.dist-info}/entry_points.txt +0 -0
imap_processing/lo/l1b/lo_l1b.py
@@ -3,9 +3,9 @@
 import logging
 from dataclasses import Field
 from pathlib import Path
-from typing import Any
 
 import numpy as np
+import pandas as pd
 import xarray as xr
 
 from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
@@ -16,7 +16,11 @@ from imap_processing.lo.l1b.tof_conversions import (
     TOF2_CONV,
     TOF3_CONV,
 )
-from imap_processing.spice.geometry import SpiceFrame, instrument_pointing
+from imap_processing.spice.geometry import (
+    SpiceFrame,
+    cartesian_to_latitudinal,
+    instrument_pointing,
+)
 from imap_processing.spice.repoint import get_pointing_times
 from imap_processing.spice.spin import get_spin_number
 from imap_processing.spice.time import met_to_ttj2000ns, ttj2000ns_to_et
@@ -68,11 +72,6 @@ def lo_l1b(sci_dependencies: dict, anc_dependencies: list) -> list[Path]:
     avg_spin_durations_per_cycle = get_avg_spin_durations_per_cycle(
         acq_start, acq_end
     )
-    # get spin angle (0 - 360 degrees) for each DE
-    spin_angle = get_spin_angle(l1a_de)
-    # calculate and set the spin bin based on the spin angle
-    # spin bins are 0 - 60 bins
-    l1b_de = set_spin_bin(l1b_de, spin_angle)
     # set the spin cycle for each direct event
     l1b_de = set_spin_cycle(pointing_start_met, l1a_de, l1b_de)
     # get spin start times for each event
@@ -100,13 +99,13 @@ def lo_l1b(sci_dependencies: dict, anc_dependencies: list) -> list[Path]:
     l1b_de = convert_tofs_to_eu(l1a_de, l1b_de, attr_mgr_l1a, attr_mgr_l1b)
     # set the species for each direct event
     l1b_de = identify_species(l1b_de)
-    # set the badtimes
-    l1b_de = set_bad_times(l1b_de)
     # set the pointing direction for each direct event
     l1b_de = set_pointing_direction(l1b_de)
     # calculate and set the pointing bin based on the spin phase
     # pointing bin is 3600 x 40 bins
     l1b_de = set_pointing_bin(l1b_de)
+    # set the badtimes
+    l1b_de = set_bad_times(l1b_de, anc_dependencies)
 
     return [l1b_de]
 
@@ -275,54 +274,6 @@ def get_avg_spin_durations_per_cycle(
     return avg_spin_durations_per_cycle
 
 
-def get_spin_angle(l1a_de: xr.Dataset) -> np.ndarray[np.float64] | Any:
-    """
-    Get the spin angle (0 - 360 degrees) for each DE.
-
-    Parameters
-    ----------
-    l1a_de : xarray.Dataset
-        The L1A DE dataset.
-
-    Returns
-    -------
-    spin_angle : np.ndarray
-        The spin angle for each DE.
-    """
-    de_times = l1a_de["de_time"].values
-    # DE Time is 12 bit DN. The max possible value is 4096
-    spin_angle = np.array(de_times / 4096 * 360, dtype=np.float64)
-    return spin_angle
-
-
-def set_spin_bin(l1b_de: xr.Dataset, spin_angle: np.ndarray) -> xr.Dataset:
-    """
-    Set the spin bin (0 - 60 bins) for each Direct Event where each bin is 6 degrees.
-
-    Parameters
-    ----------
-    l1b_de : xarray.Dataset
-        The L1B Direct Event dataset.
-    spin_angle : np.ndarray
-        The spin angle (0-360 degrees) for each Direct Event.
-
-    Returns
-    -------
-    l1b_de : xarray.Dataset
-        The L1B DE dataset with the spin bin added.
-    """
-    # Get the spin bin for each DE
-    # Spin bins are 0 - 60 where each bin is 6 degrees
-    spin_bin = (spin_angle // 6).astype(int)
-    l1b_de["spin_bin"] = xr.DataArray(
-        spin_bin,
-        dims=["epoch"],
-        # TODO: Add spin angle to YAML file
-        # attrs=attr_mgr.get_variable_attributes("spin_bin"),
-    )
-    return l1b_de
-
-
 def set_spin_cycle(
     pointing_start_met: float, l1a_de: xr.Dataset, l1b_de: xr.Dataset
 ) -> xr.Dataset:
@@ -734,7 +685,7 @@ def identify_species(l1b_de: xr.Dataset) -> xr.Dataset:
     return l1b_de
 
 
-def set_bad_times(l1b_de: xr.Dataset) -> xr.Dataset:
+def set_bad_times(l1b_de: xr.Dataset, anc_dependencies: list) -> xr.Dataset:
     """
     Set the bad times for each direct event.
 
@@ -742,18 +693,27 @@
     ----------
     l1b_de : xarray.Dataset
         The L1B DE dataset.
+    anc_dependencies : list
+        List of ancillary file paths.
 
     Returns
     -------
     l1b_de : xarray.Dataset
         The L1B DE dataset with the bad times added.
     """
-    # Initialize all times as not bad for now
-    # TODO: Update to set badtimes based on criteria that
-    # will be defined in the algorithm document
+    badtimes_df = lo_ancillary.read_ancillary_file(
+        next(str(s) for s in anc_dependencies if "bad-times" in str(s))
+    )
+
+    esa_steps = l1b_de["esa_step"].values
+    epochs = l1b_de["epoch"].values
+    spin_bins = l1b_de["spin_bin"].values
+
+    badtimes = set_bad_or_goodtimes(badtimes_df, epochs, esa_steps, spin_bins)
+
     # 1 = badtime, 0 = not badtime
     l1b_de["badtimes"] = xr.DataArray(
-        np.zeros(len(l1b_de["epoch"]), dtype=int),
+        badtimes,
         dims=["epoch"],
         # TODO: Add to yaml
         # attrs=attr_mgr.get_variable_attributes("bad_times"),
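
The rewired set_bad_times locates its ancillary table by scanning anc_dependencies for a path containing "bad-times". Note that next() over a generator with no default raises StopIteration when no such file is supplied. A minimal sketch of the lookup with hypothetical file names, using the default-argument form of next() to make the missing-file case explicit:

from pathlib import Path

# Hypothetical ancillary paths; the real dependency list comes from the pipeline.
anc_dependencies = [
    Path("imap_lo_l1b_good-times_20250101_v001.csv"),
    Path("imap_lo_l1b_bad-times_20250101_v001.csv"),
]

# Returns the first match, or None instead of raising StopIteration.
badtimes_path = next(
    (str(s) for s in anc_dependencies if "bad-times" in str(s)), None
)
print(badtimes_path)  # imap_lo_l1b_bad-times_20250101_v001.csv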
@@ -762,6 +722,65 @@ def set_bad_times(l1b_de: xr.Dataset) -> xr.Dataset:
     return l1b_de
 
 
+def set_bad_or_goodtimes(
+    times_df: pd.DataFrame,
+    epochs: np.ndarray,
+    esa_steps: np.ndarray,
+    spin_bins: np.ndarray,
+) -> np.ndarray:
+    """
+    Find the good/bad time flags for each epoch based on the provided times DataFrame.
+
+    Parameters
+    ----------
+    times_df : pd.DataFrame
+        Good or Bad times dataframe containing time ranges and corresponding flags.
+    epochs : np.ndarray
+        Array of epochs in TTJ2000ns format.
+    esa_steps : np.ndarray
+        Array of ESA steps corresponding to each epoch.
+    spin_bins : np.ndarray
+        Array of spin bins corresponding to each epoch.
+
+    Returns
+    -------
+    time_flags : np.ndarray
+        Array of good or bad time flags for each epoch.
+    """
+    if "BadTime_start" in times_df.columns and "BadTime_end" in times_df.columns:
+        times_start = met_to_ttj2000ns(times_df["BadTime_start"])
+        times_end = met_to_ttj2000ns(times_df["BadTime_end"])
+    elif "GoodTime_start" in times_df.columns and "GoodTime_end" in times_df.columns:
+        times_start = met_to_ttj2000ns(times_df["GoodTime_start"])
+        times_end = met_to_ttj2000ns(times_df["GoodTime_end"])
+    else:
+        raise ValueError("DataFrame must contain either BadTime or GoodTime columns.")
+
+    # Create masks for time and bin ranges using broadcasting
+    # the bin_start and bin_end are 6 degree bins and need to be converted to
+    # 0.1 degree bins to align with the spin_bins, so multiply by 60
+    time_mask = (epochs[:, None] >= times_start) & (epochs[:, None] <= times_end)
+    bin_mask = (spin_bins[:, None] >= times_df["bin_start"].values * 60) & (
+        spin_bins[:, None] <= times_df["bin_end"].values * 60
+    )
+
+    # Combined mask for epochs that fall within the time and bin ranges
+    combined_mask = time_mask & bin_mask
+
+    # Get the time flags for each epoch's esa_step from matching rows
+    time_flags = np.zeros(len(epochs), dtype=int)
+    for epoch_idx in range(len(epochs)):
+        matching_rows = np.where(combined_mask[epoch_idx])[0]
+        if len(matching_rows) > 0:
+            # Use the first matching row
+            row_idx = matching_rows[0]
+            esa_step = esa_steps[epoch_idx]
+            if f"E-Step{esa_step}" in times_df.columns:
+                time_flags[epoch_idx] = times_df[f"E-Step{esa_step}"].iloc[row_idx]
+
+    return time_flags
+
+
 def set_pointing_direction(l1b_de: xr.Dataset) -> xr.Dataset:
     """
     Set the pointing direction for each direct event.
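
The new set_bad_or_goodtimes helper builds an (n_epochs, n_rows) mask by broadcasting the event arrays against the table columns, then pulls the per-ESA-step flag from the first matching row. A toy run of the same broadcasting step (made-up numbers stand in for epochs; the real code converts the table's time columns with met_to_ttj2000ns first):

import numpy as np
import pandas as pd

# One bad-time row: epochs 10-20, 6-degree bins 0-1 (i.e. 0.1-degree bins
# 0-60 after the x60 conversion), flag 1 for ESA step 2.
times_df = pd.DataFrame(
    {"BadTime_start": [10], "BadTime_end": [20],
     "bin_start": [0], "bin_end": [1], "E-Step2": [1]}
)
epochs = np.array([5, 12, 15, 25])
spin_bins = np.array([30, 30, 90, 30])  # 0.1-degree spin bins

time_mask = (epochs[:, None] >= times_df["BadTime_start"].values) & (
    epochs[:, None] <= times_df["BadTime_end"].values
)
bin_mask = (spin_bins[:, None] >= times_df["bin_start"].values * 60) & (
    spin_bins[:, None] <= times_df["bin_end"].values * 60
)
print((time_mask & bin_mask).ravel())  # [False  True False False]

Only the second event falls inside both the time window and the bin range, so only it would pick up the E-Step2 flag.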
@@ -782,22 +801,31 @@ def set_pointing_direction(l1b_de: xr.Dataset) -> xr.Dataset:
     """
     # Get the pointing bin for each DE
     et = ttj2000ns_to_et(l1b_de["epoch"])
-
-    direction = instrument_pointing(et, SpiceFrame.IMAP_LO_BASE, SpiceFrame.IMAP_DPS)
+    # get the direction in HAE coordinates
+    direction = instrument_pointing(
+        et, SpiceFrame.IMAP_LO_BASE, SpiceFrame.IMAP_HAE, cartesian=True
+    )
     # TODO: Need to ask Lo what to do if a latitude is outside of the
     # +/-2 degree range. Is that possible?
-    l1b_de["direction_lon"] = xr.DataArray(
+    l1b_de["hae_x"] = xr.DataArray(
         direction[:, 0],
         dims=["epoch"],
         # TODO: Add direction_lon to YAML file
-        # attrs=attr_mgr.get_variable_attributes("direction_lon"),
+        # attrs=attr_mgr.get_variable_attributes("hae_x"),
     )
 
-    l1b_de["direction_lat"] = xr.DataArray(
+    l1b_de["hae_y"] = xr.DataArray(
         direction[:, 1],
         dims=["epoch"],
         # TODO: Add direction_lat to YAML file
-        # attrs=attr_mgr.get_variable_attributes("direction_lat"),
+        # attrs=attr_mgr.get_variable_attributes("hae_y"),
+    )
+
+    l1b_de["hae_z"] = xr.DataArray(
+        direction[:, 2],
+        dims=["epoch"],
+        # TODO: Add hae_z to YAML file
+        # attrs=attr_mgr.get_variable_attributes("hae_z"),
     )
 
     return l1b_de
@@ -807,7 +835,7 @@ def set_pointing_bin(l1b_de: xr.Dataset) -> xr.Dataset:
     """
     Set the pointing bin for each direct event.
 
-    The pointing bins are defined as 3600 bins for longitude and 40 bins for latitude.
+    The pointing bins are defined as 3600 bins for spin and 40 bins for off angle.
     Each bin is 0.1 degrees. The bins are defined as follows:
     Longitude bins: -180 to 180 degrees
     Latitude bins: -2 to 2 degrees
@@ -822,10 +850,16 @@ def set_pointing_bin(l1b_de: xr.Dataset) -> xr.Dataset:
     l1b_de : xarray.Dataset
         The L1B DE dataset with the pointing bins added.
     """
-    # First column: latitudes
-    lats = l1b_de["direction_lat"]
-    # Second column: longitudes
-    lons = l1b_de["direction_lon"]
+    x = l1b_de["hae_x"]
+    y = l1b_de["hae_y"]
+    z = l1b_de["hae_z"]
+    # convert the pointing direction to latitudinal coordinates
+    direction = cartesian_to_latitudinal(np.column_stack((x, y, z)))
+    # first column: radius (Not needed)
+    # second column: longitude
+    lons = direction[:, 1]
+    # third column: latitude
+    lats = direction[:, 2]
 
     # Define bin edges
     # 3600 bins, 0.1° each
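
The binning path assumes cartesian_to_latitudinal returns columns ordered (radius, longitude, latitude), per the inline comments. A rough numpy sketch of such a conversion, for illustration only (the packaged function wraps SPICE-style latitudinal coordinates and may differ in detail):

import numpy as np

def cartesian_to_latitudinal_sketch(xyz: np.ndarray) -> np.ndarray:
    # xyz: (n, 3) vectors; returns (n, 3) columns (radius, lon, lat) in degrees.
    x, y, z = xyz[:, 0], xyz[:, 1], xyz[:, 2]
    radius = np.sqrt(x**2 + y**2 + z**2)
    lon = np.degrees(np.arctan2(y, x))       # -180 to 180 degrees
    lat = np.degrees(np.arcsin(z / radius))  # -90 to 90 degrees
    return np.column_stack((radius, lon, lat))

print(cartesian_to_latitudinal_sketch(np.array([[1.0, 1.0, 0.0]])))
# [[ 1.41421356 45.          0.        ]]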
@@ -838,18 +872,18 @@ def set_pointing_bin(l1b_de: xr.Dataset) -> xr.Dataset:
     lon_bins = np.digitize(lons, lon_bins) - 1
     lat_bins = np.digitize(lats, lat_bins) - 1
 
-    l1b_de["pointing_bin_lon"] = xr.DataArray(
+    l1b_de["spin_bin"] = xr.DataArray(
         lon_bins,
         dims=["epoch"],
         # TODO: Add pointing_bin_lon to YAML file
-        # attrs=attr_mgr.get_variable_attributes("pointing_bin_lon"),
+        # attrs=attr_mgr.get_variable_attributes("spin_bin"),
     )
 
-    l1b_de["pointing_bin_lat"] = xr.DataArray(
+    l1b_de["off_angle_bin"] = xr.DataArray(
         lat_bins,
         dims=["epoch"],
         # TODO: Add point_bin_lat to YAML file
-        # attrs=attr_mgr.get_variable_attributes("pointing_bin_lat"),
+        # attrs=attr_mgr.get_variable_attributes("off_angle_bin"),
     )
 
     return l1b_de
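
np.digitize returns 1-based indices relative to the supplied edges, which is why the binning code subtracts 1. A quick check with the 3600 longitude edges used above:

import numpy as np

lon_edges = np.linspace(-180, 180, 3601)  # 3600 bins, 0.1 degrees each
lons = np.array([-180.0, -179.95, 0.0, 179.95])
print(np.digitize(lons, lon_edges) - 1)  # [   0    0 1800 3599]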
imap_processing/lo/l1c/lo_l1c.py
@@ -10,6 +10,7 @@ from scipy.stats import binned_statistic_dd
 
 from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
 from imap_processing.lo import lo_ancillary
+from imap_processing.lo.l1b.lo_l1b import set_bad_or_goodtimes
 from imap_processing.spice.repoint import get_pointing_times
 from imap_processing.spice.spin import get_spin_number
 from imap_processing.spice.time import met_to_ttj2000ns, ttj2000ns_to_met
@@ -68,7 +69,7 @@ def lo_l1c(sci_dependencies: dict, anc_dependencies: list) -> list[xr.Dataset]:
     logical_source = "imap_lo_l1c_pset"
     l1b_de = sci_dependencies["imap_lo_l1b_de"]
     l1b_goodtimes_only = filter_goodtimes(l1b_de, anc_dependencies)
-
+    # TODO: Need to handle case where no good times are found
     # Set the pointing start and end times based on the first epoch
     pointing_start_met, pointing_end_met = get_pointing_times(
         ttj2000ns_to_met(l1b_goodtimes_only["epoch"][0].item())
@@ -200,19 +201,17 @@ def filter_goodtimes(l1b_de: xr.Dataset, anc_dependencies: list) -> xr.Dataset:
         next(str(s) for s in anc_dependencies if "good-times" in str(s))
     )
 
-    # convert goodtimes from MET to TTJ2000
-    goodtimes_start = met_to_ttj2000ns(goodtimes_table_df["GoodTime_start"])
-    goodtimes_end = met_to_ttj2000ns(goodtimes_table_df["GoodTime_end"])
-
-    # Create a mask for epochs within any of the start/end time ranges
-    goodtimes_mask = np.zeros_like(l1b_de["epoch"], dtype=bool)
+    esa_steps = l1b_de["esa_step"].values
+    epochs = l1b_de["epoch"].values
+    spin_bins = l1b_de["spin_bin"].values
 
-    # Iterate over the good times and create a mask
-    for start, end in zip(goodtimes_start, goodtimes_end, strict=False):
-        goodtimes_mask |= (l1b_de["epoch"] >= start) & (l1b_de["epoch"] < end)
+    # Get a flag for each epoch: 1 = good time, 0 = not good time
+    goodtimes_mask = set_bad_or_goodtimes(
+        goodtimes_table_df, epochs, esa_steps, spin_bins
+    )
 
     # Filter the dataset using the mask
-    filtered_epochs = l1b_de.sel(epoch=goodtimes_mask)
+    filtered_epochs = l1b_de.sel(epoch=goodtimes_mask.astype(bool))
 
     return filtered_epochs
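
filter_goodtimes now reuses set_bad_or_goodtimes and keeps only epochs whose flag is 1, casting the integer flags to bool before selecting. A minimal sketch of that filtering on a toy dataset; the sketch uses isel for positional boolean indexing, which is the effect relied on here:

import numpy as np
import xarray as xr

ds = xr.Dataset(
    {"tof0": ("epoch", [1.0, 2.0, 3.0])},
    coords={"epoch": [100, 200, 300]},
)
goodtimes_mask = np.array([1, 0, 1])  # flags from set_bad_or_goodtimes
filtered = ds.isel(epoch=goodtimes_mask.astype(bool))
print(filtered["epoch"].values)  # [100 300]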
 
imap_processing/hi/hi_l2.py
@@ -853,8 +853,8 @@ def calculate_bootstrap_corrections(dataset: xr.Dataset) -> xr.Dataset:
     """
     logger.info("Applying bootstrap corrections")
 
-    # Table 3 bootstrap terms h_i,k
-    bootstrap_factor = np.array(
+    # Table 3 bootstrap terms h_i,k - convert to xarray for better dimension handling
+    bootstrap_factor_array = np.array(
         [
             [0, 0.03, 0.01, 0, 0, 0, 0, 0],
             [0, 0, 0.05, 0.02, 0.01, 0, 0, 0],
@@ -865,6 +865,15 @@
             [0, 0, 0, 0, 0, 0, 0, 0.75],
         ]
     )
+    # Create xarray DataArray with named dimensions for proper broadcasting
+    bootstrap_factor = xr.DataArray(
+        bootstrap_factor_array,
+        dims=["energy_i", "energy_k"],
+        coords={
+            "energy_i": list(range(7)),
+            "energy_k": list(range(8)),  # Include virtual channel 7 (index 7)
+        },
+    )
 
     # Equation 14
     bg_intensity = dataset["bg_rates"] / (
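
Named dimensions are what make the later vectorized update work: xarray aligns operands on dimension names, so a factor with dims ("energy",) broadcasts against an ("energy", "pixel") field without manual reshaping. A small demonstration of that alignment:

import numpy as np
import xarray as xr

factors = xr.DataArray([0.1, 0.2, 0.3], dims=["energy"])
field = xr.DataArray(np.ones((3, 4)), dims=["energy", "pixel"])
print((factors * field).sum(dim="energy").values)  # [0.6 0.6 0.6 0.6]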
@@ -910,6 +919,12 @@
     dataset["bootstrap_intensity_sys_err"] = xr.zeros_like(dataset["ena_intensity"])
 
     for i in range(6, -1, -1):
+        # Create views for the current energy channel to avoid repeated indexing
+        bootstrap_intensity_i = dataset["bootstrap_intensity"][0, i, ...]
+        bootstrap_intensity_var_i = dataset["bootstrap_intensity_var"][0, i, ...]
+        j_c_prime_i = j_c_prime[0, i, ...]
+        j_c_prime_var_i = j_c_prime_var[0, i, ...]
+
         # Initialize the variable with the non-summation term and virtual
         # channel energy subtraction first, then iterate through the other
         # channels which can be looked up via indexing
@@ -917,31 +932,37 @@
         # included the k=8 term here.
         # NOTE: The paper uses 1-based indexing and we use 0-based indexing
         # so there is an off-by-one difference in the indices.
-        dataset["bootstrap_intensity"][0, i, ...] = (
-            j_c_prime[0, i, ...] - bootstrap_factor[i, 7] * j_8_b[0, ...]
+        bootstrap_intensity_i[:] = (
+            j_c_prime_i - bootstrap_factor.sel(energy_i=i, energy_k=7) * j_8_b[0, ...]
         )
         # NOTE: We will square root at the end to get the uncertainty, but
         # all equations are with variances
-        dataset["bootstrap_intensity_var"][0, i, ...] = j_c_prime_var[0, i, ...]
-
-        for k in range(i + 1, 7):
-            logger.debug(
-                f"Subtracting bootstrap factor h_{i},{k} * J_{k}_b from J_{i}_b"
-            )
-            # Subtraction terms from equations 18-23
-            dataset["bootstrap_intensity"][0, i, ...] -= (
-                bootstrap_factor[i, k] * dataset["bootstrap_intensity"][0, k, ...]
-            )
-
-            # Summation terms from equations 25-30
-            dataset["bootstrap_intensity_var"][0, i, ...] += (
-                bootstrap_factor[i, k] ** 2
-            ) * dataset["bootstrap_intensity_var"][0, k, ...]
+        bootstrap_intensity_var_i[:] = j_c_prime_var_i
+
+        # Vectorized summation using xarray's built-in broadcasting
+        # Select the relevant k indices for summation (k = i+1 to 6)
+        k_indices = list(range(i + 1, 7))
+
+        # Get bootstrap factors for this i and the relevant k values
+        # Rename energy_k dimension to energy for alignment with intensity
+        bootstrap_factors_k = bootstrap_factor.sel(
+            energy_i=i, energy_k=k_indices
+        ).rename({"energy_k": "energy"})
+
+        # Get intensity slices - these will have an 'energy' dimension still
+        intensity_k = dataset["bootstrap_intensity"][0, k_indices, ...]
+        intensity_var_k = dataset["bootstrap_intensity_var"][0, k_indices, ...]
+
+        # Subtraction terms from equations 18-23 (xarray vectorized)
+        bootstrap_intensity_i -= (bootstrap_factors_k * intensity_k).sum(dim="energy")
+
+        # Summation terms from equations 25-30 (xarray vectorized)
+        bootstrap_intensity_var_i += (bootstrap_factors_k**2 * intensity_var_k).sum(
+            dim="energy"
+        )
 
         # Again zero any bootstrap fluxes that are negative
-        dataset["bootstrap_intensity"][0, i, ...].values[
-            dataset["bootstrap_intensity"][0, i, ...] < 0
-        ] = 0.0
+        bootstrap_intensity_i.values[bootstrap_intensity_i < 0] = 0.0
 
     # Equation 31 - systematic error propagation for bootstrap intensity
     # Handle division by zero: only compute where j_c_prime > 0
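
A quick numpy check that the vectorized subtraction matches the original k-loop for a single energy channel. Within one iteration over i both versions read the same J[k] values (the in-place updates happen between iterations), so per-step equivalence is what matters; shapes here are toy stand-ins:

import numpy as np

rng = np.random.default_rng(0)
h = rng.random((7, 8))      # bootstrap factors h_i,k
J = rng.random((7, 16))     # 7 energy channels, 16 spatial samples
i = 2

loop_total = np.zeros(16)
for k in range(i + 1, 7):
    loop_total += h[i, k] * J[k]

vec_total = (h[i, i + 1 : 7, None] * J[i + 1 : 7]).sum(axis=0)
print(np.allclose(loop_total, vec_total))  # True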
imap_processing/mag/l1c/interpolation_methods.py
@@ -229,11 +229,13 @@ def cic_filter(
     cic1 = cic1 / decimation_factor
     cic2 = np.convolve(cic1, cic1)
     delay = (len(cic2) - 1) // 2
+
     input_filtered = input_timestamps
+    vectors_filtered = lfilter(cic2, 1, input_vectors, axis=0)
     if delay != 0:
         input_filtered = input_timestamps[:-delay]
+        vectors_filtered = vectors_filtered[delay:]
 
-    vectors_filtered = lfilter(cic2, 1, input_vectors, axis=0)[delay:]
     return input_filtered, vectors_filtered
 
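
The trimming in cic_filter compensates for the group delay of the symmetric CIC kernel: lfilter shifts its output by (len(kernel) - 1) // 2 samples, so dropping delay samples from the head of the filtered vectors and the tail of the timestamps realigns them. A toy demonstration with a short symmetric kernel and a ramp input (after the filter's startup transient, the filtered ramp equals the aligned timestamps):

import numpy as np
from scipy.signal import lfilter

kernel = np.array([0.25, 0.5, 0.25])   # symmetric, sums to 1
delay = (len(kernel) - 1) // 2         # 1 sample of group delay
t = np.arange(10.0)
x = np.arange(10.0)                    # ramp input

y = lfilter(kernel, 1, x)
t_aligned, y_aligned = t[:-delay], y[delay:]
print(np.allclose(t_aligned[1:], y_aligned[1:]))  # True past the first sample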
 
@@ -270,6 +272,12 @@ def linear_filtered(
         Interpolated vectors of shape (m, 3) where m is equal to the number of output
         timestamps. Contains x, y, z components of the vector.
     """
+    if input_vectors.shape[0] != input_timestamps.shape[0]:
+        raise ValueError(
+            "Input vectors and input timestamps must have the same length. "
+            f"Got {input_vectors.shape[0]} and {input_timestamps.shape[0]}"
+        )
+
     input_filtered, vectors_filtered = cic_filter(
         input_vectors, input_timestamps, output_timestamps, input_rate, output_rate
     )