imap-processing 1.0.2-py3-none-any.whl → 1.0.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. imap_processing/_version.py +2 -2
  2. imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +18 -0
  3. imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +4 -4
  4. imap_processing/cdf/config/imap_hi_variable_attrs.yaml +12 -2
  5. imap_processing/cdf/config/imap_idex_global_cdf_attrs.yaml +1 -8
  6. imap_processing/cdf/config/imap_idex_l1b_variable_attrs.yaml +16 -5
  7. imap_processing/cdf/config/imap_idex_l2a_variable_attrs.yaml +27 -25
  8. imap_processing/cdf/config/imap_idex_l2b_variable_attrs.yaml +16 -16
  9. imap_processing/cdf/config/imap_idex_l2c_variable_attrs.yaml +2 -2
  10. imap_processing/cdf/config/imap_ultra_l1c_variable_attrs.yaml +12 -0
  11. imap_processing/codice/codice_l1b.py +19 -1
  12. imap_processing/codice/codice_l2.py +61 -8
  13. imap_processing/ena_maps/ena_maps.py +2 -1
  14. imap_processing/hi/hi_l1c.py +24 -8
  15. imap_processing/hi/hi_l2.py +10 -0
  16. imap_processing/ialirt/calculate_ingest.py +19 -1
  17. imap_processing/ialirt/constants.py +12 -6
  18. imap_processing/ialirt/generate_coverage.py +3 -0
  19. imap_processing/lo/l1b/lo_l1b.py +12 -2
  20. imap_processing/lo/l1c/lo_l1c.py +3 -3
  21. imap_processing/lo/l2/lo_l2.py +95 -4
  22. imap_processing/ultra/l1c/helio_pset.py +10 -7
  23. imap_processing/ultra/l1c/spacecraft_pset.py +9 -7
  24. imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +5 -8
  25. imap_processing/ultra/utils/ultra_l1_utils.py +6 -0
  26. {imap_processing-1.0.2.dist-info → imap_processing-1.0.3.dist-info}/METADATA +1 -1
  27. {imap_processing-1.0.2.dist-info → imap_processing-1.0.3.dist-info}/RECORD +30 -30
  28. {imap_processing-1.0.2.dist-info → imap_processing-1.0.3.dist-info}/LICENSE +0 -0
  29. {imap_processing-1.0.2.dist-info → imap_processing-1.0.3.dist-info}/WHEEL +0 -0
  30. {imap_processing-1.0.2.dist-info → imap_processing-1.0.3.dist-info}/entry_points.txt +0 -0
imap_processing/ena_maps/ena_maps.py
@@ -845,6 +845,7 @@ class AbstractSkyMap(ABC):
             raise KeyError(f"Value keys not found in pointing set: {missing_keys}")

         if pset_valid_mask is None:
+            logger.debug("No pset_valid_mask provided, using all pixels as valid.")
             pset_valid_mask = np.ones(pointing_set.num_points, dtype=bool)

         if index_match_method is IndexMatchMethod.PUSH:
@@ -906,7 +907,7 @@ class AbstractSkyMap(ABC):
             stacked_valid_mask = pset_valid_mask.stack(
                 {CoordNames.GENERIC_PIXEL.value: pointing_set.spatial_coords}
             )
-            pset_valid_mask_bc, _ = xr.broadcast(data_bc, stacked_valid_mask)
+            _, pset_valid_mask_bc = xr.broadcast(data_bc, stacked_valid_mask)
            pset_valid_mask_values = pset_valid_mask_bc.values
        else:
            pset_valid_mask_values = pset_valid_mask
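
The one-line fix in the second hunk matters because xarray.broadcast returns its outputs in the same order as its inputs, so the old unpacking silently grabbed the broadcast data array instead of the mask. A minimal sketch with toy dimensions (not the IMAP data model):

import numpy as np
import xarray as xr

data = xr.DataArray(np.zeros((2, 3)), dims=["energy", "pixel"])
mask = xr.DataArray(np.array([True, False, True]), dims=["pixel"])

# Outputs come back in argument order: (data_bc, mask_bc).
data_bc, mask_bc = xr.broadcast(data, mask)
assert mask_bc.dims == ("energy", "pixel")  # mask now matches the data shape
assert mask_bc.dtype == np.bool_            # the old unpacking got a float array here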
imap_processing/hi/hi_l1c.py
@@ -29,11 +29,12 @@ from imap_processing.spice.geometry import (
     frame_transform,
     frame_transform_az_el,
 )
+from imap_processing.spice.repoint import get_pointing_times
 from imap_processing.spice.spin import (
     get_instrument_spin_phase,
     get_spin_data,
 )
-from imap_processing.spice.time import ttj2000ns_to_et
+from imap_processing.spice.time import met_to_ttj2000ns, ttj2000ns_to_et

 N_SPIN_BINS = 3600
 SPIN_PHASE_BIN_EDGES = np.linspace(0, 1, N_SPIN_BINS + 1)
@@ -101,14 +102,14 @@ def generate_pset_dataset(
     config_df = CalibrationProductConfig.from_csv(calibration_prod_config_path)

     pset_dataset = empty_pset_dataset(
-        de_dataset.epoch.data[0],
+        de_dataset.ccsds_met.data.mean(),
         de_dataset.esa_energy_step,
         config_df.cal_prod_config.number_of_products,
         logical_source_parts["sensor"],
     )
-    pset_et = ttj2000ns_to_et(pset_dataset.epoch.data[0])
     # Calculate and add despun_z, hae_latitude, and hae_longitude variables to
     # the pset_dataset
+    pset_et = ttj2000ns_to_et(pset_dataset.epoch.data[0])
     pset_dataset.update(pset_geometry(pset_et, logical_source_parts["sensor"]))
     # Bin the counts into the spin-bins
     pset_dataset.update(pset_counts(pset_dataset.coords, config_df, de_dataset))
@@ -121,15 +122,16 @@ def generate_pset_dataset(


 def empty_pset_dataset(
-    epoch_val: int, l1b_energy_steps: xr.DataArray, n_cal_prods: int, sensor_str: str
+    l1b_met: float, l1b_energy_steps: xr.DataArray, n_cal_prods: int, sensor_str: str
 ) -> xr.Dataset:
     """
     Allocate an empty xarray.Dataset with appropriate pset coordinates.

     Parameters
     ----------
-    epoch_val : int
-        The starting epoch in J2000 TT nanoseconds for data in the PSET.
+    l1b_met : float
+        Any MET from the input L1B DE dataset. This is used to query the
+        repoint-table data to get the start and end times of the pointing.
     l1b_energy_steps : xarray.DataArray
         The array of esa_energy_step data from the L1B DE product.
     n_cal_prods : int
@@ -148,13 +150,18 @@ def empty_pset_dataset(

     # preallocate coordinates xr.DataArrays
     coords = dict()
+
+    # Get the Pointing start and end times
+    pointing_mets = get_pointing_times(l1b_met)
+    epochs = met_to_ttj2000ns(np.asarray(pointing_mets))
+
     # epoch coordinate has only 1 entry for pointing set
     epoch_attrs = attr_mgr.get_variable_attributes("epoch", check_schema=False)
     epoch_attrs.update(
         attr_mgr.get_variable_attributes("hi_pset_epoch", check_schema=False)
     )
     coords["epoch"] = xr.DataArray(
-        np.array([epoch_val], dtype=np.int64),  # TODO: get dtype from cdf attrs?
+        np.array([epochs[0]], dtype=np.int64),
         name="epoch",
         dims=["epoch"],
         attrs=epoch_attrs,
@@ -201,6 +208,15 @@ def empty_pset_dataset(

     # Allocate the coordinate label variables
     data_vars = dict()
+    # Generate the epoch_delta variable
+    data_vars["epoch_delta"] = xr.DataArray(
+        np.diff(epochs),
+        name="epoch_delta",
+        dims=["epoch"],
+        attrs=attr_mgr.get_variable_attributes(
+            "hi_pset_epoch_delta", check_schema=False
+        ),
+    )
     # Generate label variables
     data_vars["esa_energy_step_label"] = xr.DataArray(
         coords["esa_energy_step"].values.astype(str),
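
The new epoch bookkeeping derives both the single epoch value and its epoch_delta from the pointing's (start, stop) MET pair. A hedged sketch with invented MET values, standing in a plain scale factor for met_to_ttj2000ns:

import numpy as np

pointing_mets = (482760000.0, 482846400.0)  # hypothetical (start, stop) MET, seconds
epochs = (np.asarray(pointing_mets) * 1e9).astype(np.int64)  # stand-in conversion

epoch = np.array([epochs[0]], dtype=np.int64)  # one entry: the pointing start
epoch_delta = np.diff(epochs)                  # one entry: the pointing duration
assert epoch.shape == epoch_delta.shape == (1,)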
@@ -257,7 +273,7 @@ def pset_geometry(pset_et: float, sensor_str: str) -> dict[str, xr.DataArray]:
     Returns
     -------
     geometry_vars : dict[str, xarray.DataArray]
-        Keys are variable names and values are data arrays.
+        Keys are variable names, and values are data arrays.
     """
     geometry_vars = create_dataset_variables(
         ["despun_z"], (1, 3), att_manager_lookup_str="hi_pset_{0}"
imap_processing/hi/hi_l2.py
@@ -156,8 +156,18 @@ def generate_hi_map(
     pset_valid_mask = None  # Default to no mask (full spin)
     if descriptor.spin_phase == "ram":
         pset_valid_mask = pset.data["ram_mask"]
+        logger.debug(
+            f"Using ram mask with shape: {pset_valid_mask.shape} "
+            f"containing {np.prod(pset_valid_mask.shape)} pixels, "
+            f"{np.sum(pset_valid_mask.values)} of which are True."
+        )
     elif descriptor.spin_phase == "anti":
         pset_valid_mask = ~pset.data["ram_mask"]
+        logger.debug(
+            f"Using anti-ram mask with shape: {pset_valid_mask.shape} "
+            f"containing {np.prod(pset_valid_mask.shape)} pixels, "
+            f"{np.sum(pset_valid_mask.values)} of which are True."
+        )

     # Project (bin) the PSET variables into the map pixels
     output_map.project_pset_values_to_map(
imap_processing/ialirt/calculate_ingest.py
@@ -9,7 +9,7 @@ from imap_processing.ialirt.constants import STATIONS
 logger = logging.getLogger(__name__)


-def find_tcp_connections(
+def find_tcp_connections(  # noqa: PLR0912
     start_file_creation: datetime,
     end_file_creation: datetime,
     lines: list,
@@ -35,8 +35,16 @@ def find_tcp_connections(
         Output dictionary with tcp connection info.
     """
     current_starts: dict[str, datetime | None] = {}
+    partners_opened = set()

     for line in lines:
+        # Record stations that opened a raw record file.
+        if "Opened raw record file" in line:
+            station = line.split("Opened raw record file for ")[1].split(
+                " antenna_partner"
+            )[0]
+            partners_opened.add(station)
+
         if "antenna partner connection is" not in line:
             continue
@@ -84,6 +92,16 @@ def find_tcp_connections(
             }
         )

+    # Handle stations with only "Opened raw record file" (no up/down)
+    for station in partners_opened:
+        if not realtime_summary["connection_times"][station]:
+            realtime_summary["connection_times"][station].append(
+                {
+                    "start": datetime.isoformat(start_file_creation),
+                    "end": datetime.isoformat(end_file_creation),
+                }
+            )
+
     # Filter out connection windows that are completely outside the time window
     for station in realtime_summary["connection_times"]:
         realtime_summary["connection_times"][station] = [
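
The station name is recovered purely by string slicing between two fixed markers. A quick sketch against a made-up log line (the real log format may differ):

line = "2025-11-03T12:00:01 Opened raw record file for Kiel antenna_partner=1"
station = line.split("Opened raw record file for ")[1].split(" antenna_partner")[0]
assert station == "Kiel"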
imap_processing/ialirt/constants.py
@@ -53,12 +53,6 @@ class StationProperties(NamedTuple):
 # Verified by Kiel and KSWC Observatory staff.
 # Notes: the KSWC station is not yet operational,
 # but will have the following properties:
-# "KSWC": StationProperties(
-#     longitude=126.2958,  # degrees East
-#     latitude=33.4273,  # degrees North
-#     altitude=0.1,  # approx 100 meters
-#     min_elevation_deg=5,  # 5 degrees is the requirement
-# ),
 STATIONS = {
     "Kiel": StationProperties(
         longitude=10.1808,  # degrees East
@@ -66,10 +60,22 @@ STATIONS = {
         altitude=0.1,  # approx 100 meters
         min_elevation_deg=5,  # 5 degrees is the requirement
     ),
+    "Korea": StationProperties(
+        longitude=126.2958,  # degrees East
+        latitude=33.4273,  # degrees North
+        altitude=0.1,  # approx 100 meters
+        min_elevation_deg=5,  # 5 degrees is the requirement
+    ),
     "Manaus": StationProperties(
         longitude=-59.969334,  # degrees East (negative = West)
         latitude=-2.891257,  # degrees North (negative = South)
         altitude=0.1,  # approx 100 meters
         min_elevation_deg=5,  # 5 degrees is the requirement
     ),
+    "SANSA": StationProperties(
+        longitude=27.714,  # degrees East
+        latitude=-25.888,  # degrees North (negative = South)
+        altitude=1.542,  # approx 1542 meters
+        min_elevation_deg=2,  # 2 degrees is the requirement
+    ),
 }
imap_processing/ialirt/generate_coverage.py
@@ -57,6 +57,9 @@ def generate_coverage(

     stations = {
         "Kiel": STATIONS["Kiel"],
+        "Korea": STATIONS["Korea"],
+        "Manaus": STATIONS["Manaus"],
+        "SANSA": STATIONS["SANSA"],
     }
     coverage_dict = {}
     outage_dict = {}
imap_processing/lo/l1b/lo_l1b.py
@@ -19,6 +19,7 @@ from imap_processing.lo.l1b.tof_conversions import (
 from imap_processing.spice.geometry import (
     SpiceFrame,
     cartesian_to_latitudinal,
+    frame_transform,
     instrument_pointing,
 )
 from imap_processing.spice.repoint import get_pointing_times
@@ -760,8 +761,10 @@ def set_bad_or_goodtimes(
     # the bin_start and bin_end are 6 degree bins and need to be converted to
     # 0.1 degree bins to align with the spin_bins, so multiply by 60
     time_mask = (epochs[:, None] >= times_start) & (epochs[:, None] <= times_end)
+    # The ancillary file binning uses 0-59 for the 6 degree bins, so add 1 to bin_end
+    # so the upper bound is inclusive of the full bin range.
     bin_mask = (spin_bins[:, None] >= times_df["bin_start"].values * 60) & (
-        spin_bins[:, None] <= times_df["bin_end"].values * 60
+        spin_bins[:, None] < (times_df["bin_end"].values + 1) * 60
     )

     # Combined mask for epochs that fall within the time and bin ranges
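
The bound change fixes an off-by-one: a 6-degree bin index of N covers 0.1-degree spin bins N*60 through N*60 + 59, so the old <= N*60 kept only the first 0.1-degree bin of the last 6-degree bin. A quick numeric check with a hypothetical goodtime range:

import numpy as np

spin_bins = np.arange(3600)
bin_start, bin_end = 54, 59  # hypothetical goodtime covering the last 36 degrees

old = (spin_bins >= bin_start * 60) & (spin_bins <= bin_end * 60)
new = (spin_bins >= bin_start * 60) & (spin_bins < (bin_end + 1) * 60)
assert old.sum() == 301  # stops at the first 0.1-degree bin of bin 59
assert new.sum() == 360  # all six 6-degree bins, 60 spin bins each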
@@ -853,8 +856,15 @@ def set_pointing_bin(l1b_de: xr.Dataset) -> xr.Dataset:
     x = l1b_de["hae_x"]
     y = l1b_de["hae_y"]
     z = l1b_de["hae_z"]
+    # Convert from HAE to DPS coordinates
+    dps_xyz = frame_transform(
+        ttj2000ns_to_et(l1b_de["epoch"]),
+        np.column_stack((x, y, z)),
+        SpiceFrame.IMAP_HAE,
+        SpiceFrame.IMAP_DPS,
+    )
     # convert the pointing direction to latitudinal coordinates
-    direction = cartesian_to_latitudinal(np.column_stack((x, y, z)))
+    direction = cartesian_to_latitudinal(dps_xyz)
     # first column: radius (Not needed)
     # second column: longitude
     lons = direction[:, 1]
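
The fix rotates the pointing vectors into the despun (DPS) frame before taking longitudes, because the same unit vector yields different angles in different frames. A toy illustration with a made-up 90-degree rotation standing in for the SPICE frame transform:

import numpy as np

hae_vec = np.array([[1.0, 0.0, 0.0]])
rot_90_z = np.array([[0.0, -1.0, 0.0], [1.0, 0.0, 0.0], [0.0, 0.0, 1.0]])
dps_vec = hae_vec @ rot_90_z.T  # stand-in for frame_transform

lon_hae = np.degrees(np.arctan2(hae_vec[:, 1], hae_vec[:, 0]))  # 0 degrees
lon_dps = np.degrees(np.arctan2(dps_vec[:, 1], dps_vec[:, 0]))  # 90 degrees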
imap_processing/lo/l1c/lo_l1c.py
@@ -295,8 +295,8 @@ def create_pset_counts(
     data = np.column_stack(
         (
             de_filtered["esa_step"],
-            de_filtered["pointing_bin_lon"],
-            de_filtered["pointing_bin_lat"],
+            de_filtered["spin_bin"],
+            de_filtered["off_angle_bin"],
         )
     )
     # Create the histogram with 3600 longitude bins, 40 latitude bins, and 7 energy bins
@@ -341,7 +341,7 @@ def calculate_exposure_times(counts: xr.DataArray, l1b_de: xr.Dataset) -> xr.Dat
         The exposure times for the L1B Direct Event dataset.
     """
     data = np.column_stack(
-        (l1b_de["esa_step"], l1b_de["pointing_bin_lon"], l1b_de["pointing_bin_lat"])
+        (l1b_de["esa_step"], l1b_de["spin_bin"], l1b_de["off_angle_bin"])
     )

     result = binned_statistic_dd(
imap_processing/lo/l2/lo_l2.py
@@ -10,6 +10,7 @@ import xarray as xr
 from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
 from imap_processing.ena_maps import ena_maps
 from imap_processing.ena_maps.ena_maps import AbstractSkyMap, RectangularSkyMap
+from imap_processing.ena_maps.utils.corrections import PowerLawFluxCorrector
 from imap_processing.ena_maps.utils.naming import MapDescriptor
 from imap_processing.lo import lo_ancillary
 from imap_processing.spice.time import et_to_datetime64, ttj2000ns_to_et
@@ -77,7 +78,13 @@ def lo_l2(
     logger.info("Step 4: Calculating rates and intensities")

     # Determine if corrections are needed and prepare oxygen data if required
-    sputtering_correction, bootstrap_correction, o_map_dataset = _prepare_corrections(
+    (
+        sputtering_correction,
+        bootstrap_correction,
+        flux_correction,
+        o_map_dataset,
+        flux_factors,
+    ) = _prepare_corrections(
         map_descriptor, descriptor, sci_dependencies, anc_dependencies
     )
@@ -85,7 +92,9 @@ def lo_l2(
         dataset,
         sputtering_correction=sputtering_correction,
         bootstrap_correction=bootstrap_correction,
+        flux_correction=flux_correction,
         o_map_dataset=o_map_dataset,
+        flux_factors=flux_factors,
     )

     logger.info("Step 5: Finalizing dataset with attributes")
@@ -100,7 +109,7 @@ def _prepare_corrections(
     descriptor: str,
     sci_dependencies: dict,
     anc_dependencies: list,
-) -> tuple[bool, bool, xr.Dataset | None]:
+) -> tuple[bool, bool, bool, xr.Dataset | None, Path | None]:
     """
     Determine what corrections are needed and prepare oxygen dataset if required.
@@ -130,7 +139,9 @@ def _prepare_corrections(
     # Default values - no corrections needed
     sputtering_correction = False
     bootstrap_correction = False
+    flux_correction = False
     o_map_dataset = None
+    flux_factors: None | Path = None

     # Sputtering and bootstrap corrections are only applied to hydrogen ENA data
     # Guard against recursion: don't process oxygen for oxygen maps
@@ -145,7 +156,24 @@ def _prepare_corrections(
         sputtering_correction = True
         bootstrap_correction = True

-    return sputtering_correction, bootstrap_correction, o_map_dataset
+    if "raw" not in map_descriptor.principal_data:
+        flux_correction = True
+        try:
+            flux_factors = next(
+                x for x in anc_dependencies if "esa-eta-fit-factors" in str(x)
+            )
+        except StopIteration:
+            raise ValueError(
+                "No flux correction factor file found in ancillary dependencies"
+            ) from None
+
+    return (
+        sputtering_correction,
+        bootstrap_correction,
+        flux_correction,
+        o_map_dataset,
+        flux_factors,
+    )


 # =============================================================================
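
The ancillary lookup is a simple substring match over the dependency paths, with next() raising StopIteration when nothing matches. A sketch with hypothetical file names (only the esa-eta-fit-factors substring is taken from the diff):

from pathlib import Path

anc_dependencies = [
    Path("imap_lo_sputtering-rates_20251001_v001.csv"),      # hypothetical
    Path("imap_lo_esa-eta-fit-factors_20251001_v001.csv"),   # hypothetical
]
flux_factors = next(x for x in anc_dependencies if "esa-eta-fit-factors" in str(x))
assert "esa-eta-fit-factors" in flux_factors.name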
@@ -664,7 +692,9 @@ def calculate_all_rates_and_intensities(
     dataset: xr.Dataset,
     sputtering_correction: bool = False,
     bootstrap_correction: bool = False,
+    flux_correction: bool = False,
     o_map_dataset: xr.Dataset | None = None,
+    flux_factors: Path | None = None,
 ) -> xr.Dataset:
     """
     Calculate rates and intensities with proper error propagation.
@@ -679,8 +709,13 @@ def calculate_all_rates_and_intensities(
     bootstrap_correction : bool, optional
         Whether to apply bootstrap corrections to intensities.
         Default is False.
+    flux_correction : bool, optional
+        Whether to apply flux corrections to intensities.
+        Default is False.
     o_map_dataset : xr.Dataset, optional
         Dataset specifically for oxygen, needed for sputtering corrections.
+    flux_factors : Path, optional
+        Path to flux factor file for flux corrections.

     Returns
     -------
@@ -705,7 +740,13 @@ def calculate_all_rates_and_intensities(
     if bootstrap_correction:
         dataset = calculate_bootstrap_corrections(dataset)

-    # Step 6: Clean up intermediate variables
+    # Optional Step 6: Calculate flux corrections
+    if flux_correction:
+        if flux_factors is None:
+            raise ValueError("Flux factors file must be provided for flux corrections")
+        dataset = calculate_flux_corrections(dataset, flux_factors)
+
+    # Step 7: Clean up intermediate variables
     dataset = cleanup_intermediate_variables(dataset)

     return dataset
@@ -1084,6 +1125,56 @@ def calculate_bootstrap_corrections(dataset: xr.Dataset) -> xr.Dataset:
     return dataset


+def calculate_flux_corrections(dataset: xr.Dataset, flux_factors: Path) -> xr.Dataset:
+    """
+    Calculate flux corrections for intensities.
+
+    Uses the shared ena maps ``PowerLawFluxCorrector`` class to do the
+    correction calculations.
+
+    Parameters
+    ----------
+    dataset : xr.Dataset
+        Dataset with count rates, geometric factors, and center energies.
+    flux_factors : Path
+        Path to the eta flux factor file to use for corrections. Read in as
+        an ancillary file in the preprocessing step.
+
+    Returns
+    -------
+    xr.Dataset
+        Dataset with calculated flux-corrected intensities and their
+        uncertainties for the specified species.
+    """
+    logger.info("Applying flux corrections")
+
+    # Flux correction
+    corrector = PowerLawFluxCorrector(flux_factors)
+    # FluxCorrector works on (energy, :) arrays, so we need to flatten the map
+    # spatial dimensions for the correction and then reshape back after.
+    input_shape = dataset["ena_intensity"].shape[1:]  # Exclude epoch dimension
+    intensity = dataset["ena_intensity"].values[0].reshape(len(dataset["energy"]), -1)
+    stat_uncert = (
+        dataset["ena_intensity_stat_uncert"]
+        .values[0]
+        .reshape(len(dataset["energy"]), -1)
+    )
+    corrected_intensity, corrected_stat_unc = corrector.apply_flux_correction(
+        intensity,
+        stat_uncert,
+        dataset["energy"].data,
+    )
+    # Add the size 1 epoch dimension back in to the corrected fluxes.
+    dataset["ena_intensity"].data = corrected_intensity.reshape(input_shape)[
+        np.newaxis, ...
+    ]
+    dataset["ena_intensity_stat_uncert"].data = corrected_stat_unc.reshape(input_shape)[
+        np.newaxis, ...
+    ]
+
+    return dataset
+
+
 def cleanup_intermediate_variables(dataset: xr.Dataset) -> xr.Dataset:
     """
     Remove intermediate variables that were only needed for calculations.
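
calculate_flux_corrections flattens the map's spatial dimensions into one axis so the corrector sees (energy, pixels) arrays, then restores the original shape plus the size-1 epoch axis. A shape-only sketch with made-up sizes and an identity stand-in for the corrector:

import numpy as np

ena_intensity = np.random.rand(1, 7, 40, 60)   # (epoch, energy, lat, lon), invented
input_shape = ena_intensity.shape[1:]          # (7, 40, 60), epoch excluded
flat = ena_intensity[0].reshape(7, -1)         # (7, 2400) for the corrector
corrected = flat * 1.0                         # stand-in for apply_flux_correction
restored = corrected.reshape(input_shape)[np.newaxis, ...]
assert restored.shape == ena_intensity.shape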
imap_processing/ultra/l1c/helio_pset.py
@@ -128,8 +128,8 @@ def calculate_helio_pset(
     healpix = np.arange(n_pix)

     # Get midpoint timestamp for pointing.
-    pointing_start, pointing_stop = get_pointing_times(
-        et_to_met(species_dataset["event_times"].data[0])
+    pointing_range_met = get_pointing_times(
+        et_to_met(species_dataset["event_times"].mean())
     )
     logger.info("Calculating spacecraft exposure times with deadtime correction.")
     exposure_time, deadtime_ratios = get_spacecraft_exposure_times(
@@ -137,8 +137,7 @@ def calculate_helio_pset(
         params_dataset,
         pixels_below_scattering,
         boundary_scale_factors,
-        pointing_start,
-        pointing_stop,
+        pointing_range_met,
         n_pix=n_pix,
     )
     logger.info("Calculating spun efficiencies and geometric function.")
@@ -164,7 +163,7 @@ def calculate_helio_pset(
         nside=nside,
     )

-    mid_time = ttj2000ns_to_et(met_to_ttj2000ns((pointing_start + pointing_stop) / 2))
+    mid_time = ttj2000ns_to_et(met_to_ttj2000ns(np.sum(pointing_range_met) / 2))

     logger.info("Adjusting data for helio frame.")
     exposure_time, _efficiency, geometric_function = get_helio_adjusted_data(
@@ -191,9 +190,13 @@ def calculate_helio_pset(
         helio_pset_quality_flags,
         nside=nside,
     )
-    pointing_start = met_to_ttj2000ns(pointing_start)
+    # Convert pointing start and end time to ttj2000ns
+    pointing_range_ns = met_to_ttj2000ns(pointing_range_met)
     # Epoch should be the start of the pointing
-    pset_dict["epoch"] = np.atleast_1d(pointing_start).astype(np.int64)
+    pset_dict["epoch"] = np.atleast_1d(pointing_range_ns[0]).astype(np.int64)
+    pset_dict["epoch_delta"] = np.atleast_1d(np.diff(pointing_range_ns)).astype(
+        np.int64
+    )
     pset_dict["counts"] = counts[np.newaxis, ...]
     pset_dict["latitude"] = latitude[np.newaxis, ...]
     pset_dict["longitude"] = longitude[np.newaxis, ...]
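
Because get_pointing_times now returns a (start, stop) pair, np.diff yields exactly one delta, and np.atleast_1d keeps both epoch and epoch_delta as length-1 arrays for the single-epoch pointing set. A quick shape check with invented nanosecond values:

import numpy as np

pointing_range_ns = np.array([800000000000000000, 800086400000000000])  # invented
epoch = np.atleast_1d(pointing_range_ns[0]).astype(np.int64)
epoch_delta = np.atleast_1d(np.diff(pointing_range_ns)).astype(np.int64)
assert epoch.shape == epoch_delta.shape == (1,)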
imap_processing/ultra/l1c/spacecraft_pset.py
@@ -138,8 +138,8 @@ def calculate_spacecraft_pset(
     sensitivity = efficiencies * geometric_function

     # Get the start and stop times of the pointing period
-    pointing_start, pointing_stop = get_pointing_times(
-        float(et_to_met(species_dataset["event_times"].data[0]))
+    pointing_range_met = get_pointing_times(
+        float(et_to_met(species_dataset["event_times"].mean()))
     )
     # Calculate exposure times
     logger.info("Calculating spacecraft exposure times with deadtime correction.")
@@ -148,8 +148,7 @@ def calculate_spacecraft_pset(
         params_dataset,
         pixels_below_scattering,
         boundary_scale_factors,
-        pointing_start,
-        pointing_stop,
+        pointing_range_met,
         n_pix=n_pix,
     )
     logger.info("Calculating background rates.")
@@ -179,10 +178,13 @@ def calculate_spacecraft_pset(
         spacecraft_pset_quality_flags,
         nside=nside,
     )
-    # Convert pointing start time to ttj2000ns
-    pointing_start = met_to_ttj2000ns(pointing_start)
+    # Convert pointing start and end time to ttj2000ns
+    pointing_range_ns = met_to_ttj2000ns(pointing_range_met)
     # Epoch should be the start of the pointing
-    pset_dict["epoch"] = np.atleast_1d(pointing_start).astype(np.int64)
+    pset_dict["epoch"] = np.atleast_1d(pointing_range_ns[0]).astype(np.int64)
+    pset_dict["epoch_delta"] = np.atleast_1d(np.diff(pointing_range_ns)).astype(
+        np.int64
+    )
     pset_dict["counts"] = counts[np.newaxis, ...]
     pset_dict["latitude"] = latitude[np.newaxis, ...]
     pset_dict["longitude"] = longitude[np.newaxis, ...]
imap_processing/ultra/l1c/ultra_l1c_pset_bins.py
@@ -415,8 +415,7 @@ def get_spacecraft_exposure_times(
     params_dataset: xr.Dataset,
     pixels_below_scattering: list[list],
     boundary_scale_factors: NDArray,
-    pointing_start_met: float,
-    pointing_stop_met: float,
+    pointing_range_met: tuple[float, float],
     n_pix: int,
 ) -> tuple[NDArray, NDArray]:
     """
@@ -435,10 +434,8 @@ def get_spacecraft_exposure_times(
         below the FWHM scattering threshold.
     boundary_scale_factors : np.ndarray
         Boundary scale factors for each pixel at each spin phase.
-    pointing_start_met : float
-        Start time of the pointing period in mission elapsed time.
-    pointing_stop_met : float
-        Stop time of the pointing period in mission elapsed time.
+    pointing_range_met : tuple
+        Start and stop times of the pointing period in mission elapsed time.
     n_pix : int
         Number of HEALPix pixels.
@@ -465,8 +462,8 @@ def get_spacecraft_exposure_times(
     spin_data = get_spin_data()
     # Filter for spins only in pointing
     spin_data = spin_data[
-        (spin_data["spin_start_met"] >= pointing_start_met)
-        & (spin_data["spin_start_met"] <= pointing_stop_met)
+        (spin_data["spin_start_met"] >= pointing_range_met[0])
+        & (spin_data["spin_start_met"] <= pointing_range_met[1])
     ]
     # Get only valid spin data
     valid_mask = (spin_data["spin_phase_valid"].values == 1) & (
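
Passing the range as one tuple keeps the filter symmetric: spins are kept when their start MET falls inside [start, stop]. A toy pandas example (invented spin table, column name from the diff):

import pandas as pd

spin_data = pd.DataFrame({"spin_start_met": [90.0, 110.0, 130.0, 150.0]})
pointing_range_met = (100.0, 140.0)  # hypothetical (start, stop) MET
in_pointing = spin_data[
    (spin_data["spin_start_met"] >= pointing_range_met[0])
    & (spin_data["spin_start_met"] <= pointing_range_met[1])
]
assert list(in_pointing["spin_start_met"]) == [110.0, 130.0]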
imap_processing/ultra/utils/ultra_l1_utils.py
@@ -102,6 +102,12 @@ def create_dataset(  # noqa: PLR0912
             "spin_phase_step",
         ]:
             continue
+        elif key == "epoch_delta":
+            dataset[key] = xr.DataArray(
+                data,
+                dims=["epoch"],
+                attrs=cdf_manager.get_variable_attributes(key, check_schema=False),
+            )
         elif key in velocity_keys:
             dataset[key] = xr.DataArray(
                 data,
{imap_processing-1.0.2.dist-info → imap_processing-1.0.3.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: imap-processing
-Version: 1.0.2
+Version: 1.0.3
 Summary: IMAP Science Operations Center Processing
 License: MIT
 Keywords: IMAP,SDC,SOC,Science Operations