imap-processing 0.18.0__py3-none-any.whl → 0.19.2__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

This version of imap-processing has been flagged as potentially problematic.

Files changed (122)
  1. imap_processing/_version.py +2 -2
  2. imap_processing/ancillary/ancillary_dataset_combiner.py +161 -1
  3. imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +6 -0
  4. imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +221 -1057
  5. imap_processing/cdf/config/imap_codice_l1b_variable_attrs.yaml +307 -283
  6. imap_processing/cdf/config/imap_codice_l2_variable_attrs.yaml +1044 -203
  7. imap_processing/cdf/config/imap_constant_attrs.yaml +4 -2
  8. imap_processing/cdf/config/imap_enamaps_l2-common_variable_attrs.yaml +11 -0
  9. imap_processing/cdf/config/imap_glows_l1b_variable_attrs.yaml +15 -1
  10. imap_processing/cdf/config/imap_hi_global_cdf_attrs.yaml +5 -0
  11. imap_processing/cdf/config/imap_hit_global_cdf_attrs.yaml +10 -4
  12. imap_processing/cdf/config/imap_idex_l2a_variable_attrs.yaml +33 -4
  13. imap_processing/cdf/config/imap_idex_l2b_variable_attrs.yaml +8 -91
  14. imap_processing/cdf/config/imap_idex_l2c_variable_attrs.yaml +106 -16
  15. imap_processing/cdf/config/imap_lo_global_cdf_attrs.yaml +5 -4
  16. imap_processing/cdf/config/imap_lo_l1a_variable_attrs.yaml +4 -15
  17. imap_processing/cdf/config/imap_lo_l1c_variable_attrs.yaml +189 -98
  18. imap_processing/cdf/config/imap_mag_global_cdf_attrs.yaml +85 -2
  19. imap_processing/cdf/config/imap_mag_l1c_variable_attrs.yaml +24 -1
  20. imap_processing/cdf/config/imap_ultra_global_cdf_attrs.yaml +20 -8
  21. imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +45 -35
  22. imap_processing/cdf/config/imap_ultra_l1c_variable_attrs.yaml +110 -7
  23. imap_processing/cli.py +138 -93
  24. imap_processing/codice/codice_l0.py +2 -1
  25. imap_processing/codice/codice_l1a.py +167 -69
  26. imap_processing/codice/codice_l1b.py +42 -32
  27. imap_processing/codice/codice_l2.py +215 -9
  28. imap_processing/codice/constants.py +790 -603
  29. imap_processing/codice/data/lo_stepping_values.csv +1 -1
  30. imap_processing/decom.py +1 -4
  31. imap_processing/ena_maps/ena_maps.py +71 -43
  32. imap_processing/ena_maps/utils/corrections.py +291 -0
  33. imap_processing/ena_maps/utils/map_utils.py +20 -4
  34. imap_processing/ena_maps/utils/naming.py +8 -2
  35. imap_processing/glows/ancillary/imap_glows_exclusions-by-instr-team_20250923_v002.dat +10 -0
  36. imap_processing/glows/ancillary/imap_glows_map-of-excluded-regions_20250923_v002.dat +393 -0
  37. imap_processing/glows/ancillary/imap_glows_map-of-uv-sources_20250923_v002.dat +593 -0
  38. imap_processing/glows/ancillary/imap_glows_pipeline-settings_20250923_v002.json +54 -0
  39. imap_processing/glows/ancillary/imap_glows_suspected-transients_20250923_v002.dat +10 -0
  40. imap_processing/glows/l1b/glows_l1b.py +123 -18
  41. imap_processing/glows/l1b/glows_l1b_data.py +358 -47
  42. imap_processing/glows/l2/glows_l2.py +11 -0
  43. imap_processing/hi/hi_l1a.py +124 -3
  44. imap_processing/hi/hi_l1b.py +154 -71
  45. imap_processing/hi/hi_l1c.py +4 -109
  46. imap_processing/hi/hi_l2.py +104 -60
  47. imap_processing/hi/utils.py +262 -8
  48. imap_processing/hit/l0/constants.py +3 -0
  49. imap_processing/hit/l0/decom_hit.py +3 -6
  50. imap_processing/hit/l1a/hit_l1a.py +311 -21
  51. imap_processing/hit/l1b/hit_l1b.py +54 -126
  52. imap_processing/hit/l2/hit_l2.py +6 -6
  53. imap_processing/ialirt/calculate_ingest.py +219 -0
  54. imap_processing/ialirt/constants.py +12 -2
  55. imap_processing/ialirt/generate_coverage.py +15 -2
  56. imap_processing/ialirt/l0/ialirt_spice.py +6 -2
  57. imap_processing/ialirt/l0/parse_mag.py +293 -42
  58. imap_processing/ialirt/l0/process_hit.py +5 -3
  59. imap_processing/ialirt/l0/process_swapi.py +41 -25
  60. imap_processing/ialirt/process_ephemeris.py +70 -14
  61. imap_processing/ialirt/utils/create_xarray.py +1 -1
  62. imap_processing/idex/idex_l0.py +2 -2
  63. imap_processing/idex/idex_l1a.py +2 -3
  64. imap_processing/idex/idex_l1b.py +2 -3
  65. imap_processing/idex/idex_l2a.py +130 -4
  66. imap_processing/idex/idex_l2b.py +158 -143
  67. imap_processing/idex/idex_utils.py +1 -3
  68. imap_processing/lo/ancillary_data/imap_lo_hydrogen-geometric-factor_v001.csv +75 -0
  69. imap_processing/lo/ancillary_data/imap_lo_oxygen-geometric-factor_v001.csv +75 -0
  70. imap_processing/lo/l0/lo_science.py +25 -24
  71. imap_processing/lo/l1b/lo_l1b.py +93 -19
  72. imap_processing/lo/l1c/lo_l1c.py +273 -93
  73. imap_processing/lo/l2/lo_l2.py +949 -135
  74. imap_processing/lo/lo_ancillary.py +55 -0
  75. imap_processing/mag/l1a/mag_l1a.py +1 -0
  76. imap_processing/mag/l1a/mag_l1a_data.py +26 -0
  77. imap_processing/mag/l1b/mag_l1b.py +3 -2
  78. imap_processing/mag/l1c/interpolation_methods.py +14 -15
  79. imap_processing/mag/l1c/mag_l1c.py +23 -6
  80. imap_processing/mag/l1d/mag_l1d.py +57 -14
  81. imap_processing/mag/l1d/mag_l1d_data.py +202 -32
  82. imap_processing/mag/l2/mag_l2.py +2 -0
  83. imap_processing/mag/l2/mag_l2_data.py +14 -5
  84. imap_processing/quality_flags.py +23 -1
  85. imap_processing/spice/geometry.py +89 -39
  86. imap_processing/spice/pointing_frame.py +4 -8
  87. imap_processing/spice/repoint.py +78 -2
  88. imap_processing/spice/spin.py +28 -8
  89. imap_processing/spice/time.py +12 -22
  90. imap_processing/swapi/l1/swapi_l1.py +10 -4
  91. imap_processing/swapi/l2/swapi_l2.py +15 -17
  92. imap_processing/swe/l1b/swe_l1b.py +1 -2
  93. imap_processing/ultra/constants.py +30 -24
  94. imap_processing/ultra/l0/ultra_utils.py +9 -11
  95. imap_processing/ultra/l1a/ultra_l1a.py +1 -2
  96. imap_processing/ultra/l1b/badtimes.py +35 -11
  97. imap_processing/ultra/l1b/de.py +95 -31
  98. imap_processing/ultra/l1b/extendedspin.py +31 -16
  99. imap_processing/ultra/l1b/goodtimes.py +112 -0
  100. imap_processing/ultra/l1b/lookup_utils.py +281 -28
  101. imap_processing/ultra/l1b/quality_flag_filters.py +10 -1
  102. imap_processing/ultra/l1b/ultra_l1b.py +7 -7
  103. imap_processing/ultra/l1b/ultra_l1b_culling.py +169 -7
  104. imap_processing/ultra/l1b/ultra_l1b_extended.py +311 -69
  105. imap_processing/ultra/l1c/helio_pset.py +139 -37
  106. imap_processing/ultra/l1c/l1c_lookup_utils.py +289 -0
  107. imap_processing/ultra/l1c/spacecraft_pset.py +140 -29
  108. imap_processing/ultra/l1c/ultra_l1c.py +33 -24
  109. imap_processing/ultra/l1c/ultra_l1c_culling.py +92 -0
  110. imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +400 -292
  111. imap_processing/ultra/l2/ultra_l2.py +54 -11
  112. imap_processing/ultra/utils/ultra_l1_utils.py +37 -7
  113. imap_processing/utils.py +3 -4
  114. {imap_processing-0.18.0.dist-info → imap_processing-0.19.2.dist-info}/METADATA +2 -2
  115. {imap_processing-0.18.0.dist-info → imap_processing-0.19.2.dist-info}/RECORD +118 -109
  116. imap_processing/idex/idex_l2c.py +0 -84
  117. imap_processing/spice/kernels.py +0 -187
  118. imap_processing/ultra/l1b/cullingmask.py +0 -87
  119. imap_processing/ultra/l1c/histogram.py +0 -36
  120. {imap_processing-0.18.0.dist-info → imap_processing-0.19.2.dist-info}/LICENSE +0 -0
  121. {imap_processing-0.18.0.dist-info → imap_processing-0.19.2.dist-info}/WHEEL +0 -0
  122. {imap_processing-0.18.0.dist-info → imap_processing-0.19.2.dist-info}/entry_points.txt +0 -0
@@ -8,22 +8,20 @@ Reference: https://spiceypy.readthedocs.io/en/main/documentation.html.
 
 import logging
 import typing
-from typing import Union
+from datetime import datetime, timedelta
 
 import numpy as np
 import spiceypy
 from numpy import ndarray
 
+from imap_processing.ialirt.constants import STATIONS
 from imap_processing.spice.geometry import SpiceBody, SpiceFrame, imap_state
-from imap_processing.spice.kernels import ensure_spice
 from imap_processing.spice.time import et_to_utc, str_to_et
 
 # Logger setup
 logger = logging.getLogger(__name__)
 
 
-@typing.no_type_check
-@ensure_spice
 def latitude_longitude_to_ecef(
     longitude: float, latitude: float, altitude: float
 ) -> ndarray:
@@ -68,12 +66,11 @@ def latitude_longitude_to_ecef(
 
 
 @typing.no_type_check
-@ensure_spice
 def calculate_azimuth_and_elevation(
     longitude: float,
     latitude: float,
     altitude: float,
-    observation_time: Union[float, np.ndarray],
+    observation_time: float | np.ndarray,
     target: str = SpiceBody.IMAP.name,
 ) -> tuple:
     """
@@ -137,8 +134,8 @@ def calculate_doppler(
     longitude: float,
     latitude: float,
     altitude: float,
-    observation_time: Union[float, np.ndarray],
-) -> Union[float, ndarray[float]]:
+    observation_time: float | np.ndarray,
+) -> float | ndarray[float]:
     """
     Calculate the doppler velocity.
 
@@ -194,7 +191,7 @@ def build_output(
     latitude: float,
     altitude: float,
     time_endpoints: tuple[str, str],
-    time_step: float,
+    time_step: float = 60,
 ) -> dict[str, np.ndarray]:
     """
     Build the output dictionary containing time, azimuth, elevation, and doppler.
@@ -210,7 +207,7 @@ def build_output(
     time_endpoints : tuple[str, str]
         Start and stop times in UTC.
     time_step : float
-        Seconds between data points.
+        Seconds between data points. Default is 60.
 
     Returns
     -------
@@ -230,10 +227,10 @@ def build_output(
     )
 
     output_dict["time"] = et_to_utc(time_range, format_str="ISOC")
-    output_dict["azimuth"] = azimuth
-    output_dict["elevation"] = elevation
-    output_dict["doppler"] = calculate_doppler(
-        longitude, latitude, altitude, time_range
+    output_dict["azimuth"] = np.round(azimuth, 6)
+    output_dict["elevation"] = np.round(elevation, 6)
+    output_dict["doppler"] = np.round(
+        calculate_doppler(longitude, latitude, altitude, time_range), 6
     )
 
     logger.info(
@@ -242,3 +239,62 @@ def build_output(
     )
 
     return output_dict
+
+
+def generate_text_files(station: str, day: str) -> list[str]:
+    """
+    Generate a pointing schedule text file and return it as a list of strings.
+
+    Parameters
+    ----------
+    station : str
+        Station name.
+    day : str
+        The day for which to generate a pointing schedule, in ISO format.
+        Ex: "2025-08-11".
+
+    Returns
+    -------
+    lines : list[str]
+        A list of strings that makeup the lines of a pointing schedule file.
+    """
+    station_properties = STATIONS[station]
+
+    day_as_datetime = datetime.fromisoformat(day)
+    time_endpoints = (
+        datetime.strftime(day_as_datetime, "%Y-%m-%d %H:%M:%S"),
+        datetime.strftime(day_as_datetime + timedelta(days=1), "%Y-%m-%d %H:%M:%S"),
+    )
+    output_dict = build_output(
+        station_properties[0],
+        station_properties[1],
+        station_properties[2],
+        time_endpoints,
+    )
+
+    lines = [
+        f"Station: {station}\n",
+        "Target: IMAP\n",
+        f"Creation date (UTC): {datetime.utcnow()}\n",
+        f"Start time: {time_endpoints[0]}\n",
+        f"End time: {time_endpoints[1]}\n",
+        "Cadence (sec): 60\n\n",
+        "Date/Time"
+        + "Azimuth".rjust(29)
+        + "Elevation".rjust(17)
+        + "Doppler".rjust(15)
+        + "\n",
+        "(UTC)" + "(deg.)".rjust(33) + "(deg.)".rjust(16) + "(km/s)".rjust(16) + "\n",
+    ]
+
+    length = len(output_dict["time"])
+    for i in range(length):
+        lines.append(
+            f"{output_dict['time'][i]}"
+            + f"{output_dict['azimuth'][i]}".rjust(16)
+            + f"{output_dict['elevation'][i]}".rjust(16)
+            + f"{output_dict['doppler'][i]}".rjust(15)
+            + "\n"
+        )
+
+    return lines
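
The generate_text_files helper added above (the +70/-14 total for this group of hunks matches imap_processing/ialirt/process_ephemeris.py in the file list) writes one day of pointing-schedule lines at the 60-second cadence that build_output now defaults to. A minimal usage sketch, not taken from the package: the station key and output filename below are hypothetical, real keys live in imap_processing.ialirt.constants.STATIONS, and the required SPICE kernels must already be furnished, since the @ensure_spice decorator (and spice/kernels.py itself) is removed in this release.

    # Hypothetical sketch: "example_station" is a made-up key; look up real keys in
    # imap_processing.ialirt.constants.STATIONS. SPICE kernels must be loaded first
    # (e.g. via spiceypy.furnsh) because ensure_spice no longer wraps these functions.
    from pathlib import Path

    from imap_processing.ialirt.process_ephemeris import generate_text_files

    lines = generate_text_files(station="example_station", day="2025-08-11")
    Path("imap_pointing_2025-08-11.txt").write_text("".join(lines))
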
@@ -144,7 +144,7 @@ def create_xarray_from_records(records: list[dict]) -> xr.Dataset: # noqa: PLR0
     # Populate the dataset variables
     for i, record in enumerate(records):
         for key, val in record.items():
-            if key in ["apid", "met", "met_in_utc", "ttj2000ns"]:
+            if key in ["apid", "met", "met_in_utc", "ttj2000ns", "last_modified"]:
                 continue
             elif key in ["mag_B_GSE", "mag_B_GSM", "mag_B_RTN"]:
                 dataset[key].data[i, :] = val
@@ -2,7 +2,7 @@
 
 import logging
 from pathlib import Path
-from typing import Any, Union
+from typing import Any
 
 from xarray import Dataset
 
@@ -13,7 +13,7 @@ logger = logging.getLogger(__name__)
 
 
 def decom_packets(
-    packet_file: Union[str, Path],
+    packet_file: str | Path,
 ) -> tuple[list[Any], dict[int, Dataset], dict[int, Dataset]]:
     """
     Decom IDEX data packets using IDEX packet definition.
@@ -17,7 +17,6 @@ Examples
 import logging
 from enum import IntEnum
 from pathlib import Path
-from typing import Union
 
 import numpy as np
 import numpy.typing as npt
@@ -61,7 +60,7 @@ class PacketParser:
         The path and filename to the L0 file to read.
     """
 
-    def __init__(self, packet_file: Union[str, Path]) -> None:
+    def __init__(self, packet_file: str | Path) -> None:
         """
         Read a L0 pkts file and perform all of the decom work.
 
@@ -250,7 +249,7 @@ def _read_waveform_bits(waveform_raw: str, high_sample: bool = True) -> list[int
 
 
 def calculate_idex_epoch_time(
-    shcoarse_time: Union[float, np.ndarray], shfine_time: Union[float, np.ndarray]
+    shcoarse_time: float | np.ndarray, shfine_time: float | np.ndarray
 ) -> npt.NDArray[np.int64]:
     """
     Calculate the epoch time from the FPGA header time variables.
@@ -16,7 +16,6 @@ Examples
 
 import logging
 from enum import Enum
-from typing import Union
 
 import pandas as pd
 import xarray as xr
@@ -226,7 +225,7 @@ def convert_waveforms(
 
 def get_trigger_mode_and_level(
     l1a_dataset: xr.Dataset,
-) -> Union[dict[str, xr.DataArray], dict]:
+) -> dict[str, xr.DataArray] | dict:
     """
     Determine the trigger mode and threshold level for each event.
 
@@ -249,7 +248,7 @@ def get_trigger_mode_and_level(
 
 def compute_trigger_values(
     trigger_mode: int, trigger_controls: int, gain_channel: str
-) -> Union[tuple[str, Union[int, float]], tuple[None, None]]:
+) -> tuple[str, int | float] | tuple[None, None]:
     """
     Compute the trigger mode label and threshold level.
 
@@ -24,7 +24,7 @@ import pandas as pd
 import xarray as xr
 from numpy.typing import NDArray
 from scipy.integrate import quad
-from scipy.optimize import curve_fit
+from scipy.optimize import curve_fit, root_scalar
 from scipy.signal import butter, detrend, filtfilt, find_peaks
 from scipy.stats import exponnorm
 
@@ -52,7 +52,33 @@ class BaselineNoiseTime(IntEnum):
     STOP = -5
 
 
-def idex_l2a(l1b_dataset: xr.Dataset) -> xr.Dataset:
+def load_calibration_files(ancillary_files: dict) -> tuple[NDArray, NDArray]:
+    """
+    Load calibration files for IDEX L2A processing.
+
+    Parameters
+    ----------
+    ancillary_files : dict
+        Dictionary containing paths to calibration files.
+
+    Returns
+    -------
+    numpy.ndarray
+        Calibration parameters for the rise time function.
+    numpy.ndarray
+        Calibration parameters for the charge yield function.
+    """
+    # Load calibration coefficients from ancillary files
+    t_rise_params = pd.read_csv(
+        ancillary_files["l2a-calibration-curve-yield-params"], skiprows=1, header=None
+    ).values.flatten()[:8]
+    yield_params = pd.read_csv(
+        ancillary_files["l2a-calibration-curve-t-rise"], skiprows=1, header=None
+    ).values.flatten()[:8]
+    return t_rise_params, yield_params
+
+
+def idex_l2a(l1b_dataset: xr.Dataset, ancillary_files: dict) -> xr.Dataset:
     """
     Will process IDEX l1b data to create l2a data products.
 
@@ -68,6 +94,9 @@ def idex_l2a(l1b_dataset: xr.Dataset) -> xr.Dataset:
     ----------
     l1b_dataset : xarray.Dataset
         IDEX L1a dataset to process.
+    ancillary_files : dict
+        Ancillary files containing calibration coefficients needed to estimate
+        velocity and mass of the dust particles.
 
     Returns
     -------
@@ -79,6 +108,7 @@ def idex_l2a(l1b_dataset: xr.Dataset) -> xr.Dataset:
     logger.info(
         f"Running IDEX L2A processing on dataset: {l1b_dataset.attrs['Logical_source']}"
     )
+    t_rise_params, yield_params = load_calibration_files(ancillary_files)
 
     tof_high = l1b_dataset["TOF_High"]
     hs_time = l1b_dataset["time_high_sample_rate"]
@@ -176,11 +206,24 @@ def idex_l2a(l1b_dataset: xr.Dataset) -> xr.Dataset:
             output_dtypes=[np.float64] * 6,
             keep_attrs=True,
         )
+        # Calculate mass and velocity estimates
+        velocity_mass_results = xr.apply_ufunc(
+            calculate_velocity_and_mass,
+            fit_results[1],  # signal amplitude
+            fit_results[0].data[:, 3],  # fit params
+            output_core_dims=[[], []],
+            vectorize=True,
+            output_dtypes=[np.float64, np.float64],
+            keep_attrs=True,
+            kwargs={"t_rise_params": t_rise_params, "yield_params": yield_params},
+        )
+
         waveform_name = waveform.lower()
         output_vars = {
             f"{waveform_name}_fit_parameters": fit_results[0],
             f"{waveform_name}_impact_charge": fit_results[1],
-            f"{waveform_name}_dust_mass_estimate": fit_results[1],
+            f"{waveform_name}_velocity_estimate": velocity_mass_results[0],
+            f"{waveform_name}_dust_mass_estimate": velocity_mass_results[1],
             # Same as impact_charge for now
             f"{waveform_name}_chi_squared": fit_results[2],
             f"{waveform_name}_reduced_chi_squared": fit_results[3],
@@ -261,6 +304,89 @@ def idex_l2a(l1b_dataset: xr.Dataset) -> xr.Dataset:
     return l2a_dataset
 
 
+def calculate_velocity_and_mass(
+    sig_amp: float, t_rise: float, t_rise_params: np.ndarray, yield_params: np.ndarray
+) -> tuple[float, float]:
+    """
+    Calculate velocity and mass estimates.
+
+    The fitted target signals are used to generate IDEX’s specific charge yield as a
+    function of the impact speed. The calibration curve is fitted with a
+    segmented power law distribution. The charge yield curve enables the mass of
+    the dust particle to be estimated from the total charge it generates on the target.
+
+    Parameters
+    ----------
+    sig_amp : float
+        Signal amplitude.
+    t_rise : float
+        T_rise fit parameter from the target fit.
+    t_rise_params : np.ndarray
+        Calibration parameters for rise time.
+    yield_params : np.ndarray
+        Calibration parameters for yield.
+
+    Returns
+    -------
+    v_est : float
+        Estimated velocity.
+    mass_est : float
+        Estimated mass.
+    """
+    log_a_t: float = np.log10(t_rise_params[0])
+    try:
+        root = root_scalar(
+            lambda lv: log_smooth_powerlaw(lv, log_a_t, t_rise_params[1:])
+            - np.log10(t_rise),
+            bracket=[-1, 2],
+        )
+        v_est = 10**root.root
+    except Exception:
+        logger.error(
+            "Unable to calculate velocity and mass estimate. "
+            "The root finding failed for power law function. "
+            "Returning nans for the estimate."
+        )
+        return np.nan, np.nan
+
+    log_a_y: float = np.log10(yield_params[0])
+    yield_val = 10 ** log_smooth_powerlaw(np.log10(v_est), log_a_y, yield_params[1:])
+    mass_est = sig_amp / yield_val
+
+    return v_est, mass_est
+
+
+def log_smooth_powerlaw(log_v: float, log_a: float, params: np.ndarray) -> float:
+    """
+    Define a smoothly transitioning power law to fit the calibration curve to.
+
+    Parameters
+    ----------
+    log_v : float
+        Velocity.
+    log_a : float
+        Scale factor.
+    params : np.ndarray
+        Calibration parameters for the power law.
+
+    Returns
+    -------
+    float
+        The value of the power law at the given velocity.
+    """
+    # Unpack the rest of the calibration parameters
+    # a1, a2, and a3 are the power law exponents for the low, medium, and high-velocity
+    # segments.
+    # vb and vc are the characteristic speeds where the slope transition happens, and k
+    # setting the sharpness of the transitions.
+    a1, a2, a3, vb, vc, k, m = params
+    v = 10**log_v
+    base = log_a + a1 * log_v
+    transition1 = (1 + (v / vb) ** m) ** ((a2 - a1) / m)
+    transition2 = (1 + (v / vc) ** m) ** ((a3 - a2) / m)
+    return base + np.log10(transition1 * transition2)
+
+
 def time_to_mass(
     tof_high: np.ndarray, high_sampling_time: np.ndarray, masses: np.ndarray
 ) -> tuple[NDArray, NDArray, NDArray]:
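
Reading the added calibration code above (an interpretation of the code, not taken from package documentation), log_smooth_powerlaw evaluates a smoothly broken power law in log space:

    \log_{10} f(v) = \log_{10} a + a_1 \log_{10} v
                     + \frac{a_2 - a_1}{m}\,\log_{10}\!\left(1 + (v/v_b)^{m}\right)
                     + \frac{a_3 - a_2}{m}\,\log_{10}\!\left(1 + (v/v_c)^{m}\right)

so the local slope is a1 below v_b, a2 between v_b and v_c, and a3 above v_c, with m setting how sharp the transitions are (the unpacked k does not appear in the expression). calculate_velocity_and_mass inverts the rise-time curve with root_scalar over log10(v) in [-1, 2], then divides the signal amplitude by the charge yield evaluated at that speed to obtain the mass estimate.
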
@@ -399,7 +525,7 @@ def calculate_kappa(mass_scales: np.ndarray, peaks_2d: list) -> NDArray:
     kappas = np.asarray(
         [
             np.mean(mass_scale[peaks] - np.round(mass_scale[peaks]))
-            for mass_scale, peaks in zip(mass_scales, peaks_2d)
+            for mass_scale, peaks in zip(mass_scales, peaks_2d, strict=False)
         ]
     )
     return kappas
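
The only change in this final hunk is the explicit strict=False on zip, which keeps the existing truncate-to-the-shortest behaviour while spelling the choice out; requiring the argument is typical of lint rules such as flake8-bugbear/ruff B905, though that motivation is an inference rather than something stated in the diff. A standalone illustration:

    # zip() pairs items only up to the shorter iterable unless strict=True.
    list(zip([1, 2, 3], ["a", "b"]))                # [(1, 'a'), (2, 'b')]
    list(zip([1, 2, 3], ["a", "b"], strict=False))  # same result, intent made explicit
    # With strict=True, the length mismatch would raise ValueError instead.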