imap-processing 0.16.2-py3-none-any.whl → 0.18.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of imap-processing has been flagged as a potentially problematic release.

Files changed (110)
  1. imap_processing/_version.py +2 -2
  2. imap_processing/ccsds/excel_to_xtce.py +12 -0
  3. imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +6 -6
  4. imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +35 -0
  5. imap_processing/cdf/config/imap_codice_l1b_variable_attrs.yaml +35 -0
  6. imap_processing/cdf/config/imap_codice_l2_variable_attrs.yaml +24 -0
  7. imap_processing/cdf/config/imap_hi_variable_attrs.yaml +8 -8
  8. imap_processing/cdf/config/imap_hit_global_cdf_attrs.yaml +1 -1
  9. imap_processing/cdf/config/imap_hit_l1a_variable_attrs.yaml +163 -100
  10. imap_processing/cdf/config/imap_hit_l2_variable_attrs.yaml +398 -415
  11. imap_processing/cdf/config/imap_ialirt_l1_variable_attrs.yaml +97 -54
  12. imap_processing/cdf/config/imap_idex_global_cdf_attrs.yaml +9 -9
  13. imap_processing/cdf/config/imap_idex_l2b_variable_attrs.yaml +233 -57
  14. imap_processing/cdf/config/imap_idex_l2c_variable_attrs.yaml +16 -90
  15. imap_processing/cdf/config/imap_lo_global_cdf_attrs.yaml +30 -0
  16. imap_processing/cdf/config/imap_mag_global_cdf_attrs.yaml +15 -1
  17. imap_processing/cdf/config/imap_swapi_variable_attrs.yaml +19 -0
  18. imap_processing/cdf/config/imap_swe_l1b_variable_attrs.yaml +20 -0
  19. imap_processing/cdf/config/imap_swe_l2_variable_attrs.yaml +39 -0
  20. imap_processing/cdf/config/imap_ultra_global_cdf_attrs.yaml +168 -0
  21. imap_processing/cdf/config/imap_ultra_l1a_variable_attrs.yaml +103 -2
  22. imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +91 -11
  23. imap_processing/cdf/utils.py +7 -1
  24. imap_processing/cli.py +42 -13
  25. imap_processing/codice/codice_l1a.py +125 -78
  26. imap_processing/codice/codice_l1b.py +1 -1
  27. imap_processing/codice/codice_l2.py +0 -9
  28. imap_processing/codice/constants.py +481 -498
  29. imap_processing/hi/hi_l1a.py +4 -4
  30. imap_processing/hi/hi_l1b.py +2 -2
  31. imap_processing/hi/packet_definitions/TLM_HI_COMBINED_SCI.xml +218 -38
  32. imap_processing/hit/hit_utils.py +2 -2
  33. imap_processing/hit/l0/decom_hit.py +4 -3
  34. imap_processing/hit/l1a/hit_l1a.py +64 -24
  35. imap_processing/hit/l1b/constants.py +5 -0
  36. imap_processing/hit/l1b/hit_l1b.py +18 -16
  37. imap_processing/hit/l2/constants.py +1 -1
  38. imap_processing/hit/l2/hit_l2.py +4 -4
  39. imap_processing/ialirt/constants.py +21 -0
  40. imap_processing/ialirt/generate_coverage.py +188 -0
  41. imap_processing/ialirt/l0/parse_mag.py +62 -5
  42. imap_processing/ialirt/l0/process_swapi.py +1 -1
  43. imap_processing/ialirt/l0/process_swe.py +23 -7
  44. imap_processing/ialirt/utils/constants.py +22 -16
  45. imap_processing/ialirt/utils/create_xarray.py +42 -19
  46. imap_processing/idex/idex_constants.py +8 -5
  47. imap_processing/idex/idex_l2b.py +554 -58
  48. imap_processing/idex/idex_l2c.py +30 -196
  49. imap_processing/lo/l0/lo_apid.py +1 -0
  50. imap_processing/lo/l0/lo_star_sensor.py +48 -0
  51. imap_processing/lo/l1a/lo_l1a.py +74 -30
  52. imap_processing/lo/packet_definitions/lo_xtce.xml +5359 -106
  53. imap_processing/mag/constants.py +1 -0
  54. imap_processing/mag/l0/decom_mag.py +9 -6
  55. imap_processing/mag/l0/mag_l0_data.py +46 -0
  56. imap_processing/mag/l1d/__init__.py +0 -0
  57. imap_processing/mag/l1d/mag_l1d.py +133 -0
  58. imap_processing/mag/l1d/mag_l1d_data.py +588 -0
  59. imap_processing/mag/l2/__init__.py +0 -0
  60. imap_processing/mag/l2/mag_l2.py +25 -20
  61. imap_processing/mag/l2/mag_l2_data.py +191 -130
  62. imap_processing/quality_flags.py +20 -2
  63. imap_processing/spice/geometry.py +25 -3
  64. imap_processing/spice/pointing_frame.py +1 -1
  65. imap_processing/spice/spin.py +4 -0
  66. imap_processing/spice/time.py +51 -0
  67. imap_processing/swapi/l1/swapi_l1.py +12 -2
  68. imap_processing/swapi/l2/swapi_l2.py +59 -14
  69. imap_processing/swapi/swapi_utils.py +1 -1
  70. imap_processing/swe/l1b/swe_l1b.py +11 -4
  71. imap_processing/swe/l2/swe_l2.py +111 -17
  72. imap_processing/ultra/constants.py +49 -1
  73. imap_processing/ultra/l0/decom_tools.py +28 -14
  74. imap_processing/ultra/l0/decom_ultra.py +225 -15
  75. imap_processing/ultra/l0/ultra_utils.py +281 -8
  76. imap_processing/ultra/l1a/ultra_l1a.py +77 -8
  77. imap_processing/ultra/l1b/cullingmask.py +3 -3
  78. imap_processing/ultra/l1b/de.py +53 -15
  79. imap_processing/ultra/l1b/extendedspin.py +26 -2
  80. imap_processing/ultra/l1b/lookup_utils.py +171 -50
  81. imap_processing/ultra/l1b/quality_flag_filters.py +14 -0
  82. imap_processing/ultra/l1b/ultra_l1b_culling.py +198 -5
  83. imap_processing/ultra/l1b/ultra_l1b_extended.py +304 -66
  84. imap_processing/ultra/l1c/helio_pset.py +54 -7
  85. imap_processing/ultra/l1c/spacecraft_pset.py +9 -1
  86. imap_processing/ultra/l1c/ultra_l1c.py +2 -0
  87. imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +106 -109
  88. imap_processing/ultra/packet_definitions/ULTRA_SCI_COMBINED.xml +3 -3
  89. imap_processing/ultra/utils/ultra_l1_utils.py +13 -1
  90. imap_processing/utils.py +20 -42
  91. {imap_processing-0.16.2.dist-info → imap_processing-0.18.0.dist-info}/METADATA +2 -2
  92. {imap_processing-0.16.2.dist-info → imap_processing-0.18.0.dist-info}/RECORD +95 -103
  93. imap_processing/lo/l0/data_classes/star_sensor.py +0 -98
  94. imap_processing/lo/l0/utils/lo_base.py +0 -57
  95. imap_processing/ultra/lookup_tables/Angular_Profiles_FM45_LeftSlit.csv +0 -526
  96. imap_processing/ultra/lookup_tables/Angular_Profiles_FM45_RightSlit.csv +0 -526
  97. imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_LeftSlit.csv +0 -526
  98. imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_RightSlit.csv +0 -524
  99. imap_processing/ultra/lookup_tables/EgyNorm.mem.csv +0 -32769
  100. imap_processing/ultra/lookup_tables/FM45_Startup1_ULTRA_IMGPARAMS_20240719.csv +0 -2
  101. imap_processing/ultra/lookup_tables/FM90_Startup1_ULTRA_IMGPARAMS_20240719.csv +0 -2
  102. imap_processing/ultra/lookup_tables/dps_grid45_compressed.cdf +0 -0
  103. imap_processing/ultra/lookup_tables/ultra45_back-pos-luts.csv +0 -4097
  104. imap_processing/ultra/lookup_tables/ultra45_tdc_norm.csv +0 -2050
  105. imap_processing/ultra/lookup_tables/ultra90_back-pos-luts.csv +0 -4097
  106. imap_processing/ultra/lookup_tables/ultra90_tdc_norm.csv +0 -2050
  107. imap_processing/ultra/lookup_tables/yadjust.csv +0 -257
  108. {imap_processing-0.16.2.dist-info → imap_processing-0.18.0.dist-info}/LICENSE +0 -0
  109. {imap_processing-0.16.2.dist-info → imap_processing-0.18.0.dist-info}/WHEEL +0 -0
  110. {imap_processing-0.16.2.dist-info → imap_processing-0.18.0.dist-info}/entry_points.txt +0 -0
@@ -169,7 +169,7 @@ STANDARD_PARTICLE_ENERGY_RANGE_MAPPING = {
     {"energy_min": 5.0, "energy_max": 6.0, "R2": [59], "R3": [], "R4": []},
     {"energy_min": 6.0, "energy_max": 8.0, "R2": [60], "R3": [63], "R4": []},
     {"energy_min": 8.0, "energy_max": 10.0, "R2": [61], "R3": [64], "R4": []},
-    {"energy_min": 10.0, "energy_max": 12.0, "R2": [], "R3": [65], "R4": []},
+    {"energy_min": 10.0, "energy_max": 12.0, "R2": [62], "R3": [65], "R4": []},
     {"energy_min": 12.0, "energy_max": 15.0, "R2": [], "R3": [66], "R4": []},
     {"energy_min": 15.0, "energy_max": 21.0, "R2": [], "R3": [67], "R4": []},
     {"energy_min": 21.0, "energy_max": 27.0, "R2": [], "R3": [68], "R4": []},
@@ -95,11 +95,11 @@ def add_cdf_attributes(

     Parameters
     ----------
-    dataset : xr.Dataset
+    dataset : xarray.Dataset
         The dataset to update.
     logical_source : str
         The logical source of the dataset.
-    attr_mgr : AttributeManager
+    attr_mgr : ImapCdfAttributes
         The attribute manager to retrieve attributes.

     Returns
@@ -132,7 +132,6 @@ def add_cdf_attributes(
     # check_schema=False to avoid attr_mgr adding stuff dimensions don't need
     for dim in dataset.dims:
         dataset[dim].attrs = attr_mgr.get_variable_attributes(dim, check_schema=False)
-        # TODO: should labels be added as coordinates? Check with SPDF
         if dim != "epoch":
             label_array = xr.DataArray(
                 dataset[dim].values.astype(str),
@@ -312,6 +311,7 @@ def calculate_intensities_for_a_species(
         The updated dataset with intensities calculated for the given species.
     """
     updated_ds = l2_dataset.copy()
+    # Get the dynamic threshold state for the species variable
    dynamic_threshold_states = updated_ds["dynamic_threshold_state"].values
    unique_states = np.unique(dynamic_threshold_states)
    species_name = (
@@ -632,7 +632,7 @@ def process_summed_intensity(
         summed_intensity_dataset = add_total_uncertainties(
             summed_intensity_dataset, var
         )
-        # Expand the variable name to include standard intensity
+        # Expand the variable name to include summed intensity
         summed_intensity_dataset = summed_intensity_dataset.rename(
             {var: f"{var}_summed_intensity"}
         )
@@ -1,6 +1,7 @@
 """Module for constants and useful shared classes used in I-ALiRT processing."""

 from dataclasses import dataclass
+from typing import NamedTuple

 import numpy as np

@@ -36,3 +37,23 @@ class IalirtSwapiConstants:
     az_fov = np.deg2rad(30)  # azimuthal width of the field of view, radians
     fwhm_width = 0.085  # FWHM of energy width
     speed_ew = 0.5 * fwhm_width  # speed width of energy passband
+
+
+class StationProperties(NamedTuple):
+    """Class that represents properties of ground stations."""
+
+    longitude: float  # longitude in degrees
+    latitude: float  # latitude in degrees
+    altitude: float  # altitude in kilometers
+    min_elevation_deg: float  # minimum elevation angle in degrees
+
+
+# Verified by Kiel Observatory staff.
+STATIONS = {
+    "Kiel": StationProperties(
+        longitude=10.1808,  # degrees East
+        latitude=54.2632,  # degrees North
+        altitude=0.1,  # approx 100 meters
+        min_elevation_deg=5,  # 5 degrees is the requirement
+    )
+}
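Because StationProperties is a NamedTuple, a station entry can be read by field name or unpacked positionally, which is how the new generate_coverage.py below consumes it. A small sketch of both access styles (the variable names here are illustrative):

from imap_processing.ialirt.constants import STATIONS

# Access fields by name ...
kiel = STATIONS["Kiel"]
print(kiel.latitude, kiel.min_elevation_deg)

# ... or unpack positionally, matching the (lon, lat, alt, min_elevation)
# pattern used in generate_coverage.py.
lon, lat, alt, min_elevation = STATIONS["Kiel"]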
@@ -0,0 +1,188 @@
+"""Coverage time for each station."""
+
+import logging
+
+import numpy as np
+
+from imap_processing.ialirt.constants import STATIONS
+from imap_processing.ialirt.process_ephemeris import calculate_azimuth_and_elevation
+from imap_processing.spice.time import et_to_utc, str_to_et
+
+# Logger setup
+logger = logging.getLogger(__name__)
+
+# TODO: get a list of all potential DSN stations.
+ALL_STATIONS = [*STATIONS.keys(), "DSS-55", "DSS-56", "DSS-74", "DSS-75"]
+
+
+def generate_coverage(
+    start_time: str,
+    outages: dict | None = None,
+    dsn: dict | None = None,
+) -> tuple[dict, dict]:
+    """
+    Build the output dictionary containing coverage and outage time for each station.
+
+    Parameters
+    ----------
+    start_time : str
+        Start time in UTC.
+    outages : dict, optional
+        Dictionary of outages for each station.
+    dsn : dict, optional
+        Dictionary of Deep Space Network (DSN) stations.
+
+    Returns
+    -------
+    coverage_dict : dict
+        Visibility times per station.
+    outage_dict : dict
+        Outage times per station.
+    """
+    duration_seconds = 24 * 60 * 60  # 86400 seconds in 24 hours
+    time_step = 3600  # 1 hr in seconds
+
+    stations = {
+        "Kiel": STATIONS["Kiel"],
+    }
+    coverage_dict = {}
+    outage_dict = {}
+
+    start_et_input = str_to_et(start_time)
+    stop_et_input = start_et_input + duration_seconds
+
+    time_range = np.arange(start_et_input, stop_et_input, time_step)
+    total_visible_mask = np.zeros(time_range.shape, dtype=bool)
+
+    # Precompute DSN outage mask for non-DSN stations
+    dsn_outage_mask = np.zeros(time_range.shape, dtype=bool)
+    if dsn:
+        for dsn_contacts in dsn.values():
+            for start, end in dsn_contacts:
+                start_et = str_to_et(start)
+                end_et = str_to_et(end)
+                dsn_outage_mask |= (time_range >= start_et) & (time_range <= end_et)
+
+    for station_name, (lon, lat, alt, min_elevation) in stations.items():
+        azimuth, elevation = calculate_azimuth_and_elevation(lon, lat, alt, time_range)
+        visible = elevation > min_elevation
+
+        outage_mask = np.zeros(time_range.shape, dtype=bool)
+        if outages and station_name in outages:
+            for start, end in outages[station_name]:
+                start_et = str_to_et(start)
+                end_et = str_to_et(end)
+                outage_mask |= (time_range >= start_et) & (time_range <= end_et)
+
+        visible[outage_mask] = False
+        # DSN contacts block other stations
+        visible[dsn_outage_mask] = False
+        total_visible_mask |= visible
+
+        coverage_dict[station_name] = et_to_utc(time_range[visible], format_str="ISOC")
+        outage_dict[station_name] = et_to_utc(
+            time_range[outage_mask], format_str="ISOC"
+        )
+
+    # --- DSN Stations ---
+    if dsn:
+        for dsn_station, contacts in dsn.items():
+            dsn_visible_mask = np.zeros(time_range.shape, dtype=bool)
+            for start, end in contacts:
+                start_et = str_to_et(start)
+                end_et = str_to_et(end)
+                dsn_visible_mask |= (time_range >= start_et) & (time_range <= end_et)
+
+            # Apply DSN outages if present
+            outage_mask = np.zeros(time_range.shape, dtype=bool)
+            if outages and dsn_station in outages:
+                for start, end in outages[dsn_station]:
+                    start_et = str_to_et(start)
+                    end_et = str_to_et(end)
+                    outage_mask |= (time_range >= start_et) & (time_range <= end_et)
+
+            dsn_visible_mask[outage_mask] = False
+            total_visible_mask |= dsn_visible_mask
+
+            coverage_dict[f"{dsn_station}"] = et_to_utc(
+                time_range[dsn_visible_mask], format_str="ISOC"
+            )
+            outage_dict[f"{dsn_station}"] = et_to_utc(
+                time_range[outage_mask], format_str="ISOC"
+            )
+
+    # Total coverage percentage
+    total_coverage_percent = (
+        np.count_nonzero(total_visible_mask) / time_range.size
+    ) * 100
+    coverage_dict["total_coverage_percent"] = total_coverage_percent
+
+    # Ensure all stations are present in both dicts
+    for station in ALL_STATIONS:
+        coverage_dict.setdefault(station, np.array([], dtype="<U23"))
+        outage_dict.setdefault(station, np.array([], dtype="<U23"))
+
+    return coverage_dict, outage_dict
+
+
+def format_coverage_summary(
+    coverage_dict: dict, outage_dict: dict, start_time: str
+) -> dict:
+    """
+    Build the output dictionary containing coverage time for each station.
+
+    Parameters
+    ----------
+    coverage_dict : dict
+        Coverage for each station, keyed by station name with arrays of UTC times.
+    outage_dict : dict
+        Outage times for each station, keyed by station name with arrays of UTC times.
+    start_time : str
+        Start time in UTC.
+
+    Returns
+    -------
+    output_dict : dict
+        Formatted coverage summary.
+    """
+    # Include all known stations,
+    # plus any new ones that appear in coverage_dict.
+    all_stations = ALL_STATIONS + [
+        station
+        for station in coverage_dict.keys()
+        if station not in ALL_STATIONS and station != "total_coverage_percent"
+    ]
+
+    duration_seconds = 24 * 60 * 60  # 86400 seconds in 24 hours
+    time_step = 3600  # 1 hr in seconds
+
+    start_et_input = str_to_et(start_time)
+    stop_et_input = start_et_input + duration_seconds
+
+    time_range = np.arange(start_et_input, stop_et_input, time_step)
+    all_times = et_to_utc(time_range, format_str="ISOC")
+
+    data_rows = []
+    for time in all_times:
+        row = {"time": time}
+        for station in all_stations:
+            visible_times = coverage_dict.get(station, [])
+            outage_times = outage_dict.get(station, [])
+            if time in outage_times:
+                row[station] = "X"
+            elif time in visible_times:
+                row[station] = "1"
+            else:
+                row[station] = "0"
+        data_rows.append(row)
+
+    output_dict = {
+        "summary": "I-ALiRT Coverage Summary",
+        "generated": start_time,
+        "time_format": "UTC (ISOC)",
+        "stations": all_stations,
+        "total_coverage_percent": round(coverage_dict["total_coverage_percent"], 1),
+        "data": data_rows,
+    }
+
+    return output_dict
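The new generate_coverage module exposes two functions that are meant to be used together: generate_coverage builds per-station visibility and outage arrays over a 24-hour window at 1-hour steps, and format_coverage_summary flattens them into one row per timestamp. A minimal usage sketch; the start time, Kiel outage window, and DSS-55 contact below are illustrative values rather than anything from the package, and SPICE kernels must already be furnished for str_to_et/et_to_utc to work:

from imap_processing.ialirt.generate_coverage import (
    format_coverage_summary,
    generate_coverage,
)

# Hypothetical inputs: one Kiel outage and one DSN contact on DSS-55.
outages = {"Kiel": [("2026-01-01T03:00:00", "2026-01-01T05:00:00")]}
dsn = {"DSS-55": [("2026-01-01T10:00:00", "2026-01-01T12:00:00")]}

coverage, outage = generate_coverage(
    "2026-01-01T00:00:00", outages=outages, dsn=dsn
)
summary = format_coverage_summary(coverage, outage, "2026-01-01T00:00:00")
print(summary["total_coverage_percent"])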
@@ -20,6 +20,8 @@ from imap_processing.mag.l1b.mag_l1b import (
     calibrate_vector,
     shift_time,
 )
+from imap_processing.mag.l1d.mag_l1d_data import MagL1d
+from imap_processing.mag.l2.mag_l2_data import MagL2L1dBase
 from imap_processing.spice.time import met_to_ttj2000ns, met_to_utc

 logger = logging.getLogger(__name__)
@@ -286,6 +288,56 @@ def calculate_l1b(
     return updated_vector_mago, updated_vector_magi, time_data


+def calibrate_and_offset_vectors(
+    vectors: np.ndarray,
+    range_vals: np.ndarray,
+    calibration: np.ndarray,
+    offsets: np.ndarray,
+    is_magi: bool = False,
+) -> np.ndarray:
+    """
+    Apply calibration and offsets to magnetic vectors.
+
+    Parameters
+    ----------
+    vectors : np.ndarray
+        Raw magnetic vectors, shape (n, 3).
+    range_vals : np.ndarray
+        Range indices for each vector, shape (n). Values 0–3.
+    calibration : np.ndarray
+        Calibration matrix, shape (3, 3, 4).
+    offsets : np.ndarray
+        Offsets array, shape (2, 4, 3) where:
+        - index 0 = MAGo, 1 = MAGi
+        - second index = range (0–3)
+        - third index = axis (x, y, z)
+    is_magi : bool, optional
+        True if applying to MAGi data, False for MAGo.
+
+    Returns
+    -------
+    calibrated_and_offset_vectors : np.ndarray
+        Calibrated and offset vectors, shape (n, 3).
+    """
+    # Append range as 4th column
+    vec_plus_range = np.concatenate((vectors, range_vals[:, np.newaxis]), axis=1)
+
+    # Apply calibration matrix -> (n,4)
+    calibrated = MagL2L1dBase.apply_calibration(vec_plus_range, calibration)
+
+    # Apply offsets per vector
+    # vec shape (4)
+    # offsets shape (2, 4, 3) where first index is 0 for MAGo and 1 for MAGi
+    calibrated = np.array(
+        [
+            MagL1d.apply_calibration_offset_single_vector(vec, offsets, is_magi=is_magi)
+            for vec in calibrated
+        ]
+    )
+
+    return calibrated[:, :3]
+
+
 def process_packet(
     accumulated_data: xr.Dataset, calibration_dataset: xr.Dataset
 ) -> tuple[list[dict], list[dict]]:
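The new calibrate_and_offset_vectors helper chains the MagL2L1dBase calibration step with the MagL1d per-range offsets and returns only the first three columns. A minimal call sketch with the array shapes taken from the docstring above; the zero-filled inputs are placeholders, and the import path assumes this hunk belongs to imap_processing/ialirt/l0/parse_mag.py from the file list:

import numpy as np

from imap_processing.ialirt.l0.parse_mag import calibrate_and_offset_vectors

n = 4  # number of vectors in a packet (placeholder)
vectors = np.zeros((n, 3))         # raw MAGo vectors, shape (n, 3)
range_vals = np.zeros(n)           # range index 0-3 per vector
calibration = np.zeros((3, 3, 4))  # calibration matrix per range
offsets = np.zeros((2, 4, 3))      # [MAGo/MAGi, range, axis]

calibrated = calibrate_and_offset_vectors(
    vectors, range_vals, calibration, offsets, is_magi=False
)
print(calibrated.shape)  # expected (n, 3)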
@@ -392,11 +444,16 @@ def process_packet(
                 "met": int(met.values.min()),
                 "met_in_utc": met_to_utc(met.values.min()).split(".")[0],
                 "ttj2000ns": int(met_to_ttj2000ns(met.values.min())),
-                "mag_4s_b_gse": [Decimal("0.0") for _ in range(3)],
-                "mag_4s_b_gsm": [Decimal("0.0") for _ in range(3)],
-                "mag_4s_b_rtn": [Decimal("0.0") for _ in range(3)],
-                "mag_phi_4s_b_gsm": Decimal("0.0"),
-                "mag_theta_4s_b_gsm": Decimal("0.0"),
+                # TODO: Placeholder for mag_epoch
+                "mag_epoch": int(met.values.min()),
+                "mag_B_GSE": [Decimal("0.0") for _ in range(3)],
+                "mag_B_GSM": [Decimal("0.0") for _ in range(3)],
+                "mag_B_RTN": [Decimal("0.0") for _ in range(3)],
+                "mag_B_magnitude": Decimal("0.0"),
+                "mag_phi_B_GSM": Decimal("0.0"),
+                "mag_theta_B_GSM": Decimal("0.0"),
+                "mag_phi_B_GSE": Decimal("0.0"),
+                "mag_theta_B_GSE": Decimal("0.0"),
             }
         )

@@ -96,7 +96,7 @@ def optimize_pseudo_parameters(
     # Read in energy passbands
     energy_data = pd.read_csv(
         f"{imap_module_directory}/tests/swapi/lut/imap_swapi_esa-unit"
-        f"-conversion_20250211_v000.csv"
+        f"-conversion_20250626_v001.csv"
     )
     energy_passbands = (
         energy_data["Energy"][0:63]
@@ -542,16 +542,32 @@ def process_swe(accumulated_data: xr.Dataset, in_flight_cal_files: list) -> list
         summed_first = normalized_first_half.sum(axis=(1, 2))
         summed_second = normalized_second_half.sum(axis=(1, 2))

+        met_first_half = int(
+            grouped["met"].where(grouped["swe_seq"] == 0, drop=True).values[0]
+        )
+        met_second_half = int(
+            grouped["met"].where(grouped["swe_seq"] == 30, drop=True).values[0]
+        )
+
+        swe_data.append(
+            {
+                "apid": 478,
+                "met": met_first_half,
+                "met_in_utc": met_to_utc(met_first_half).split(".")[0],
+                "ttj2000ns": int(met_to_ttj2000ns(met_first_half)),
+                "swe_normalized_counts": [int(val) for val in summed_first],
+                "swe_counterstreaming_electrons": bde_first_half,
+            },
+        )
         swe_data.append(
             {
                 "apid": 478,
-                "met": int(grouped["met"].min()),
-                "met_in_utc": met_to_utc(grouped["met"].min()).split(".")[0],
-                "ttj2000ns": int(met_to_ttj2000ns(grouped["met"].min())),
-                "swe_normalized_counts_half_1_esa": [int(val) for val in summed_first],
-                "swe_normalized_counts_half_2_esa": [int(val) for val in summed_second],
-                "swe_counterstreaming_electrons": max(bde_first_half, bde_second_half),
-            }
+                "met": met_second_half,
+                "met_in_utc": met_to_utc(met_second_half).split(".")[0],
+                "ttj2000ns": int(met_to_ttj2000ns(met_second_half)),
+                "swe_normalized_counts": [int(val) for val in summed_second],
+                "swe_counterstreaming_electrons": bde_second_half,
+            },
         )

     return swe_data
@@ -2,19 +2,19 @@

 IALIRT_KEYS = [
     # H intensities in 15 energy ranges and binned into 4 azimuths and 4 spin angle bins
-    "codicehi_h",
+    "codice_hi_h",
     # C/O abundance ratio
-    "codicelo_c_over_o_abundance",
+    "codice_lo_c_over_o_abundance",
     # Mg/O abundance ratio
-    "codicelo_mg_over_o_abundance",
+    "codice_lo_mg_over_o_abundance",
     # Fe/O abundance ratio
-    "codicelo_fe_over_o_abundance",
+    "codice_lo_fe_over_o_abundance",
     # C+6/C+5 charge state ratio
-    "codicelo_c_plus_6_over_c_plus_5_ratio",
+    "codice_lo_c_plus_6_over_c_plus_5_ratio",
     # O+7/O+6 charge state ratio
-    "codicelo_o_plus_7_over_o_plus_6_ratio",
+    "codice_lo_o_plus_7_over_o_plus_6_ratio",
     # Fe low/Fe high charge state ratio
-    "codicelo_fe_low_over_fe_high_ratio",
+    "codice_lo_fe_low_over_fe_high_ratio",
     # Low energy (~300 keV) electrons (A-side)
     "hit_e_a_side_low_en",
     # Medium energy (~3 MeV) electrons (A-side)
@@ -37,26 +37,32 @@ IALIRT_KEYS = [
     "hit_he_omni_low_en",
     # High energy (15 to 70 MeV/nuc) He (Omnidirectional)
     "hit_he_omni_high_en",
+    # MAG instrument epoch
+    "mag_epoch",
     # Magnetic field vector in GSE coordinates
-    "mag_4s_b_gse",
+    "mag_B_GSE",
     # Magnetic field vector in GSM coordinates
-    "mag_4s_b_gsm",
+    "mag_B_GSM",
     # Magnetic field vector in RTN coordinates
-    "mag_4s_b_rtn",
+    "mag_B_RTN",
+    # Magnitude of the magnetic field vector
+    "mag_B_magnitude",
     # Azimuth angle (φ) of the magnetic field in GSM coordinates
-    "mag_phi_4s_b_gsm",
+    "mag_phi_B_GSM",
     # Elevation angle (θ) of the magnetic field in GSM coordinates
-    "mag_theta_4s_b_gsm",
+    "mag_theta_B_GSM",
+    # Azimuth angle (φ) of the magnetic field in GSE coordinates
+    "mag_phi_B_GSE",
+    # Elevation angle (θ) of the magnetic field in GSE coordinates
+    "mag_theta_B_GSE",
     # Pseudo density of solar wind protons
     "swapi_pseudo_proton_density",
     # Pseudo speed of solar wind protons in solar inertial frame
     "swapi_pseudo_proton_speed",
     # Pseudo temperature of solar wind protons in plasma frame
     "swapi_pseudo_proton_temperature",
-    # SWE Normalized Counts - Half Cycle 1
-    "swe_normalized_counts_half_1",
-    # SWE Normalized Counts - Half Cycle 2
-    "swe_normalized_counts_half_2",
+    # SWE Normalized Counts
+    "swe_normalized_counts",
     # SWE Counterstreaming flag
     "swe_counterstreaming_electrons",
 ]
@@ -48,6 +48,13 @@ def create_xarray_from_records(records: list[dict]) -> xr.Dataset: # noqa: PLR0
         attrs=cdf_manager.get_variable_attributes("component", check_schema=False),
     )

+    rtn_component = xr.DataArray(
+        ["radial", "tangential", "normal"],
+        name="RTN_component",
+        dims=["RTN_component"],
+        attrs=cdf_manager.get_variable_attributes("RTN_componentt", check_schema=False),
+    )
+
     esa_step = xr.DataArray(
         data=np.arange(8, dtype=np.uint8),
         name="esa_step",
@@ -57,32 +64,39 @@ def create_xarray_from_records(records: list[dict]) -> xr.Dataset: # noqa: PLR0

     energy_ranges = xr.DataArray(
         data=np.arange(15, dtype=np.uint8),
-        name="energy_ranges",
-        dims=["energy_ranges"],
-        attrs=cdf_manager.get_variable_attributes("energy_ranges", check_schema=False),
+        name="codice_hi_h_energy_ranges",
+        dims=["codice_hi_h_energy_ranges"],
+        attrs=cdf_manager.get_variable_attributes(
+            "codice_hi_h_energy_ranges", check_schema=False
+        ),
     )

-    azimuth = xr.DataArray(
+    elevation = xr.DataArray(
         data=np.arange(4, dtype=np.uint8),
-        name="azimuth",
-        dims=["azimuth"],
-        attrs=cdf_manager.get_variable_attributes("azimuth", check_schema=False),
+        name="codice_hi_h_elevation",
+        dims=["codice_hi_h_elevation"],
+        attrs=cdf_manager.get_variable_attributes(
+            "codice_hi_h_elevation", check_schema=False
+        ),
     )

-    spin_angle_bin = xr.DataArray(
+    spin_angle = xr.DataArray(
         data=np.arange(4, dtype=np.uint8),
-        name="spin_angle_bin",
-        dims=["spin_angle_bin"],
-        attrs=cdf_manager.get_variable_attributes("spin_angle_bin", check_schema=False),
+        name="codice_hi_h_spin_angle",
+        dims=["codice_hi_h_spin_angle"],
+        attrs=cdf_manager.get_variable_attributes(
+            "codice_hi_h_spin_anglen", check_schema=False
+        ),
     )

     coords = {
         "epoch": epoch,
         "component": component,
+        "RTN_component": rtn_component,
         "esa_step": esa_step,
-        "energy_ranges": energy_ranges,
-        "azimuth": azimuth,
-        "spin_angle_bin": spin_angle_bin,
+        "codice_hi_h_energy_ranges": energy_ranges,
+        "codice_hi_h_elevation": elevation,
+        "codice_hi_h_spin_angle": spin_angle,
     }
     dataset = xr.Dataset(
         coords=coords,
@@ -93,13 +107,22 @@ def create_xarray_from_records(records: list[dict]) -> xr.Dataset: # noqa: PLR0
     for key in instrument_keys:
         attrs = cdf_manager.get_variable_attributes(key, check_schema=False)
         fillval = attrs.get("FILLVAL")
-        if key.startswith("mag"):
+        if key in ["mag_B_GSE", "mag_B_GSM"]:
             data = np.full((n, 3), fillval, dtype=np.float32)
             dims = ["epoch", "component"]
             dataset[key] = xr.DataArray(data, dims=dims, attrs=attrs)
-        elif key.startswith("codicehi"):
+        elif key == "mag_B_RTN":
+            data = np.full((n, 3), fillval, dtype=np.float32)
+            dims = ["epoch", "RTN_component"]
+            dataset[key] = xr.DataArray(data, dims=dims, attrs=attrs)
+        elif key.startswith("codice_hi"):
             data = np.full((n, 15, 4, 4), fillval, dtype=np.float32)
-            dims = ["epoch", "energy", "azimuth", "spin_angle_bin"]
+            dims = [
+                "epoch",
+                "codice_hi_h_energy_ranges",
+                "codice_hi_h_elevation",
+                "codice_hi_h_spin_angle",
+            ]
             dataset[key] = xr.DataArray(data, dims=dims, attrs=attrs)
         elif key == "swe_counterstreaming_electrons":
             data = np.full(n, fillval, dtype=np.uint8)
@@ -123,11 +146,11 @@ def create_xarray_from_records(records: list[dict]) -> xr.Dataset: # noqa: PLR0
         for key, val in record.items():
             if key in ["apid", "met", "met_in_utc", "ttj2000ns"]:
                 continue
-            elif key.startswith("mag"):
+            elif key in ["mag_B_GSE", "mag_B_GSM", "mag_B_RTN"]:
                 dataset[key].data[i, :] = val
             elif key.startswith("swe_normalized_counts"):
                 dataset[key].data[i, :] = val
-            elif key.startswith("codicehi"):
+            elif key.startswith("codice_hi"):
                 dataset[key].data[i, :, :, :] = val
             else:
                 dataset[key].data[i] = val
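Taken together, the create_xarray_from_records changes give mag_B_RTN its own RTN_component dimension and move the CoDICE-Hi proton product onto instrument-prefixed dimensions. A standalone sketch of the resulting coordinate layout, with dimension sizes taken from the np.full calls above and placeholder data; this is not the package's builder, only an illustration of the structure it produces:

import numpy as np
import xarray as xr

n = 1  # one epoch, placeholder
ds = xr.Dataset(
    coords={
        "epoch": np.zeros(n, dtype="datetime64[ns]"),
        "RTN_component": ["radial", "tangential", "normal"],
        "codice_hi_h_energy_ranges": np.arange(15, dtype=np.uint8),
        "codice_hi_h_elevation": np.arange(4, dtype=np.uint8),
        "codice_hi_h_spin_angle": np.arange(4, dtype=np.uint8),
    }
)
# mag_B_RTN is dimensioned on the new RTN_component coordinate ...
ds["mag_B_RTN"] = (("epoch", "RTN_component"), np.zeros((n, 3), dtype=np.float32))
# ... and the CoDICE-Hi proton product uses the instrument-prefixed dimensions.
ds["codice_hi_h"] = (
    (
        "epoch",
        "codice_hi_h_energy_ranges",
        "codice_hi_h_elevation",
        "codice_hi_h_spin_angle",
    ),
    np.zeros((n, 15, 4, 4), dtype=np.float32),
)
print(ds["codice_hi_h"].dims)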
@@ -46,6 +46,13 @@ NS_TO_S = 1e-9
 # Microseconds to seconds conversion
 US_TO_S = 1e-6

+# Seconds in a day
+SECONDS_IN_DAY = 86400
+# Nanoseconds in day
+NANOSECONDS_IN_DAY = SECONDS_IN_DAY * int(1e9)
+# fg to kg conversion factor
+FG_TO_KG = 1e-15
+
 TARGET_HIGH_FREQUENCY_CUTOFF = 100

 TARGET_NOISE_FREQUENCY = 7000
@@ -75,13 +82,9 @@ SPICE_ARRAYS = [
     "spin_phase",
 ]

-# Default IDEX Healpix parameters
-# Used in IDEX l2c processing
-IDEX_HEALPIX_NSIDE = 8
-IDEX_HEALPIX_NESTED = False
 # Default IDEX Rectangular parameters
 # Used in IDEX l2c processing
-IDEX_SPACING_DEG = 4  # TODO
+IDEX_SPACING_DEG = 6

 # Define the pointing reference frame for IDEX
 IDEX_EVENT_REFERENCE_FRAME = SpiceFrame.ECLIPJ2000