imap-processing 0.17.0__py3-none-any.whl → 0.19.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of imap-processing has been flagged as potentially problematic; consult the registry's advisory page for details.

Files changed (141):
  1. imap_processing/_version.py +2 -2
  2. imap_processing/ancillary/ancillary_dataset_combiner.py +161 -1
  3. imap_processing/ccsds/excel_to_xtce.py +12 -0
  4. imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +6 -6
  5. imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +312 -274
  6. imap_processing/cdf/config/imap_codice_l1b_variable_attrs.yaml +39 -28
  7. imap_processing/cdf/config/imap_codice_l2_variable_attrs.yaml +1048 -183
  8. imap_processing/cdf/config/imap_constant_attrs.yaml +4 -2
  9. imap_processing/cdf/config/imap_glows_l1b_variable_attrs.yaml +12 -0
  10. imap_processing/cdf/config/imap_hi_global_cdf_attrs.yaml +5 -0
  11. imap_processing/cdf/config/imap_hit_global_cdf_attrs.yaml +10 -4
  12. imap_processing/cdf/config/imap_hit_l1a_variable_attrs.yaml +163 -100
  13. imap_processing/cdf/config/imap_hit_l2_variable_attrs.yaml +4 -4
  14. imap_processing/cdf/config/imap_ialirt_l1_variable_attrs.yaml +97 -54
  15. imap_processing/cdf/config/imap_idex_l2a_variable_attrs.yaml +33 -4
  16. imap_processing/cdf/config/imap_idex_l2b_variable_attrs.yaml +44 -44
  17. imap_processing/cdf/config/imap_idex_l2c_variable_attrs.yaml +77 -61
  18. imap_processing/cdf/config/imap_lo_global_cdf_attrs.yaml +30 -0
  19. imap_processing/cdf/config/imap_lo_l1a_variable_attrs.yaml +4 -15
  20. imap_processing/cdf/config/imap_lo_l1c_variable_attrs.yaml +189 -98
  21. imap_processing/cdf/config/imap_mag_global_cdf_attrs.yaml +99 -2
  22. imap_processing/cdf/config/imap_mag_l1c_variable_attrs.yaml +24 -1
  23. imap_processing/cdf/config/imap_ultra_global_cdf_attrs.yaml +60 -0
  24. imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +99 -11
  25. imap_processing/cdf/config/imap_ultra_l1c_variable_attrs.yaml +50 -7
  26. imap_processing/cli.py +121 -44
  27. imap_processing/codice/codice_l1a.py +165 -77
  28. imap_processing/codice/codice_l1b.py +1 -1
  29. imap_processing/codice/codice_l2.py +118 -19
  30. imap_processing/codice/constants.py +1217 -1089
  31. imap_processing/decom.py +1 -4
  32. imap_processing/ena_maps/ena_maps.py +32 -25
  33. imap_processing/ena_maps/utils/naming.py +8 -2
  34. imap_processing/glows/ancillary/imap_glows_exclusions-by-instr-team_20250923_v002.dat +10 -0
  35. imap_processing/glows/ancillary/imap_glows_map-of-excluded-regions_20250923_v002.dat +393 -0
  36. imap_processing/glows/ancillary/imap_glows_map-of-uv-sources_20250923_v002.dat +593 -0
  37. imap_processing/glows/ancillary/imap_glows_pipeline_settings_20250923_v002.json +54 -0
  38. imap_processing/glows/ancillary/imap_glows_suspected-transients_20250923_v002.dat +10 -0
  39. imap_processing/glows/l1b/glows_l1b.py +99 -9
  40. imap_processing/glows/l1b/glows_l1b_data.py +350 -38
  41. imap_processing/glows/l2/glows_l2.py +11 -0
  42. imap_processing/hi/hi_l1a.py +124 -3
  43. imap_processing/hi/hi_l1b.py +154 -71
  44. imap_processing/hi/hi_l2.py +84 -51
  45. imap_processing/hi/utils.py +153 -8
  46. imap_processing/hit/l0/constants.py +3 -0
  47. imap_processing/hit/l0/decom_hit.py +5 -8
  48. imap_processing/hit/l1a/hit_l1a.py +375 -45
  49. imap_processing/hit/l1b/constants.py +5 -0
  50. imap_processing/hit/l1b/hit_l1b.py +61 -131
  51. imap_processing/hit/l2/constants.py +1 -1
  52. imap_processing/hit/l2/hit_l2.py +10 -11
  53. imap_processing/ialirt/calculate_ingest.py +219 -0
  54. imap_processing/ialirt/constants.py +32 -1
  55. imap_processing/ialirt/generate_coverage.py +201 -0
  56. imap_processing/ialirt/l0/ialirt_spice.py +5 -2
  57. imap_processing/ialirt/l0/parse_mag.py +337 -29
  58. imap_processing/ialirt/l0/process_hit.py +5 -3
  59. imap_processing/ialirt/l0/process_swapi.py +41 -25
  60. imap_processing/ialirt/l0/process_swe.py +23 -7
  61. imap_processing/ialirt/process_ephemeris.py +70 -14
  62. imap_processing/ialirt/utils/constants.py +22 -16
  63. imap_processing/ialirt/utils/create_xarray.py +42 -19
  64. imap_processing/idex/idex_constants.py +1 -5
  65. imap_processing/idex/idex_l0.py +2 -2
  66. imap_processing/idex/idex_l1a.py +2 -3
  67. imap_processing/idex/idex_l1b.py +2 -3
  68. imap_processing/idex/idex_l2a.py +130 -4
  69. imap_processing/idex/idex_l2b.py +313 -119
  70. imap_processing/idex/idex_utils.py +1 -3
  71. imap_processing/lo/l0/lo_apid.py +1 -0
  72. imap_processing/lo/l0/lo_science.py +25 -24
  73. imap_processing/lo/l1a/lo_l1a.py +44 -0
  74. imap_processing/lo/l1b/lo_l1b.py +3 -3
  75. imap_processing/lo/l1c/lo_l1c.py +116 -50
  76. imap_processing/lo/l2/lo_l2.py +29 -29
  77. imap_processing/lo/lo_ancillary.py +55 -0
  78. imap_processing/lo/packet_definitions/lo_xtce.xml +5359 -106
  79. imap_processing/mag/constants.py +1 -0
  80. imap_processing/mag/l1a/mag_l1a.py +1 -0
  81. imap_processing/mag/l1a/mag_l1a_data.py +26 -0
  82. imap_processing/mag/l1b/mag_l1b.py +3 -2
  83. imap_processing/mag/l1c/interpolation_methods.py +14 -15
  84. imap_processing/mag/l1c/mag_l1c.py +23 -6
  85. imap_processing/mag/l1d/__init__.py +0 -0
  86. imap_processing/mag/l1d/mag_l1d.py +176 -0
  87. imap_processing/mag/l1d/mag_l1d_data.py +725 -0
  88. imap_processing/mag/l2/__init__.py +0 -0
  89. imap_processing/mag/l2/mag_l2.py +25 -20
  90. imap_processing/mag/l2/mag_l2_data.py +199 -130
  91. imap_processing/quality_flags.py +28 -2
  92. imap_processing/spice/geometry.py +101 -36
  93. imap_processing/spice/pointing_frame.py +1 -7
  94. imap_processing/spice/repoint.py +29 -2
  95. imap_processing/spice/spin.py +32 -8
  96. imap_processing/spice/time.py +60 -19
  97. imap_processing/swapi/l1/swapi_l1.py +10 -4
  98. imap_processing/swapi/l2/swapi_l2.py +66 -24
  99. imap_processing/swapi/swapi_utils.py +1 -1
  100. imap_processing/swe/l1b/swe_l1b.py +3 -6
  101. imap_processing/ultra/constants.py +28 -3
  102. imap_processing/ultra/l0/decom_tools.py +15 -8
  103. imap_processing/ultra/l0/decom_ultra.py +35 -11
  104. imap_processing/ultra/l0/ultra_utils.py +102 -12
  105. imap_processing/ultra/l1a/ultra_l1a.py +26 -6
  106. imap_processing/ultra/l1b/cullingmask.py +6 -3
  107. imap_processing/ultra/l1b/de.py +122 -26
  108. imap_processing/ultra/l1b/extendedspin.py +29 -2
  109. imap_processing/ultra/l1b/lookup_utils.py +424 -50
  110. imap_processing/ultra/l1b/quality_flag_filters.py +23 -0
  111. imap_processing/ultra/l1b/ultra_l1b_culling.py +356 -5
  112. imap_processing/ultra/l1b/ultra_l1b_extended.py +534 -90
  113. imap_processing/ultra/l1c/helio_pset.py +127 -7
  114. imap_processing/ultra/l1c/l1c_lookup_utils.py +256 -0
  115. imap_processing/ultra/l1c/spacecraft_pset.py +90 -15
  116. imap_processing/ultra/l1c/ultra_l1c.py +6 -0
  117. imap_processing/ultra/l1c/ultra_l1c_culling.py +85 -0
  118. imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +446 -341
  119. imap_processing/ultra/l2/ultra_l2.py +0 -1
  120. imap_processing/ultra/utils/ultra_l1_utils.py +40 -3
  121. imap_processing/utils.py +3 -4
  122. {imap_processing-0.17.0.dist-info → imap_processing-0.19.0.dist-info}/METADATA +3 -3
  123. {imap_processing-0.17.0.dist-info → imap_processing-0.19.0.dist-info}/RECORD +126 -126
  124. imap_processing/idex/idex_l2c.py +0 -250
  125. imap_processing/spice/kernels.py +0 -187
  126. imap_processing/ultra/lookup_tables/Angular_Profiles_FM45_LeftSlit.csv +0 -526
  127. imap_processing/ultra/lookup_tables/Angular_Profiles_FM45_RightSlit.csv +0 -526
  128. imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_LeftSlit.csv +0 -526
  129. imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_RightSlit.csv +0 -524
  130. imap_processing/ultra/lookup_tables/EgyNorm.mem.csv +0 -32769
  131. imap_processing/ultra/lookup_tables/FM45_Startup1_ULTRA_IMGPARAMS_20240719.csv +0 -2
  132. imap_processing/ultra/lookup_tables/FM90_Startup1_ULTRA_IMGPARAMS_20240719.csv +0 -2
  133. imap_processing/ultra/lookup_tables/dps_grid45_compressed.cdf +0 -0
  134. imap_processing/ultra/lookup_tables/ultra45_back-pos-luts.csv +0 -4097
  135. imap_processing/ultra/lookup_tables/ultra45_tdc_norm.csv +0 -2050
  136. imap_processing/ultra/lookup_tables/ultra90_back-pos-luts.csv +0 -4097
  137. imap_processing/ultra/lookup_tables/ultra90_tdc_norm.csv +0 -2050
  138. imap_processing/ultra/lookup_tables/yadjust.csv +0 -257
  139. {imap_processing-0.17.0.dist-info → imap_processing-0.19.0.dist-info}/LICENSE +0 -0
  140. {imap_processing-0.17.0.dist-info → imap_processing-0.19.0.dist-info}/WHEEL +0 -0
  141. {imap_processing-0.17.0.dist-info → imap_processing-0.19.0.dist-info}/entry_points.txt +0 -0
@@ -131,6 +131,7 @@ MAX_FINE_TIME = np.iinfo(np.uint16).max # maximum 16 bit unsigned int
131
131
  AXIS_COUNT = 3
132
132
  RANGE_BIT_WIDTH = 2
133
133
  MAX_COMPRESSED_VECTOR_BITS = 60
134
+ FILLVAL = -1e31
134
135
 
135
136
 
136
137
  def vectors_per_second_from_string(vecsec_string: str) -> dict:
@@ -328,6 +328,7 @@ def generate_dataset(
328
328
  global_attributes = attribute_manager.get_global_attributes(logical_file_id)
329
329
  global_attributes["is_mago"] = str(bool(single_file_l1a.is_mago))
330
330
  global_attributes["is_active"] = str(bool(single_file_l1a.is_active))
331
+ global_attributes["all_vectors_primary"] = single_file_l1a.all_vectors_primary()
331
332
  global_attributes["vectors_per_second"] = (
332
333
  single_file_l1a.vectors_per_second_attribute()
333
334
  )
@@ -15,6 +15,7 @@ from imap_processing.mag.constants import (
15
15
  MAX_COMPRESSED_VECTOR_BITS,
16
16
  MAX_FINE_TIME,
17
17
  RANGE_BIT_WIDTH,
18
+ PrimarySensor,
18
19
  )
19
20
  from imap_processing.spice.time import met_to_ttj2000ns
20
21
 
@@ -241,6 +242,7 @@ class MagL1a:
241
242
  twos_complement()
242
243
  update_compression_array()
243
244
  vectors_per_second_attribute()
245
+ all_vectors_primary()
244
246
  """
245
247
 
246
248
  is_mago: bool
@@ -1117,3 +1119,27 @@ class MagL1a:
1117
1119
  last_vectors_per_second = vecsec
1118
1120
 
1119
1121
  return output_str
1122
+
1123
def all_vectors_primary(self) -> bool:
    """
    Check if all vectors in the file are from the primary sensor.

    For MAGO datasets, this checks if MAGO was consistently the primary sensor
    across all packets. For MAGI datasets, this checks if MAGI was consistently
    the primary sensor across all packets.

    Returns
    -------
    bool
        True if all vectors are from the primary sensor across all packets,
        False otherwise. True for an empty packet collection (vacuous truth).
    """
    expected_primary_value = (
        PrimarySensor.MAGO.value if self.is_mago else PrimarySensor.MAGI.value
    )

    # Only the packet values are needed, so iterate .values() rather than
    # .items() (avoids an unused-key loop), and let all() short-circuit on
    # the first non-matching packet.
    return all(
        packet.mago_is_primary == expected_primary_value
        for packet in self.packet_definitions.values()
    )
@@ -74,7 +74,6 @@ def mag_l1b(
74
74
  calibration_matrix, time_shift = retrieve_matrix_from_l1b_calibration(
75
75
  calibration_dataset, day_to_process, is_mago
76
76
  )
77
- print(f"Using calibration matrix: {calibration_matrix}")
78
77
 
79
78
  output_dataset = mag_l1b_processing(
80
79
  input_dataset, calibration_matrix, time_shift, mag_attributes, source
@@ -182,6 +181,9 @@ def mag_l1b_processing(
182
181
  try:
183
182
  global_attributes["is_mago"] = input_dataset.attrs["is_mago"]
184
183
  global_attributes["is_active"] = input_dataset.attrs["is_active"]
184
+ global_attributes["all_vectors_primary"] = input_dataset.attrs[
185
+ "all_vectors_primary"
186
+ ]
185
187
  global_attributes["vectors_per_second"] = timeshift_vectors_per_second(
186
188
  input_dataset.attrs["vectors_per_second"], time_shift
187
189
  )
@@ -245,7 +247,6 @@ def retrieve_matrix_from_l1b_calibration(
245
247
  The calibration matrix and time shift. These can be passed directly into
246
248
  update_vector, calibrate_vector, and shift_time.
247
249
  """
248
- print(f"Finding data for day {day}")
249
250
  if is_mago:
250
251
  calibration_matrix = calibration_dataset.sel(epoch=day)["MFOTOURFO"]
251
252
  time_shift = calibration_dataset.sel(epoch=day)["OTS"]
@@ -3,7 +3,6 @@
3
3
 
4
4
  import logging
5
5
  from enum import Enum
6
- from typing import Optional
7
6
 
8
7
  import numpy as np
9
8
  from scipy.interpolate import make_interp_spline
@@ -44,8 +43,8 @@ def linear(
44
43
  input_vectors: np.ndarray,
45
44
  input_timestamps: np.ndarray,
46
45
  output_timestamps: np.ndarray,
47
- input_rate: Optional[VecSec] = None,
48
- output_rate: Optional[VecSec] = None,
46
+ input_rate: VecSec | None = None,
47
+ output_rate: VecSec | None = None,
49
48
  ) -> np.ndarray:
50
49
  """
51
50
  Linear interpolation of input vectors to output timestamps.
@@ -80,8 +79,8 @@ def quadratic(
80
79
  input_vectors: np.ndarray,
81
80
  input_timestamps: np.ndarray,
82
81
  output_timestamps: np.ndarray,
83
- input_rate: Optional[VecSec] = None,
84
- output_rate: Optional[VecSec] = None,
82
+ input_rate: VecSec | None = None,
83
+ output_rate: VecSec | None = None,
85
84
  ) -> np.ndarray:
86
85
  """
87
86
  Quadratic interpolation of input vectors to output timestamps.
@@ -115,8 +114,8 @@ def cubic(
115
114
  input_vectors: np.ndarray,
116
115
  input_timestamps: np.ndarray,
117
116
  output_timestamps: np.ndarray,
118
- input_rate: Optional[VecSec] = None,
119
- output_rate: Optional[VecSec] = None,
117
+ input_rate: VecSec | None = None,
118
+ output_rate: VecSec | None = None,
120
119
  ) -> np.ndarray:
121
120
  """
122
121
  Cubic interpolation of input vectors to output timestamps.
@@ -175,8 +174,8 @@ def cic_filter(
175
174
  input_vectors: np.ndarray,
176
175
  input_timestamps: np.ndarray,
177
176
  output_timestamps: np.ndarray,
178
- input_rate: Optional[VecSec],
179
- output_rate: Optional[VecSec],
177
+ input_rate: VecSec | None,
178
+ output_rate: VecSec | None,
180
179
  ):
181
180
  """
182
181
  Apply CIC filter to data before interpolating.
@@ -242,8 +241,8 @@ def linear_filtered(
242
241
  input_vectors: np.ndarray,
243
242
  input_timestamps: np.ndarray,
244
243
  output_timestamps: np.ndarray,
245
- input_rate: Optional[VecSec] = None,
246
- output_rate: Optional[VecSec] = None,
244
+ input_rate: VecSec | None = None,
245
+ output_rate: VecSec | None = None,
247
246
  ) -> np.ndarray:
248
247
  """
249
248
  Linear filtered interpolation of input vectors to output timestamps.
@@ -281,8 +280,8 @@ def quadratic_filtered(
281
280
  input_vectors: np.ndarray,
282
281
  input_timestamps: np.ndarray,
283
282
  output_timestamps: np.ndarray,
284
- input_rate: Optional[VecSec] = None,
285
- output_rate: Optional[VecSec] = None,
283
+ input_rate: VecSec | None = None,
284
+ output_rate: VecSec | None = None,
286
285
  ) -> np.ndarray:
287
286
  """
288
287
  Quadratic filtered interpolation of input vectors to output timestamps.
@@ -320,8 +319,8 @@ def cubic_filtered(
320
319
  input_vectors: np.ndarray,
321
320
  input_timestamps: np.ndarray,
322
321
  output_timestamps: np.ndarray,
323
- input_rate: Optional[VecSec] = None,
324
- output_rate: Optional[VecSec] = None,
322
+ input_rate: VecSec | None = None,
323
+ output_rate: VecSec | None = None,
325
324
  ) -> np.ndarray:
326
325
  """
327
326
  Cubic filtered interpolation of input vectors to output timestamps.
@@ -1,7 +1,6 @@
1
1
  """MAG L1C processing module."""
2
2
 
3
3
  import logging
4
- from typing import Optional
5
4
 
6
5
  import numpy as np
7
6
  import xarray as xr
@@ -125,6 +124,23 @@ def mag_l1c(
125
124
  try:
126
125
  global_attributes["is_mago"] = normal_mode_dataset.attrs["is_mago"]
127
126
  global_attributes["is_active"] = normal_mode_dataset.attrs["is_active"]
127
+
128
+ # Check if all vectors are primary in both normal and burst datasets
129
+ is_mago = normal_mode_dataset.attrs.get("is_mago", "False") == "True"
130
+ normal_all_primary = normal_mode_dataset.attrs.get("all_vectors_primary", False)
131
+
132
+ # Default for missing burst dataset: 1 if MAGO (expected primary), 0 if MAGI
133
+ burst_all_primary = is_mago
134
+ if burst_mode_dataset is not None:
135
+ burst_all_primary = burst_mode_dataset.attrs.get(
136
+ "all_vectors_primary", False
137
+ )
138
+
139
+ # Both datasets must have all vectors primary for the combined result to be True
140
+ global_attributes["all_vectors_primary"] = (
141
+ normal_all_primary and burst_all_primary
142
+ )
143
+
128
144
  global_attributes["missing_sequences"] = normal_mode_dataset.attrs[
129
145
  "missing_sequences"
130
146
  ]
@@ -162,8 +178,9 @@ def mag_l1c(
162
178
  output_core_dims=[[]],
163
179
  vectorize=True,
164
180
  )
165
- # output_dataset['vector_magnitude'].attrs =
166
- # attribute_manager.get_variable_attributes("vector_magnitude_attrs")
181
+ output_dataset[
182
+ "vector_magnitude"
183
+ ].attrs = attribute_manager.get_variable_attributes("vector_magnitude_attrs")
167
184
 
168
185
  output_dataset["compression_flags"] = xr.DataArray(
169
186
  completed_timeline[:, 6:8],
@@ -176,14 +193,14 @@ def mag_l1c(
176
193
  completed_timeline[:, 5],
177
194
  name="generated_flag",
178
195
  dims=["epoch"],
179
- # attrs=attribute_manager.get_variable_attributes("generated_flag_attrs"),
196
+ attrs=attribute_manager.get_variable_attributes("generated_flag_attrs"),
180
197
  )
181
198
 
182
199
  return output_dataset
183
200
 
184
201
 
185
202
  def select_datasets(
186
- first_input_dataset: xr.Dataset, second_input_dataset: Optional[xr.Dataset] = None
203
+ first_input_dataset: xr.Dataset, second_input_dataset: xr.Dataset | None = None
187
204
  ) -> tuple[xr.Dataset, xr.Dataset]:
188
205
  """
189
206
  Given one or two datasets, assign one to norm and one to burst.
@@ -503,7 +520,7 @@ def generate_timeline(epoch_data: np.ndarray, gaps: np.ndarray) -> np.ndarray:
503
520
 
504
521
 
505
522
  def find_all_gaps(
506
- epoch_data: np.ndarray, vecsec_dict: Optional[dict] = None
523
+ epoch_data: np.ndarray, vecsec_dict: dict | None = None
507
524
  ) -> np.ndarray:
508
525
  """
509
526
  Find all the gaps in the epoch data.
File without changes
@@ -0,0 +1,176 @@
1
+ """Module for generating Level 1d magnetic field data."""
2
+
3
+ import numpy as np
4
+ import xarray as xr
5
+
6
+ from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
7
+ from imap_processing.mag.constants import DataMode
8
+ from imap_processing.mag.l1d.mag_l1d_data import MagL1d, MagL1dConfiguration
9
+ from imap_processing.mag.l2.mag_l2_data import ValidFrames
10
+
11
+
12
+ def mag_l1d( # noqa: PLR0912
13
+ science_data: list[xr.Dataset],
14
+ calibration_dataset: xr.Dataset,
15
+ day_to_process: np.datetime64,
16
+ ) -> list[xr.Dataset]:
17
+ """
18
+ Generate Level 1d magnetic field data from Level 1b/1c data.
19
+
20
+ Both norm and burst mode are calculated at the same time. Normal mode MAGO and MAGI
21
+ L1C data is required, burst mode MAGO and MAGI L1B data is optional.
22
+
23
+ Parameters
24
+ ----------
25
+ science_data : list[xr.Dataset]
26
+ The list of input datasets containing the MAG L1C and L1B data. This is required
27
+ to have at least one normal mode dataset for MAGo and MAGi, and optionally
28
+ burst mode datasets for MAGo and MAGi. There cannot be duplicates, so two
29
+ norm-mago files is invalid.
30
+ calibration_dataset : xr.Dataset
31
+ The calibration dataset to use for processing. Generated from multiple L1D
32
+ ancillary files using MagAncillaryCombiner class.
33
+ day_to_process : np.datetime64
34
+ The day to process, in np.datetime64[D] format. This is used to select the
35
+ correct ancillary parameters and to remove excessive data from the output.
36
+
37
+ Returns
38
+ -------
39
+ list[xr.Dataset]
40
+ A list containing the generated Level 1d dataset(s).
41
+ """
42
+ input_magi_norm = None
43
+ input_mago_norm = None
44
+ input_magi_burst = None
45
+ input_mago_burst = None
46
+ for dataset in science_data:
47
+ source = dataset.attrs.get("Logical_source", "")
48
+ instrument_mode = source.split("_")[-1]
49
+ match instrument_mode:
50
+ case "norm-magi":
51
+ input_magi_norm = dataset
52
+ case "norm-mago":
53
+ input_mago_norm = dataset
54
+ case "burst-magi":
55
+ input_magi_burst = dataset
56
+ case "burst-mago":
57
+ input_mago_burst = dataset
58
+ case _:
59
+ raise ValueError(f"Input data has invalid logical source {source}")
60
+
61
+ if input_magi_norm is None or input_mago_norm is None:
62
+ raise ValueError(
63
+ "Both MAGo and MAGi normal mode datasets are required for L1d processing."
64
+ )
65
+
66
+ day: np.datetime64 = day_to_process.astype("datetime64[D]")
67
+
68
+ output_datasets = []
69
+
70
+ # Read configuration out of file
71
+ config = MagL1dConfiguration(calibration_dataset, day)
72
+
73
+ # Only the first 3 components are used for L1d
74
+ mago_vectors = input_mago_norm["vectors"].data[:, :3]
75
+ magi_vectors = input_magi_norm["vectors"].data[:, :3]
76
+
77
+ # Verify that MAGO is primary sensor for all vectors before applying gradiometry
78
+ if not input_mago_norm.attrs.get("all_vectors_primary", 1):
79
+ config.apply_gradiometry = False
80
+
81
+ # TODO: L1D attributes
82
+ attributes = ImapCdfAttributes()
83
+ attributes.add_instrument_global_attrs("mag")
84
+ attributes.add_instrument_variable_attrs("mag", "l2")
85
+
86
+ l1d_norm = MagL1d(
87
+ vectors=mago_vectors,
88
+ epoch=input_mago_norm["epoch"].data,
89
+ range=input_mago_norm["vectors"].data[:, 3],
90
+ global_attributes={},
91
+ quality_flags=np.zeros(len(input_mago_norm["epoch"].data)),
92
+ quality_bitmask=np.zeros(len(input_mago_norm["epoch"].data)),
93
+ data_mode=DataMode.NORM,
94
+ magi_vectors=magi_vectors,
95
+ magi_range=input_magi_norm["vectors"].data[:, 3],
96
+ magi_epoch=input_magi_norm["epoch"].data,
97
+ config=config,
98
+ day=day,
99
+ )
100
+
101
+ # Nominally, this is expected to create MAGO data. However, if the configuration
102
+ # setting for always_output_mago is set to False, it will create MAGI data.
103
+
104
+ l1d_norm.rotate_frame(ValidFrames.SRF)
105
+ norm_srf_dataset = l1d_norm.generate_dataset(attributes, day_to_process)
106
+ l1d_norm.rotate_frame(ValidFrames.DSRF)
107
+ norm_dsrf_dataset = l1d_norm.generate_dataset(attributes, day_to_process)
108
+ l1d_norm.rotate_frame(ValidFrames.GSE)
109
+ norm_gse_dataset = l1d_norm.generate_dataset(attributes, day_to_process)
110
+ l1d_norm.rotate_frame(ValidFrames.RTN)
111
+ norm_rtn_dataset = l1d_norm.generate_dataset(attributes, day_to_process)
112
+ output_datasets.append(norm_srf_dataset)
113
+ output_datasets.append(norm_dsrf_dataset)
114
+ output_datasets.append(norm_gse_dataset)
115
+ output_datasets.append(norm_rtn_dataset)
116
+
117
+ if input_mago_burst is not None and input_magi_burst is not None:
118
+ # If burst data is provided, use it to create the burst L1d dataset
119
+ mago_burst_vectors = input_mago_burst["vectors"].data[:, :3]
120
+ magi_burst_vectors = input_magi_burst["vectors"].data[:, :3]
121
+
122
+ l1d_burst = MagL1d(
123
+ vectors=mago_burst_vectors,
124
+ epoch=input_mago_burst["epoch"].data,
125
+ range=input_mago_burst["vectors"].data[:, 3],
126
+ global_attributes={},
127
+ quality_flags=np.zeros(len(input_mago_burst["epoch"].data)),
128
+ quality_bitmask=np.zeros(len(input_mago_burst["epoch"].data)),
129
+ data_mode=DataMode.BURST,
130
+ magi_vectors=magi_burst_vectors,
131
+ magi_range=input_magi_burst["vectors"].data[:, 3],
132
+ magi_epoch=input_magi_burst["epoch"].data,
133
+ config=config,
134
+ spin_offsets=l1d_norm.spin_offsets,
135
+ day=day,
136
+ )
137
+
138
+ # TODO: frame specific attributes may be required
139
+ l1d_burst.rotate_frame(ValidFrames.SRF)
140
+ burst_srf_dataset = l1d_burst.generate_dataset(attributes, day_to_process)
141
+ l1d_burst.rotate_frame(ValidFrames.DSRF)
142
+ burst_dsrf_dataset = l1d_burst.generate_dataset(attributes, day_to_process)
143
+ l1d_burst.rotate_frame(ValidFrames.GSE)
144
+ burst_gse_dataset = l1d_burst.generate_dataset(attributes, day_to_process)
145
+ l1d_burst.rotate_frame(ValidFrames.RTN)
146
+ burst_rtn_dataset = l1d_burst.generate_dataset(attributes, day_to_process)
147
+ output_datasets.append(burst_srf_dataset)
148
+ output_datasets.append(burst_dsrf_dataset)
149
+ output_datasets.append(burst_gse_dataset)
150
+ output_datasets.append(burst_rtn_dataset)
151
+
152
+ # Output ancillary files
153
+ # Add spin offsets dataset from normal mode processing
154
+ if l1d_norm.spin_offsets is not None:
155
+ spin_offset_dataset = l1d_norm.generate_spin_offset_dataset()
156
+ spin_offset_dataset.attrs["Logical_source"] = "imap_mag_l1d-spin-offsets"
157
+ output_datasets.append(spin_offset_dataset)
158
+
159
+ # Add gradiometry offsets dataset if gradiometry was applied
160
+ if l1d_norm.config.apply_gradiometry and hasattr(l1d_norm, "gradiometry_offsets"):
161
+ gradiometry_dataset = l1d_norm.gradiometry_offsets.copy()
162
+ gradiometry_dataset.attrs["Logical_source"] = (
163
+ "imap_mag_l1d-gradiometry-offsets-norm"
164
+ )
165
+ output_datasets.append(gradiometry_dataset)
166
+
167
+ # Also add burst gradiometry offsets if burst data was processed
168
+ if input_mago_burst is not None and input_magi_burst is not None:
169
+ if hasattr(l1d_burst, "gradiometry_offsets"):
170
+ burst_gradiometry_dataset = l1d_burst.gradiometry_offsets.copy()
171
+ burst_gradiometry_dataset.attrs["Logical_source"] = (
172
+ "imap_mag_l1d-gradiometry-offsets-burst"
173
+ )
174
+ output_datasets.append(burst_gradiometry_dataset)
175
+
176
+ return output_datasets