imap-processing 0.7.0__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of imap-processing might be problematic; see the package registry page for more details.

Files changed (124)
  1. imap_processing/__init__.py +1 -1
  2. imap_processing/_version.py +2 -2
  3. imap_processing/ccsds/excel_to_xtce.py +34 -2
  4. imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +1 -1
  5. imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +145 -30
  6. imap_processing/cdf/config/imap_glows_l1b_variable_attrs.yaml +36 -36
  7. imap_processing/cdf/config/imap_hi_variable_attrs.yaml +36 -8
  8. imap_processing/cdf/config/imap_hit_l1b_variable_attrs.yaml +9 -0
  9. imap_processing/cdf/config/imap_idex_global_cdf_attrs.yaml +7 -7
  10. imap_processing/cdf/config/imap_idex_l1a_variable_attrs.yaml +32 -33
  11. imap_processing/cdf/config/imap_mag_l1_variable_attrs.yaml +24 -28
  12. imap_processing/cdf/config/imap_ultra_l1a_variable_attrs.yaml +1 -0
  13. imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +133 -78
  14. imap_processing/cdf/config/imap_variable_schema.yaml +13 -0
  15. imap_processing/cdf/imap_cdf_manager.py +31 -27
  16. imap_processing/cli.py +12 -10
  17. imap_processing/codice/codice_l1a.py +151 -61
  18. imap_processing/codice/constants.py +1 -1
  19. imap_processing/codice/decompress.py +4 -9
  20. imap_processing/codice/utils.py +1 -0
  21. imap_processing/glows/l1b/glows_l1b.py +3 -3
  22. imap_processing/glows/l1b/glows_l1b_data.py +59 -37
  23. imap_processing/glows/l2/glows_l2_data.py +123 -0
  24. imap_processing/hi/l1a/histogram.py +1 -1
  25. imap_processing/hi/l1a/science_direct_event.py +1 -1
  26. imap_processing/hi/l1b/hi_l1b.py +85 -11
  27. imap_processing/hi/l1c/hi_l1c.py +23 -1
  28. imap_processing/hi/utils.py +1 -1
  29. imap_processing/hit/hit_utils.py +221 -0
  30. imap_processing/hit/l0/constants.py +118 -0
  31. imap_processing/hit/l0/decom_hit.py +186 -153
  32. imap_processing/hit/l1a/hit_l1a.py +20 -175
  33. imap_processing/hit/l1b/hit_l1b.py +33 -153
  34. imap_processing/idex/idex_l1a.py +10 -9
  35. imap_processing/lo/l0/decompression_tables/decompression_tables.py +1 -1
  36. imap_processing/lo/l0/lo_science.py +1 -1
  37. imap_processing/lo/packet_definitions/lo_xtce.xml +1 -3296
  38. imap_processing/mag/l0/decom_mag.py +4 -3
  39. imap_processing/mag/l1a/mag_l1a.py +11 -11
  40. imap_processing/mag/l1b/mag_l1b.py +89 -7
  41. imap_processing/spice/geometry.py +126 -4
  42. imap_processing/swapi/l1/swapi_l1.py +1 -1
  43. imap_processing/swapi/l2/swapi_l2.py +1 -1
  44. imap_processing/swe/l1b/swe_l1b_science.py +8 -8
  45. imap_processing/tests/ccsds/test_data/expected_output.xml +1 -0
  46. imap_processing/tests/ccsds/test_excel_to_xtce.py +4 -4
  47. imap_processing/tests/cdf/test_imap_cdf_manager.py +0 -10
  48. imap_processing/tests/codice/conftest.py +1 -17
  49. imap_processing/tests/codice/data/imap_codice_l0_raw_20241110_v001.pkts +0 -0
  50. imap_processing/tests/codice/test_codice_l0.py +8 -2
  51. imap_processing/tests/codice/test_codice_l1a.py +127 -107
  52. imap_processing/tests/codice/test_codice_l1b.py +1 -0
  53. imap_processing/tests/codice/test_decompress.py +7 -7
  54. imap_processing/tests/conftest.py +54 -15
  55. imap_processing/tests/glows/conftest.py +6 -0
  56. imap_processing/tests/glows/test_glows_l1b.py +9 -9
  57. imap_processing/tests/glows/test_glows_l1b_data.py +9 -9
  58. imap_processing/tests/glows/test_glows_l2_data.py +0 -0
  59. imap_processing/tests/hi/test_data/l1a/imap_hi_l1a_45sensor-de_20250415_v000.cdf +0 -0
  60. imap_processing/tests/hi/test_hi_l1b.py +71 -1
  61. imap_processing/tests/hi/test_hi_l1c.py +10 -2
  62. imap_processing/tests/hi/test_utils.py +4 -3
  63. imap_processing/tests/hit/{test_hit_decom.py → test_decom_hit.py} +84 -35
  64. imap_processing/tests/hit/test_hit_l1a.py +2 -197
  65. imap_processing/tests/hit/test_hit_l1b.py +156 -25
  66. imap_processing/tests/hit/test_hit_utils.py +218 -0
  67. imap_processing/tests/idex/conftest.py +1 -1
  68. imap_processing/tests/idex/imap_idex_l0_raw_20231214_v001.pkts +0 -0
  69. imap_processing/tests/idex/impact_14_tof_high_data.txt +4444 -4444
  70. imap_processing/tests/idex/test_idex_l0.py +3 -3
  71. imap_processing/tests/idex/test_idex_l1a.py +1 -1
  72. imap_processing/tests/lo/test_lo_science.py +2 -2
  73. imap_processing/tests/mag/imap_mag_l1a_norm-magi_20251017_v001.cdf +0 -0
  74. imap_processing/tests/mag/test_mag_l1b.py +59 -3
  75. imap_processing/tests/spice/test_data/imap_ena_sim_metakernel.template +3 -1
  76. imap_processing/tests/spice/test_geometry.py +84 -4
  77. imap_processing/tests/swe/conftest.py +33 -0
  78. imap_processing/tests/swe/l1_validation/swe_l0_unpacked-data_20240510_v001_VALIDATION_L1B_v3.dat +4332 -0
  79. imap_processing/tests/swe/test_swe_l1b.py +29 -8
  80. imap_processing/tests/test_utils.py +1 -1
  81. imap_processing/tests/ultra/test_data/l1/dps_exposure_helio_45_E12.cdf +0 -0
  82. imap_processing/tests/ultra/test_data/l1/dps_exposure_helio_45_E24.cdf +0 -0
  83. imap_processing/tests/ultra/unit/test_de.py +108 -0
  84. imap_processing/tests/ultra/unit/test_ultra_l1b.py +27 -3
  85. imap_processing/tests/ultra/unit/test_ultra_l1b_annotated.py +31 -10
  86. imap_processing/tests/ultra/unit/test_ultra_l1b_extended.py +21 -11
  87. imap_processing/tests/ultra/unit/test_ultra_l1c_pset_bins.py +9 -44
  88. imap_processing/ultra/constants.py +8 -3
  89. imap_processing/ultra/l1b/de.py +174 -30
  90. imap_processing/ultra/l1b/ultra_l1b_annotated.py +24 -10
  91. imap_processing/ultra/l1b/ultra_l1b_extended.py +21 -14
  92. imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +70 -119
  93. {imap_processing-0.7.0.dist-info → imap_processing-0.8.0.dist-info}/METADATA +15 -14
  94. {imap_processing-0.7.0.dist-info → imap_processing-0.8.0.dist-info}/RECORD +98 -113
  95. imap_processing/cdf/cdf_attribute_manager.py +0 -322
  96. imap_processing/cdf/config/shared/default_global_cdf_attrs_schema.yaml +0 -246
  97. imap_processing/cdf/config/shared/default_variable_cdf_attrs_schema.yaml +0 -466
  98. imap_processing/hit/l0/data_classes/housekeeping.py +0 -240
  99. imap_processing/hit/l0/data_classes/science_packet.py +0 -259
  100. imap_processing/hit/l0/utils/hit_base.py +0 -57
  101. imap_processing/tests/cdf/shared/default_global_cdf_attrs_schema.yaml +0 -246
  102. imap_processing/tests/cdf/shared/default_variable_cdf_attrs_schema.yaml +0 -466
  103. imap_processing/tests/cdf/test_cdf_attribute_manager.py +0 -353
  104. imap_processing/tests/codice/data/imap_codice_l0_hi-counters-aggregated_20240429_v001.pkts +0 -0
  105. imap_processing/tests/codice/data/imap_codice_l0_hi-counters-singles_20240429_v001.pkts +0 -0
  106. imap_processing/tests/codice/data/imap_codice_l0_hi-omni_20240429_v001.pkts +0 -0
  107. imap_processing/tests/codice/data/imap_codice_l0_hi-pha_20240429_v001.pkts +0 -0
  108. imap_processing/tests/codice/data/imap_codice_l0_hi-sectored_20240429_v001.pkts +0 -0
  109. imap_processing/tests/codice/data/imap_codice_l0_hskp_20100101_v001.pkts +0 -0
  110. imap_processing/tests/codice/data/imap_codice_l0_lo-counters-aggregated_20240429_v001.pkts +0 -0
  111. imap_processing/tests/codice/data/imap_codice_l0_lo-counters-singles_20240429_v001.pkts +0 -0
  112. imap_processing/tests/codice/data/imap_codice_l0_lo-nsw-angular_20240429_v001.pkts +0 -0
  113. imap_processing/tests/codice/data/imap_codice_l0_lo-nsw-priority_20240429_v001.pkts +0 -0
  114. imap_processing/tests/codice/data/imap_codice_l0_lo-nsw-species_20240429_v001.pkts +0 -0
  115. imap_processing/tests/codice/data/imap_codice_l0_lo-pha_20240429_v001.pkts +0 -0
  116. imap_processing/tests/codice/data/imap_codice_l0_lo-sw-angular_20240429_v001.pkts +0 -0
  117. imap_processing/tests/codice/data/imap_codice_l0_lo-sw-priority_20240429_v001.pkts +0 -0
  118. imap_processing/tests/codice/data/imap_codice_l0_lo-sw-species_20240429_v001.pkts +0 -0
  119. imap_processing/tests/idex/imap_idex_l0_raw_20230725_v001.pkts +0 -0
  120. imap_processing/tests/mag/imap_mag_l1a_burst-magi_20231025_v001.cdf +0 -0
  121. /imap_processing/tests/hit/test_data/{imap_hit_l0_hk_20100105_v001.pkts → imap_hit_l0_raw_20100105_v001.pkts} +0 -0
  122. {imap_processing-0.7.0.dist-info → imap_processing-0.8.0.dist-info}/LICENSE +0 -0
  123. {imap_processing-0.7.0.dist-info → imap_processing-0.8.0.dist-info}/WHEEL +0 -0
  124. {imap_processing-0.7.0.dist-info → imap_processing-0.8.0.dist-info}/entry_points.txt +0 -0
@@ -129,7 +129,9 @@ def generate_dataset(
129
129
  direction.astype(str),
130
130
  name="direction_label",
131
131
  dims=["direction_label"],
132
- attrs=attribute_manager.get_variable_attributes("direction_label"),
132
+ attrs=attribute_manager.get_variable_attributes(
133
+ "direction_label", check_schema=False
134
+ ),
133
135
  )
134
136
 
135
137
  # TODO: Epoch here refers to the start of the sample. Confirm that this is
@@ -154,11 +156,10 @@ def generate_dataset(
154
156
  coords={
155
157
  "epoch": epoch_time,
156
158
  "direction": direction,
157
- "direction_label": direction_label,
158
159
  },
159
160
  attrs=attribute_manager.get_global_attributes(logical_id),
160
161
  )
161
-
162
+ output["direction_label"] = direction_label
162
163
  output["raw_vectors"] = raw_vectors
163
164
 
164
165
  for key, value in support_data.items():
@@ -158,7 +158,6 @@ def process_packets(
158
158
  "timedelta64[ns]"
159
159
  )
160
160
  ).astype("datetime64[D]")
161
-
162
161
  primary_packet_properties = MagL1aPacketProperties(
163
162
  mag_l0.SHCOARSE,
164
163
  primary_start_time,
@@ -284,9 +283,7 @@ def generate_dataset(
284
283
 
285
284
  # TODO: Just leave time in datetime64 type with vector as dtype object to avoid this
286
285
  # Get the timestamp from the end of the vector
287
- time_data = single_file_l1a.vectors[:, 4].astype(
288
- np.dtype("datetime64[ns]"), copy=False
289
- )
286
+ time_data = single_file_l1a.vectors[:, 4]
290
287
 
291
288
  compression = xr.DataArray(
292
289
  np.arange(2),
@@ -326,17 +323,21 @@ def generate_dataset(
326
323
  )
327
324
 
328
325
  direction_label = xr.DataArray(
329
- direction.astype(str),
326
+ direction.values.astype(str),
330
327
  name="direction_label",
331
328
  dims=["direction_label"],
332
- attrs=attribute_manager.get_variable_attributes("direction_label"),
329
+ attrs=attribute_manager.get_variable_attributes(
330
+ "direction_label", check_schema=False
331
+ ),
333
332
  )
334
333
 
335
334
  compression_label = xr.DataArray(
336
- compression.astype(str),
335
+ compression.values.astype(str),
337
336
  name="compression_label",
338
337
  dims=["compression_label"],
339
- attrs=attribute_manager.get_variable_attributes("compression_label"),
338
+ attrs=attribute_manager.get_variable_attributes(
339
+ "compression_label", check_schema=False
340
+ ),
340
341
  )
341
342
 
342
343
  output = xr.Dataset(
@@ -344,12 +345,11 @@ def generate_dataset(
344
345
  "epoch": epoch_time,
345
346
  "direction": direction,
346
347
  "compression": compression,
347
- "direction_label": direction_label,
348
- "compression_label": compression_label,
349
348
  },
350
349
  attrs=attribute_manager.get_global_attributes(logical_file_id),
351
350
  )
352
-
351
+ output["direction_label"] = direction_label
352
+ output["compression_label"] = compression_label
353
353
  output["vectors"] = vectors
354
354
  output["compression_flags"] = compression_flags
355
355
 
@@ -64,9 +64,11 @@ def mag_l1b_processing(input_dataset: xr.Dataset) -> xr.Dataset:
64
64
  """
65
65
  # TODO: There is a time alignment step that will add a lot of complexity.
66
66
  # This needs to be done once we have some SPICE time data.
67
+ mag_attributes = ImapCdfAttributes()
68
+ mag_attributes.add_instrument_variable_attrs("mag", "l1")
67
69
 
68
- dims = [["direction"]]
69
- new_dims = [["direction"]]
70
+ dims = [["direction"], ["compression"]]
71
+ new_dims = [["direction"], ["compression"]]
70
72
  # TODO: This should definitely be loaded from AWS
71
73
  calibration_dataset = load_cdf(
72
74
  Path(__file__).parent / "imap_calibration_mag_20240229_v01.cdf"
@@ -78,8 +80,9 @@ def mag_l1b_processing(input_dataset: xr.Dataset) -> xr.Dataset:
78
80
  calibration_matrix = calibration_dataset["MFITOURFI"]
79
81
 
80
82
  l1b_fields = xr.apply_ufunc(
81
- calibrate,
83
+ update_vector,
82
84
  input_dataset["vectors"],
85
+ input_dataset["compression_flags"],
83
86
  input_core_dims=dims,
84
87
  output_core_dims=new_dims,
85
88
  vectorize=True,
@@ -88,13 +91,93 @@ def mag_l1b_processing(input_dataset: xr.Dataset) -> xr.Dataset:
88
91
  )
89
92
 
90
93
  output_dataset = input_dataset.copy()
91
- output_dataset["vectors"] = l1b_fields
94
+ output_dataset["vectors"].data = l1b_fields[0].data
95
+
96
+ output_dataset["epoch"].attrs = mag_attributes.get_variable_attributes("epoch")
97
+ output_dataset["direction"].attrs = mag_attributes.get_variable_attributes(
98
+ "direction_attrs"
99
+ )
100
+ output_dataset["compression"].attrs = mag_attributes.get_variable_attributes(
101
+ "compression_attrs"
102
+ )
103
+ output_dataset["direction_label"].attrs = mag_attributes.get_variable_attributes(
104
+ "direction_label", check_schema=False
105
+ )
106
+ output_dataset["compression_label"].attrs = mag_attributes.get_variable_attributes(
107
+ "compression_label", check_schema=False
108
+ )
92
109
 
93
- # TODO add/update attributes
94
110
  return output_dataset
95
111
 
96
112
 
97
- def calibrate(
113
+ def update_vector(
114
+ input_vector: np.ndarray,
115
+ input_compression: np.ndarray,
116
+ calibration_matrix: xr.DataArray,
117
+ ) -> tuple[np.ndarray, np.ndarray]:
118
+ """
119
+ Apply calibration and compression scaling to vector.
120
+
121
+ This calls, in sequence, calibrate_vector and rescale_vector to apply L1B processing
122
+ to the input vector.
123
+
124
+ Parameters
125
+ ----------
126
+ input_vector : numpy.ndarray
127
+ One input vector to update, looking like (x, y, z, range).
128
+ input_compression : numpy.ndarray
129
+ Compression flags corresponding to the vector, looking like (is_compressed,
130
+ compression_width).
131
+ calibration_matrix : xr.DataArray
132
+ DataArray containing the full set of calibration matrices, for each range.
133
+ Size is ((3, 3, 4)).
134
+
135
+ Returns
136
+ -------
137
+ tuple[numpy.ndarray, numpy.ndarray]
138
+ Updated vector and the same compression flags.
139
+ """
140
+ vector = calibrate_vector(input_vector, calibration_matrix)
141
+ return rescale_vector(vector, input_compression), input_compression
142
+
143
+
144
+ def rescale_vector(
145
+ input_vector: np.ndarray, compression_flags: np.ndarray
146
+ ) -> np.ndarray:
147
+ """
148
+ Rescale vector based on compression flags.
149
+
150
+ If the first value of compression_flags is zero, this just returns the input_vector
151
+ unchanged. Otherwise, the vector is scaled using the compression width, which is
152
+ the second part of compression_flags.
153
+
154
+ The vector is scaled using the following equation:
155
+ M = 2 ^ (16-width)
156
+ output_vector = input_vector * M
157
+
158
+ Therefore, for a 16 bit width, the same vector is returned.
159
+
160
+ Parameters
161
+ ----------
162
+ input_vector : numpy.ndarray
163
+ One input vector to update, looking like (x, y, z, range).
164
+ compression_flags : numpy.ndarray
165
+ Compression flags corresponding to the vector, looking like (is_compressed,
166
+ compression_width).
167
+
168
+ Returns
169
+ -------
170
+ output_vector : numpy.ndarray
171
+ Updated vector.
172
+ """
173
+ if not compression_flags[0]:
174
+ return input_vector
175
+ else:
176
+ factor = np.float_power(2, (16 - compression_flags[1]))
177
+ return input_vector * factor # type: ignore
178
+
179
+
180
+ def calibrate_vector(
98
181
  input_vector: np.ndarray, calibration_matrix: xr.DataArray = None
99
182
  ) -> np.ndarray:
100
183
  """
@@ -121,5 +204,4 @@ def calibrate(
121
204
  updated_vector[:3] = np.matmul(
122
205
  input_vector[:3], calibration_matrix.values[:, :, int(input_vector[3])]
123
206
  )
124
-
125
207
  return updated_vector
@@ -19,6 +19,7 @@ import numpy as np
19
19
  import numpy.typing as npt
20
20
  import pandas as pd
21
21
  import spiceypy as spice
22
+ from numpy.typing import NDArray
22
23
 
23
24
  from imap_processing.spice.kernels import ensure_spice
24
25
 
@@ -303,8 +304,6 @@ def get_spacecraft_to_instrument_spin_phase_offset(instrument: SpiceFrame) -> fl
303
304
  return offset_lookup[instrument]
304
305
 
305
306
 
306
- @typing.no_type_check
307
- @ensure_spice
308
307
  def frame_transform(
309
308
  et: Union[float, npt.NDArray],
310
309
  position: npt.NDArray,
@@ -350,10 +349,11 @@ def frame_transform(
350
349
  f"Invalid position shape: {position.shape}. "
351
350
  f"Each input position vector must have 3 elements."
352
351
  )
353
- if not len(position) == len(et):
352
+ if not len(position) == np.asarray(et).size:
354
353
  raise ValueError(
355
354
  "Mismatch in number of position vectors and Ephemeris times provided."
356
- f"Position has {len(position)} elements and et has {len(et)} elements."
355
+ f"Position has {len(position)} elements and et has "
356
+ f"{np.asarray(et).size} elements."
357
357
  )
358
358
 
359
359
  # rotate will have shape = (3, 3) or (n, 3, 3)
@@ -369,6 +369,8 @@ def frame_transform(
369
369
  return result
370
370
 
371
371
 
372
+ @typing.no_type_check
373
+ @ensure_spice
372
374
  def get_rotation_matrix(
373
375
  et: Union[float, npt.NDArray],
374
376
  from_frame: SpiceFrame,
@@ -443,3 +445,123 @@ def instrument_pointing(
443
445
  if isinstance(et, typing.Collection):
444
446
  return np.rad2deg([spice.reclat(vec)[1:] for vec in pointing])
445
447
  return np.rad2deg(spice.reclat(pointing)[1:])
448
+
449
+
450
+ def basis_vectors(
451
+ et: Union[float, npt.NDArray],
452
+ from_frame: SpiceFrame,
453
+ to_frame: SpiceFrame,
454
+ ) -> npt.NDArray:
455
+ """
456
+ Get the basis vectors of the `from_frame` expressed in the `to_frame`.
457
+
458
+ The rotation matrix defining a frame transform are the basis vectors of
459
+ the `from_frame` expressed in the `to_frame`. This function just transposes
460
+ each rotation matrix retrieved from the `get_rotation_matrix` function so
461
+ that basis vectors can be indexed 0 for x, 1 for y, and 2 for z.
462
+
463
+ Parameters
464
+ ----------
465
+ et : float or np.ndarray
466
+ Ephemeris time(s) for which to get the rotation matrices.
467
+ from_frame : SpiceFrame
468
+ Reference frame to transform from.
469
+ to_frame : SpiceFrame
470
+ Reference frame to transform to.
471
+
472
+ Returns
473
+ -------
474
+ basis_vectors : np.ndarray
475
+ If `et` is a float, the returned basis vector matrix is of shape `(3, 3)`. If
476
+ `et` is a np.ndarray, the returned basis vector matrix is of shape `(n, 3, 3)`
477
+ where `n` matches the number of elements in et and basis vectors are the rows
478
+ of the 3 by 3 matrices.
479
+
480
+ Examples
481
+ --------
482
+ >>> from imap_processing.spice.geometry import basis_vectors
483
+ ... from imap_processing.spice.time import j2000ns_to_j2000s
484
+ ... et = j2000ns_to_j2000s(dataset.epoch.values)
485
+ ... basis_vectors = basis_vectors(
486
+ ... et, SpiceFrame.IMAP_SPACECRAFT, SpiceFrame.ECLIPJ2000
487
+ ... )
488
+ ... spacecraft_x = basis_vectors[:, 0]
489
+ ... spacecraft_y = basis_vectors[:, 1]
490
+ ... spacecraft_z = basis_vectors[:, 2]
491
+ """
492
+ return np.moveaxis(get_rotation_matrix(et, from_frame, to_frame), -1, -2)
493
+
494
+
495
+ def cartesian_to_spherical(
496
+ v: NDArray,
497
+ ) -> NDArray:
498
+ """
499
+ Convert cartesian coordinates to spherical coordinates.
500
+
501
+ Parameters
502
+ ----------
503
+ v : np.ndarray
504
+ A NumPy array with shape (n, 3) where each
505
+ row represents a vector
506
+ with x, y, z-components.
507
+
508
+ Returns
509
+ -------
510
+ spherical_coords : np.ndarray
511
+ A NumPy array with shape (n, 3), where each row contains
512
+ the spherical coordinates (r, azimuth, elevation):
513
+
514
+ - r : Distance of the point from the origin.
515
+ - azimuth : angle in the xy-plane in radians [0, 2*pi].
516
+ - elevation : angle from the z-axis in radians [-pi/2, pi/2].
517
+ """
518
+ # Magnitude of the velocity vector
519
+ magnitude_v = np.linalg.norm(v, axis=-1, keepdims=True)
520
+
521
+ vhat = v / magnitude_v
522
+
523
+ # Elevation angle (angle from the z-axis, range: [-pi/2, pi/2])
524
+ el = np.arcsin(vhat[..., 2])
525
+
526
+ # Azimuth angle (angle in the xy-plane, range: [0, 2*pi])
527
+ az = np.arctan2(vhat[..., 1], vhat[..., 0])
528
+
529
+ # Ensure azimuth is from 0 to 2PI
530
+ az = az % (2 * np.pi)
531
+ spherical_coords = np.stack(
532
+ (np.squeeze(magnitude_v), np.degrees(az), np.degrees(el)), axis=-1
533
+ )
534
+
535
+ return spherical_coords
536
+
537
+
538
+ def spherical_to_cartesian(spherical_coords: NDArray) -> NDArray:
539
+ """
540
+ Convert spherical coordinates to Cartesian coordinates.
541
+
542
+ Parameters
543
+ ----------
544
+ spherical_coords : np.ndarray
545
+ A NumPy array with shape (n, 3), where each row contains
546
+ the spherical coordinates (r, azimuth, elevation):
547
+
548
+ - r : Distance of the point from the origin.
549
+ - azimuth : angle in the xy-plane in radians [0, 2*pi].
550
+ - elevation : angle from the z-axis in radians [-pi/2, pi/2].
551
+
552
+ Returns
553
+ -------
554
+ cartesian_coords : np.ndarray
555
+ Cartesian coordinates.
556
+ """
557
+ r = spherical_coords[..., 0]
558
+ azimuth = spherical_coords[..., 1]
559
+ elevation = spherical_coords[..., 2]
560
+
561
+ x = r * np.cos(elevation) * np.cos(azimuth)
562
+ y = r * np.cos(elevation) * np.sin(azimuth)
563
+ z = r * np.sin(elevation)
564
+
565
+ cartesian_coords = np.stack((x, y, z), axis=-1)
566
+
567
+ return cartesian_coords
@@ -480,7 +480,7 @@ def process_swapi_science(
480
480
  # ====================================================
481
481
  cdf_manager = ImapCdfAttributes()
482
482
  cdf_manager.add_instrument_global_attrs("swapi")
483
- cdf_manager.load_variable_attributes("imap_swapi_variable_attrs.yaml")
483
+ cdf_manager.add_instrument_variable_attrs(instrument="swapi", level=None)
484
484
 
485
485
  # ===================================================================
486
486
  # Quality flags
@@ -43,7 +43,7 @@ def swapi_l2(l1_dataset: xr.Dataset, data_version: str) -> xr.Dataset:
43
43
  # Load the CDF attributes
44
44
  cdf_manager = ImapCdfAttributes()
45
45
  cdf_manager.add_instrument_global_attrs("swapi")
46
- cdf_manager.load_variable_attributes("imap_swapi_variable_attrs.yaml")
46
+ cdf_manager.add_instrument_variable_attrs(instrument="swapi", level=None)
47
47
 
48
48
  # Copy over only certain variables from L1 to L2 dataset
49
49
  l1_data_keys = [
@@ -95,7 +95,7 @@ def deadtime_correction(counts: np.ndarray, acq_duration: int) -> npt.NDArray:
95
95
  counts : numpy.ndarray
96
96
  Counts data before deadtime corrections.
97
97
  acq_duration : int
98
- This is ACQ_DURATION from science packet.
98
+ This is ACQ_DURATION from science packet. acq_duration is in microseconds.
99
99
 
100
100
  Returns
101
101
  -------
@@ -104,10 +104,10 @@ def deadtime_correction(counts: np.ndarray, acq_duration: int) -> npt.NDArray:
104
104
  """
105
105
  # deadtime is 360 ns
106
106
  deadtime = 360e-9
107
- correct = 1.0 - (deadtime * counts / (acq_duration / 1000.0))
107
+ correct = 1.0 - (deadtime * (counts / (acq_duration * 1e-6)))
108
108
  correct = np.maximum(0.1, correct)
109
109
  corrected_count = np.divide(counts, correct)
110
- return corrected_count
110
+ return corrected_count.astype(np.float64)
111
111
 
112
112
 
113
113
  def convert_counts_to_rate(data: np.ndarray, acq_duration: int) -> npt.NDArray:
@@ -121,17 +121,17 @@ def convert_counts_to_rate(data: np.ndarray, acq_duration: int) -> npt.NDArray:
121
121
  data : numpy.ndarray
122
122
  Counts data.
123
123
  acq_duration : int
124
- Acquisition duration. acq_duration is in millieseconds.
124
+ Acquisition duration. acq_duration is in microseconds.
125
125
 
126
126
  Returns
127
127
  -------
128
128
  numpy.ndarray
129
129
  Count rates array in seconds.
130
130
  """
131
- # convert milliseconds to seconds
132
- # Todo: check with SWE team about int or float types.
133
- acq_duration = int(acq_duration / 1000.0)
134
- return data / acq_duration
131
+ # convert microseconds to seconds
132
+ acq_duration_sec = acq_duration * 1e-6
133
+ count_rate = data / acq_duration_sec
134
+ return count_rate.astype(np.float64)
135
135
 
136
136
 
137
137
  def calculate_calibration_factor(time: int) -> None:
@@ -64,6 +64,7 @@
64
64
  <xtce:EnumerationList>
65
65
  <xtce:Enumeration value="0" label="OFF" />
66
66
  <xtce:Enumeration value="1" label="ON" />
67
+ <xtce:Enumeration value="2" label="NONE" />
67
68
  </xtce:EnumerationList>
68
69
  </xtce:EnumeratedParameterType>
69
70
  <xtce:IntegerParameterType name="TEST_PACKET2.SHCOARSE" signed="false">
@@ -195,10 +195,10 @@ def xtce_excel_file(tmp_path):
195
195
  }
196
196
 
197
197
  states = {
198
- "packetName": ["TEST_PACKET"] * 2,
199
- "mnemonic": ["VAR_STATE"] * 2,
200
- "value": [0, 1],
201
- "state": ["OFF", "ON"],
198
+ "packetName": ["TEST_PACKET"] * 3,
199
+ "mnemonic": ["VAR_STATE"] * 3,
200
+ "value": [0, 1, "0x2"],
201
+ "state": ["OFF", "ON", "NONE"],
202
202
  }
203
203
 
204
204
  # Write the DataFrame to an excel file
@@ -1,5 +1,4 @@
1
1
  from pathlib import Path
2
- from unittest import mock
3
2
 
4
3
  # from imap_processing.cdf.cdf_attribute_manager import CdfAttributeManager
5
4
  from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
@@ -25,15 +24,6 @@ def test_add_instrument_global_attrs():
25
24
  assert instrument2_instrument["Project"] == "STP>Solar Terrestrial Probes"
26
25
 
27
26
 
28
- @mock.patch(
29
- "imap_processing.cdf.cdf_attribute_manager.CdfAttributeManager.load_variable_attributes"
30
- )
31
- def testing_source_dir(mock_load_variable_attributes):
32
- # Create an ImapCdfAttributes object
33
- imap_cdf_manager = ImapCdfAttributes(Path(__file__).parent.parent / "cdf")
34
- assert str(imap_cdf_manager.source_dir) == str(Path(__file__).parent.parent / "cdf")
35
-
36
-
37
27
  def test_add_instrument_variable_attrs():
38
28
  # Create an ImapCdfAttributes object
39
29
  imap_cdf_manager = ImapCdfAttributes()
@@ -2,23 +2,7 @@ from imap_processing import imap_module_directory
2
2
 
3
3
  TEST_DATA_PATH = imap_module_directory / "tests" / "codice" / "data"
4
4
 
5
- TEST_PACKETS = [
6
- TEST_DATA_PATH / "imap_codice_l0_hskp_20100101_v001.pkts",
7
- TEST_DATA_PATH / "imap_codice_l0_hi-counters-aggregated_20240429_v001.pkts",
8
- TEST_DATA_PATH / "imap_codice_l0_hi-counters-singles_20240429_v001.pkts",
9
- TEST_DATA_PATH / "imap_codice_l0_hi-omni_20240429_v001.pkts",
10
- TEST_DATA_PATH / "imap_codice_l0_hi-sectored_20240429_v001.pkts",
11
- TEST_DATA_PATH / "imap_codice_l0_hi-pha_20240429_v001.pkts",
12
- TEST_DATA_PATH / "imap_codice_l0_lo-counters-aggregated_20240429_v001.pkts",
13
- TEST_DATA_PATH / "imap_codice_l0_lo-counters-singles_20240429_v001.pkts",
14
- TEST_DATA_PATH / "imap_codice_l0_lo-sw-angular_20240429_v001.pkts",
15
- TEST_DATA_PATH / "imap_codice_l0_lo-nsw-angular_20240429_v001.pkts",
16
- TEST_DATA_PATH / "imap_codice_l0_lo-sw-priority_20240429_v001.pkts",
17
- TEST_DATA_PATH / "imap_codice_l0_lo-nsw-priority_20240429_v001.pkts",
18
- TEST_DATA_PATH / "imap_codice_l0_lo-sw-species_20240429_v001.pkts",
19
- TEST_DATA_PATH / "imap_codice_l0_lo-nsw-species_20240429_v001.pkts",
20
- TEST_DATA_PATH / "imap_codice_l0_lo-pha_20240429_v001.pkts",
21
- ]
5
+ TEST_L0_FILE = TEST_DATA_PATH / "imap_codice_l0_raw_20241110_v001.pkts"
22
6
 
23
7
  TEST_L1A_FILES = [
24
8
  TEST_DATA_PATH / "imap_codice_l1a_hskp_20100101_v001.pkts",
@@ -37,7 +37,7 @@ def decom_test_data() -> xr.Dataset:
37
37
 
38
38
  packet_file = Path(
39
39
  f"{imap_module_directory}/tests/codice/data/"
40
- f"imap_codice_l0_hskp_20100101_v001.pkts"
40
+ f"imap_codice_l0_raw_20241110_v001.pkts"
41
41
  )
42
42
 
43
43
  packet = codice_l0.decom_packets(packet_file)[1136]
@@ -69,6 +69,9 @@ def validation_data() -> pd.core.frame.DataFrame:
69
69
  return validation_data
70
70
 
71
71
 
72
+ @pytest.mark.xfail(
73
+ reason="Need to update to validate against new validation CDFs. See issue #1154."
74
+ )
72
75
  def test_eu_hskp_data(
73
76
  decom_test_data: xr.Dataset,
74
77
  validation_data: pd.core.frame.DataFrame,
@@ -106,6 +109,9 @@ def test_eu_hskp_data(
106
109
  assert round(eu_val, 5) == round(validation_val, 5)
107
110
 
108
111
 
112
+ @pytest.mark.xfail(
113
+ reason="Need to update to validate against new validation CDFs. See issue #1154."
114
+ )
109
115
  def test_raw_hskp_data(
110
116
  decom_test_data: xr.Dataset,
111
117
  validation_data: pd.core.frame.DataFrame,
@@ -140,5 +146,5 @@ def test_total_packets_in_data_file(decom_test_data: xr.Dataset):
140
146
  The decommutated housekeeping packet
141
147
  """
142
148
 
143
- total_packets = 99
149
+ total_packets = 31778
144
150
  assert len(decom_test_data.epoch) == total_packets