imap-processing 0.17.0-py3-none-any.whl → 0.19.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (141)
  1. imap_processing/_version.py +2 -2
  2. imap_processing/ancillary/ancillary_dataset_combiner.py +161 -1
  3. imap_processing/ccsds/excel_to_xtce.py +12 -0
  4. imap_processing/cdf/config/imap_codice_global_cdf_attrs.yaml +6 -6
  5. imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +312 -274
  6. imap_processing/cdf/config/imap_codice_l1b_variable_attrs.yaml +39 -28
  7. imap_processing/cdf/config/imap_codice_l2_variable_attrs.yaml +1048 -183
  8. imap_processing/cdf/config/imap_constant_attrs.yaml +4 -2
  9. imap_processing/cdf/config/imap_glows_l1b_variable_attrs.yaml +12 -0
  10. imap_processing/cdf/config/imap_hi_global_cdf_attrs.yaml +5 -0
  11. imap_processing/cdf/config/imap_hit_global_cdf_attrs.yaml +10 -4
  12. imap_processing/cdf/config/imap_hit_l1a_variable_attrs.yaml +163 -100
  13. imap_processing/cdf/config/imap_hit_l2_variable_attrs.yaml +4 -4
  14. imap_processing/cdf/config/imap_ialirt_l1_variable_attrs.yaml +97 -54
  15. imap_processing/cdf/config/imap_idex_l2a_variable_attrs.yaml +33 -4
  16. imap_processing/cdf/config/imap_idex_l2b_variable_attrs.yaml +44 -44
  17. imap_processing/cdf/config/imap_idex_l2c_variable_attrs.yaml +77 -61
  18. imap_processing/cdf/config/imap_lo_global_cdf_attrs.yaml +30 -0
  19. imap_processing/cdf/config/imap_lo_l1a_variable_attrs.yaml +4 -15
  20. imap_processing/cdf/config/imap_lo_l1c_variable_attrs.yaml +189 -98
  21. imap_processing/cdf/config/imap_mag_global_cdf_attrs.yaml +99 -2
  22. imap_processing/cdf/config/imap_mag_l1c_variable_attrs.yaml +24 -1
  23. imap_processing/cdf/config/imap_ultra_global_cdf_attrs.yaml +60 -0
  24. imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +99 -11
  25. imap_processing/cdf/config/imap_ultra_l1c_variable_attrs.yaml +50 -7
  26. imap_processing/cli.py +121 -44
  27. imap_processing/codice/codice_l1a.py +165 -77
  28. imap_processing/codice/codice_l1b.py +1 -1
  29. imap_processing/codice/codice_l2.py +118 -19
  30. imap_processing/codice/constants.py +1217 -1089
  31. imap_processing/decom.py +1 -4
  32. imap_processing/ena_maps/ena_maps.py +32 -25
  33. imap_processing/ena_maps/utils/naming.py +8 -2
  34. imap_processing/glows/ancillary/imap_glows_exclusions-by-instr-team_20250923_v002.dat +10 -0
  35. imap_processing/glows/ancillary/imap_glows_map-of-excluded-regions_20250923_v002.dat +393 -0
  36. imap_processing/glows/ancillary/imap_glows_map-of-uv-sources_20250923_v002.dat +593 -0
  37. imap_processing/glows/ancillary/imap_glows_pipeline_settings_20250923_v002.json +54 -0
  38. imap_processing/glows/ancillary/imap_glows_suspected-transients_20250923_v002.dat +10 -0
  39. imap_processing/glows/l1b/glows_l1b.py +99 -9
  40. imap_processing/glows/l1b/glows_l1b_data.py +350 -38
  41. imap_processing/glows/l2/glows_l2.py +11 -0
  42. imap_processing/hi/hi_l1a.py +124 -3
  43. imap_processing/hi/hi_l1b.py +154 -71
  44. imap_processing/hi/hi_l2.py +84 -51
  45. imap_processing/hi/utils.py +153 -8
  46. imap_processing/hit/l0/constants.py +3 -0
  47. imap_processing/hit/l0/decom_hit.py +5 -8
  48. imap_processing/hit/l1a/hit_l1a.py +375 -45
  49. imap_processing/hit/l1b/constants.py +5 -0
  50. imap_processing/hit/l1b/hit_l1b.py +61 -131
  51. imap_processing/hit/l2/constants.py +1 -1
  52. imap_processing/hit/l2/hit_l2.py +10 -11
  53. imap_processing/ialirt/calculate_ingest.py +219 -0
  54. imap_processing/ialirt/constants.py +32 -1
  55. imap_processing/ialirt/generate_coverage.py +201 -0
  56. imap_processing/ialirt/l0/ialirt_spice.py +5 -2
  57. imap_processing/ialirt/l0/parse_mag.py +337 -29
  58. imap_processing/ialirt/l0/process_hit.py +5 -3
  59. imap_processing/ialirt/l0/process_swapi.py +41 -25
  60. imap_processing/ialirt/l0/process_swe.py +23 -7
  61. imap_processing/ialirt/process_ephemeris.py +70 -14
  62. imap_processing/ialirt/utils/constants.py +22 -16
  63. imap_processing/ialirt/utils/create_xarray.py +42 -19
  64. imap_processing/idex/idex_constants.py +1 -5
  65. imap_processing/idex/idex_l0.py +2 -2
  66. imap_processing/idex/idex_l1a.py +2 -3
  67. imap_processing/idex/idex_l1b.py +2 -3
  68. imap_processing/idex/idex_l2a.py +130 -4
  69. imap_processing/idex/idex_l2b.py +313 -119
  70. imap_processing/idex/idex_utils.py +1 -3
  71. imap_processing/lo/l0/lo_apid.py +1 -0
  72. imap_processing/lo/l0/lo_science.py +25 -24
  73. imap_processing/lo/l1a/lo_l1a.py +44 -0
  74. imap_processing/lo/l1b/lo_l1b.py +3 -3
  75. imap_processing/lo/l1c/lo_l1c.py +116 -50
  76. imap_processing/lo/l2/lo_l2.py +29 -29
  77. imap_processing/lo/lo_ancillary.py +55 -0
  78. imap_processing/lo/packet_definitions/lo_xtce.xml +5359 -106
  79. imap_processing/mag/constants.py +1 -0
  80. imap_processing/mag/l1a/mag_l1a.py +1 -0
  81. imap_processing/mag/l1a/mag_l1a_data.py +26 -0
  82. imap_processing/mag/l1b/mag_l1b.py +3 -2
  83. imap_processing/mag/l1c/interpolation_methods.py +14 -15
  84. imap_processing/mag/l1c/mag_l1c.py +23 -6
  85. imap_processing/mag/l1d/__init__.py +0 -0
  86. imap_processing/mag/l1d/mag_l1d.py +176 -0
  87. imap_processing/mag/l1d/mag_l1d_data.py +725 -0
  88. imap_processing/mag/l2/__init__.py +0 -0
  89. imap_processing/mag/l2/mag_l2.py +25 -20
  90. imap_processing/mag/l2/mag_l2_data.py +199 -130
  91. imap_processing/quality_flags.py +28 -2
  92. imap_processing/spice/geometry.py +101 -36
  93. imap_processing/spice/pointing_frame.py +1 -7
  94. imap_processing/spice/repoint.py +29 -2
  95. imap_processing/spice/spin.py +32 -8
  96. imap_processing/spice/time.py +60 -19
  97. imap_processing/swapi/l1/swapi_l1.py +10 -4
  98. imap_processing/swapi/l2/swapi_l2.py +66 -24
  99. imap_processing/swapi/swapi_utils.py +1 -1
  100. imap_processing/swe/l1b/swe_l1b.py +3 -6
  101. imap_processing/ultra/constants.py +28 -3
  102. imap_processing/ultra/l0/decom_tools.py +15 -8
  103. imap_processing/ultra/l0/decom_ultra.py +35 -11
  104. imap_processing/ultra/l0/ultra_utils.py +102 -12
  105. imap_processing/ultra/l1a/ultra_l1a.py +26 -6
  106. imap_processing/ultra/l1b/cullingmask.py +6 -3
  107. imap_processing/ultra/l1b/de.py +122 -26
  108. imap_processing/ultra/l1b/extendedspin.py +29 -2
  109. imap_processing/ultra/l1b/lookup_utils.py +424 -50
  110. imap_processing/ultra/l1b/quality_flag_filters.py +23 -0
  111. imap_processing/ultra/l1b/ultra_l1b_culling.py +356 -5
  112. imap_processing/ultra/l1b/ultra_l1b_extended.py +534 -90
  113. imap_processing/ultra/l1c/helio_pset.py +127 -7
  114. imap_processing/ultra/l1c/l1c_lookup_utils.py +256 -0
  115. imap_processing/ultra/l1c/spacecraft_pset.py +90 -15
  116. imap_processing/ultra/l1c/ultra_l1c.py +6 -0
  117. imap_processing/ultra/l1c/ultra_l1c_culling.py +85 -0
  118. imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +446 -341
  119. imap_processing/ultra/l2/ultra_l2.py +0 -1
  120. imap_processing/ultra/utils/ultra_l1_utils.py +40 -3
  121. imap_processing/utils.py +3 -4
  122. {imap_processing-0.17.0.dist-info → imap_processing-0.19.0.dist-info}/METADATA +3 -3
  123. {imap_processing-0.17.0.dist-info → imap_processing-0.19.0.dist-info}/RECORD +126 -126
  124. imap_processing/idex/idex_l2c.py +0 -250
  125. imap_processing/spice/kernels.py +0 -187
  126. imap_processing/ultra/lookup_tables/Angular_Profiles_FM45_LeftSlit.csv +0 -526
  127. imap_processing/ultra/lookup_tables/Angular_Profiles_FM45_RightSlit.csv +0 -526
  128. imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_LeftSlit.csv +0 -526
  129. imap_processing/ultra/lookup_tables/Angular_Profiles_FM90_RightSlit.csv +0 -524
  130. imap_processing/ultra/lookup_tables/EgyNorm.mem.csv +0 -32769
  131. imap_processing/ultra/lookup_tables/FM45_Startup1_ULTRA_IMGPARAMS_20240719.csv +0 -2
  132. imap_processing/ultra/lookup_tables/FM90_Startup1_ULTRA_IMGPARAMS_20240719.csv +0 -2
  133. imap_processing/ultra/lookup_tables/dps_grid45_compressed.cdf +0 -0
  134. imap_processing/ultra/lookup_tables/ultra45_back-pos-luts.csv +0 -4097
  135. imap_processing/ultra/lookup_tables/ultra45_tdc_norm.csv +0 -2050
  136. imap_processing/ultra/lookup_tables/ultra90_back-pos-luts.csv +0 -4097
  137. imap_processing/ultra/lookup_tables/ultra90_tdc_norm.csv +0 -2050
  138. imap_processing/ultra/lookup_tables/yadjust.csv +0 -257
  139. {imap_processing-0.17.0.dist-info → imap_processing-0.19.0.dist-info}/LICENSE +0 -0
  140. {imap_processing-0.17.0.dist-info → imap_processing-0.19.0.dist-info}/WHEEL +0 -0
  141. {imap_processing-0.17.0.dist-info → imap_processing-0.19.0.dist-info}/entry_points.txt +0 -0

imap_processing/hi/hi_l1b.py
@@ -3,9 +3,9 @@
  import logging
  from enum import IntEnum
  from pathlib import Path
- from typing import Union
 
  import numpy as np
+ import pandas as pd
  import xarray as xr
 
  from imap_processing import imap_module_directory
@@ -15,6 +15,7 @@ from imap_processing.hi.hi_l1a import HALF_CLOCK_TICK_S
  from imap_processing.hi.utils import (
      HIAPID,
      CoincidenceBitmap,
+     EsaEnergyStepLookupTable,
      HiConstants,
      create_dataset_variables,
      parse_sensor_number,
@@ -27,7 +28,7 @@ from imap_processing.spice.spin import (
      get_instrument_spin_phase,
      get_spacecraft_spin_phase,
  )
- from imap_processing.spice.time import met_to_sclkticks, sct_to_et
+ from imap_processing.spice.time import met_to_sclkticks, met_to_utc, sct_to_et
  from imap_processing.utils import packet_file_to_datasets
 
 
@@ -45,52 +46,7 @@ ATTR_MGR.add_instrument_global_attrs("hi")
  ATTR_MGR.add_instrument_variable_attrs(instrument="hi", level=None)
 
 
- def hi_l1b(dependency: Union[str, Path, xr.Dataset]) -> list[xr.Dataset]:
-     """
-     High level IMAP-HI L1B processing function.
-
-     Parameters
-     ----------
-     dependency : str or xarray.Dataset
-         Path to L0 file or L1A dataset to process.
-
-     Returns
-     -------
-     l1b_dataset : list[xarray.Dataset]
-         Processed xarray datasets.
-     """
-     # Housekeeping processing
-     if isinstance(dependency, (Path, str)):
-         logger.info(f"Running Hi L1B processing on file: {dependency}")
-         l1b_datasets = housekeeping(dependency)
-     elif isinstance(dependency, xr.Dataset):
-         l1a_dataset = dependency
-         logger.info(
-             f"Running Hi L1B processing on dataset: "
-             f"{l1a_dataset.attrs['Logical_source']}"
-         )
-         logical_source_parts = parse_filename_like(l1a_dataset.attrs["Logical_source"])
-         # TODO: apid is not currently stored in all L1A data but should be.
-         # Use apid to determine what L1B processing function to call
-
-         # DE processing
-         if logical_source_parts["descriptor"].endswith("de"):
-             l1b_datasets = [annotate_direct_events(l1a_dataset)]
-             l1b_datasets[0].attrs["Logical_source"] = (
-                 l1b_datasets[0]
-                 .attrs["Logical_source"]
-                 .format(sensor=logical_source_parts["sensor"])
-             )
-         else:
-             raise NotImplementedError(
-                 f"No Hi L1B processing defined for file type: "
-                 f"{l1a_dataset.attrs['Logical_source']}"
-             )
-
-     return l1b_datasets
-
-
- def housekeeping(packet_file_path: Union[str, Path]) -> list[xr.Dataset]:
+ def housekeeping(packet_file_path: str | Path) -> list[xr.Dataset]:
      """
      Will process IMAP raw data to l1b housekeeping dataset.
 
@@ -108,6 +64,7 @@ def housekeeping(packet_file_path: Union[str, Path]) -> list[xr.Dataset]:
      processed_data : list[xarray.Dataset]
          Housekeeping datasets with engineering units.
      """
+     logger.info(f"Running Hi L1B processing on file: {packet_file_path}")
      packet_def_file = (
          imap_module_directory / "hi/packet_definitions/TLM_HI_COMBINED_SCI.xml"
      )
@@ -137,33 +94,46 @@ def housekeeping(packet_file_path: Union[str, Path]) -> list[xr.Dataset]:
      return datasets
 
 
- def annotate_direct_events(l1a_dataset: xr.Dataset) -> xr.Dataset:
+ def annotate_direct_events(
+     l1a_de_dataset: xr.Dataset, l1b_hk_dataset: xr.Dataset, esa_energies_anc: Path
+ ) -> list[xr.Dataset]:
      """
      Perform Hi L1B processing on direct event data.
 
      Parameters
      ----------
-     l1a_dataset : xarray.Dataset
+     l1a_de_dataset : xarray.Dataset
          L1A direct event data.
+     l1b_hk_dataset : xarray.Dataset
+         L1B housekeeping data coincident with the L1A DE data.
+     esa_energies_anc : pathlib.Path
+         Location of the esa-energies ancillary csv file.
 
      Returns
      -------
-     l1b_dataset : xarray.Dataset
-         L1B direct event data.
+     l1b_datasets : list[xarray.Dataset]
+         List containing exactly one L1B direct event dataset.
      """
-     l1b_dataset = l1a_dataset.copy()
-     l1b_dataset.update(de_esa_energy_step(l1b_dataset))
-     l1b_dataset.update(compute_coincidence_type_and_tofs(l1b_dataset))
-     l1b_dataset.update(de_nominal_bin_and_spin_phase(l1b_dataset))
-     l1b_dataset.update(compute_hae_coordinates(l1b_dataset))
-     l1b_dataset.update(
+     logger.info(
+         f"Running Hi L1B processing on dataset: "
+         f"{l1a_de_dataset.attrs['Logical_source']}"
+     )
+
+     l1b_de_dataset = l1a_de_dataset.copy()
+     l1b_de_dataset.update(
+         de_esa_energy_step(l1b_de_dataset, l1b_hk_dataset, esa_energies_anc)
+     )
+     l1b_de_dataset.update(compute_coincidence_type_and_tofs(l1b_de_dataset))
+     l1b_de_dataset.update(de_nominal_bin_and_spin_phase(l1b_de_dataset))
+     l1b_de_dataset.update(compute_hae_coordinates(l1b_de_dataset))
+     l1b_de_dataset.update(
          create_dataset_variables(
              ["quality_flag"],
-             l1b_dataset["event_met"].size,
+             l1b_de_dataset["event_met"].size,
              att_manager_lookup_str="hi_de_{0}",
          )
      )
-     l1b_dataset = l1b_dataset.drop_vars(
+     l1b_de_dataset = l1b_de_dataset.drop_vars(
          [
              "src_seq_ctr",
              "pkt_len",
@@ -179,8 +149,13 @@ def annotate_direct_events(l1a_dataset: xr.Dataset) -> xr.Dataset:
      )
 
      de_global_attrs = ATTR_MGR.get_global_attributes("imap_hi_l1b_de_attrs")
-     l1b_dataset.attrs.update(**de_global_attrs)
-     return l1b_dataset
+     l1b_de_dataset.attrs.update(**de_global_attrs)
+
+     logical_source_parts = parse_filename_like(l1a_de_dataset.attrs["Logical_source"])
+     l1b_de_dataset.attrs["Logical_source"] = l1b_de_dataset.attrs[
+         "Logical_source"
+     ].format(sensor=logical_source_parts["sensor"])
+     return [l1b_de_dataset]
 
 
  def compute_coincidence_type_and_tofs(
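
The refactored annotate_direct_events (previous two hunks) keeps the existing accumulation pattern: each helper returns a dict of new DataArrays and Dataset.update() merges them into the working copy before variables are dropped and global attributes are applied. A minimal toy sketch of that pattern, using a hypothetical helper rather than the real Hi functions:

import numpy as np
import xarray as xr

# Toy stand-in for a direct-event dataset: one value per event epoch.
ds = xr.Dataset(
    {"esa_step": ("epoch", np.array([1, 2, 3]))},
    coords={"epoch": np.array([0, 1, 2])},
)

def toy_helper(partial_ds: xr.Dataset) -> dict:
    # Each processing step returns new variables keyed by name.
    return {
        "esa_step_doubled": xr.DataArray(partial_ds["esa_step"].data * 2, dims=["epoch"])
    }

# Dataset.update() merges the returned variables in place, which is how the
# L1B pipeline chains its processing steps.
ds.update(toy_helper(ds))
print(ds["esa_step_doubled"].values)  # [2 4 6]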
@@ -387,18 +362,20 @@ def compute_hae_coordinates(dataset: xr.Dataset) -> dict[str, xr.DataArray]:
      return new_vars
 
 
- def de_esa_energy_step(dataset: xr.Dataset) -> dict[str, xr.DataArray]:
+ def de_esa_energy_step(
+     l1b_de_ds: xr.Dataset, l1b_hk_ds: xr.Dataset, esa_energies_anc: Path
+ ) -> dict[str, xr.DataArray]:
      """
      Compute esa_energy_step for each direct event.
 
-     TODO: For now this function just returns the esa_step from the input dataset.
-     Eventually, it will take L1B housekeeping data and determine the esa
-     energy steps from that data.
-
      Parameters
      ----------
-     dataset : xarray.Dataset
+     l1b_de_ds : xarray.Dataset
          The partial L1B dataset.
+     l1b_hk_ds : xarray.Dataset
+         L1B housekeeping data coincident with the L1A DE data.
+     esa_energies_anc : pathlib.Path
+         Location of the esa-energies ancillary csv file.
 
      Returns
      -------
@@ -407,10 +384,116 @@
      """
      new_vars = create_dataset_variables(
          ["esa_energy_step"],
-         len(dataset.epoch),
+         len(l1b_de_ds.epoch),
          att_manager_lookup_str="hi_de_{0}",
      )
-     # TODO: Implement this algorithm
-     new_vars["esa_energy_step"].values = dataset.esa_step.values
+
+     # Get the LUT object using the HK data and esa-energies ancillary csv
+     esa_energies_lut = pd.read_csv(esa_energies_anc, comment="#")
+     esa_to_esa_energy_step_lut = get_esa_to_esa_energy_step_lut(
+         l1b_hk_ds, esa_energies_lut
+     )
+     new_vars["esa_energy_step"].values = esa_to_esa_energy_step_lut.query(
+         l1b_de_ds["ccsds_met"].data, l1b_de_ds["esa_step"].data
+     )
 
      return new_vars
+
+
+ def get_esa_to_esa_energy_step_lut(
+     l1b_hk_ds: xr.Dataset, esa_energies_lut: pd.DataFrame
+ ) -> EsaEnergyStepLookupTable:
+     """
+     Generate a lookup table that associates an esa_step to an esa_energy_step.
+
+     Parameters
+     ----------
+     l1b_hk_ds : xarray.Dataset
+         L1B housekeeping dataset.
+     esa_energies_lut : pandas.DataFrame
+         Esa energies lookup table derived from ancillary file.
+
+     Returns
+     -------
+     esa_energy_step_lut : EsaEnergyStepLookupTable
+         A lookup table object that can be used to query by MET time and esa_step
+         for the associated esa_energy_step values.
+
+     Notes
+     -----
+     Algorithm definition in section 2.1.2 of IMAP Hi Algorithm Document.
+     """
+     # Instantiate a lookup table object
+     esa_energy_step_lut = EsaEnergyStepLookupTable()
+     # Get the set of esa_steps visited
+     esa_steps = list(sorted(set(l1b_hk_ds["sci_esa_step"].data)))
+     # Break into contiguous segments where op_mode == "HVSCI"
+     # Pad the boolean array `op_mode == HVSCI` with False values on each end.
+     # This treats starting or ending in HVSCI mode as a transition in the next
+     # step where np.diff is used to find op_mode transitions into and out of
+     # HVSCI
+     padded_mask = np.pad(
+         l1b_hk_ds["op_mode"].data == "HVSCI", (1, 1), constant_values=False
+     )
+     mode_changes = np.diff(padded_mask.astype(int))
+     hsvsci_starts = np.nonzero(mode_changes == 1)[0]
+     hsvsci_ends = np.nonzero(mode_changes == -1)[0]
+     for i_start, i_end in zip(hsvsci_starts, hsvsci_ends, strict=False):
+         contiguous_hvsci_ds = l1b_hk_ds.isel(dict(epoch=slice(i_start, i_end)))
+         # Find median inner and outer ESA voltages for each ESA step
+         for esa_step in esa_steps:
+             single_esa_ds = contiguous_hvsci_ds.where(
+                 contiguous_hvsci_ds["sci_esa_step"] == esa_step, drop=True
+             )
+             if len(single_esa_ds["epoch"].data) == 0:
+                 logger.debug(
+                     f"No instances of sci_esa_step == {esa_step} "
+                     f"present in contiguous HVSCI block with interval: "
+                     f"({met_to_utc(contiguous_hvsci_ds['shcoarse'].data[[0, -1]])})"
+                 )
+                 continue
+             inner_esa_voltage = np.where(
+                 single_esa_ds["inner_esa_state"].data == "LO",
+                 single_esa_ds["inner_esa_lo"].data,
+                 single_esa_ds["inner_esa_hi"].data,
+             )
+             median_inner_esa = np.median(inner_esa_voltage)
+             median_outer_esa = np.median(single_esa_ds["outer_esa"].data)
+             # Match median voltages to ESA Energies LUT
+             inner_voltage_match = (
+                 np.abs(median_inner_esa - esa_energies_lut["inner_esa_voltage"])
+                 <= esa_energies_lut["inner_esa_delta_v"]
+             )
+             outer_voltage_match = (
+                 np.abs(median_outer_esa - esa_energies_lut["outer_esa_voltage"])
+                 <= esa_energies_lut["outer_esa_delta_v"]
+             )
+             matching_esa_energy = esa_energies_lut[
+                 np.logical_and(inner_voltage_match, outer_voltage_match)
+             ]
+             if len(matching_esa_energy) != 1:
+                 if len(matching_esa_energy) == 0:
+                     logger.critical(
+                         f"No esa_energy_step matches found for esa_step "
+                         f"{esa_step} during interval: "
+                         f"({met_to_utc(single_esa_ds['shcoarse'].data[[0, -1]])}) "
+                         f"with median esa voltages: "
+                         f"{median_inner_esa}, {median_outer_esa}."
+                     )
+                 if len(matching_esa_energy) > 1:
+                     logger.critical(
+                         f"Multiple esa_energy_step matches found for esa_step "
+                         f"{esa_step} during interval: "
+                         f"({met_to_utc(single_esa_ds['shcoarse'].data[[0, -1]])}) "
+                         f"with median esa voltages: "
+                         f"{median_inner_esa}, {median_outer_esa}."
+                     )
+                 continue
+             # Set LUT to matching esa_energy_step for time range
+             esa_energy_step_lut.add_entry(
+                 contiguous_hvsci_ds["shcoarse"].data[0],
+                 contiguous_hvsci_ds["shcoarse"].data[-1],
+                 esa_step,
+                 matching_esa_energy["esa_energy_step"].values[0],
+             )
+     return esa_energy_step_lut
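
The segmentation step above relies on a standard pad-and-diff trick: padding the op_mode == "HVSCI" mask with False on both ends turns runs that touch either end of the array into ordinary transitions. A standalone sketch with a made-up op_mode array (not real housekeeping data):

import numpy as np

# Hypothetical op_mode telemetry: the data starts and ends in HVSCI,
# which is exactly the edge case the padding handles.
op_mode = np.array(["HVSCI", "HVSCI", "SAFE", "SAFE", "HVSCI", "HVSCI", "HVSCI"])

# Pad the boolean mask with False so a run touching either end of the array
# still produces a +1/-1 transition in the diff.
padded_mask = np.pad(op_mode == "HVSCI", (1, 1), constant_values=False)
mode_changes = np.diff(padded_mask.astype(int))

starts = np.nonzero(mode_changes == 1)[0]   # first index of each HVSCI run
ends = np.nonzero(mode_changes == -1)[0]    # one past the last index of each run

print([(int(s), int(e)) for s, e in zip(starts, ends)])  # [(0, 2), (4, 7)]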

imap_processing/hi/hi_l2.py
@@ -2,13 +2,17 @@
 
  import logging
  from pathlib import Path
- from typing import Literal
 
  import numpy as np
+ import pandas as pd
  import xarray as xr
 
- from imap_processing.ena_maps.ena_maps import HiPointingSet, RectangularSkyMap
- from imap_processing.spice.geometry import SpiceFrame
+ from imap_processing.ena_maps.ena_maps import (
+     AbstractSkyMap,
+     HiPointingSet,
+     RectangularSkyMap,
+ )
+ from imap_processing.ena_maps.utils.naming import MapDescriptor
 
  logger = logging.getLogger(__name__)
 
@@ -42,24 +46,36 @@ def hi_l2(
      l2_dataset : list[xarray.Dataset]
          Level 2 IMAP-Hi dataset ready to be written to a CDF file.
      """
-     # TODO: parse descriptor to determine map configuration
-     sensor = "45" if "45" in descriptor else "90"
-     direction: Literal["full"] = "full"
      cg_corrected = False
-     map_spacing = 4
+     map_descriptor = MapDescriptor.from_string(descriptor)
 
-     rect_map = generate_hi_map(
+     sky_map = generate_hi_map(
          psets,
          geometric_factors_path,
          esa_energies_path,
-         direction=direction,
+         spin_phase=map_descriptor.spin_phase,
+         output_map=map_descriptor.to_empty_map(),
          cg_corrected=cg_corrected,
-         map_spacing=map_spacing,
      )
 
      # Get the map dataset with variables/coordinates in the correct shape
      # TODO get the correct descriptor and frame
-     l2_ds = rect_map.build_cdf_dataset("hi", "l2", "sf", descriptor, sensor=sensor)
+
+     if not isinstance(sky_map, RectangularSkyMap):
+         raise NotImplementedError("HEALPix map output not supported for Hi")
+     if not isinstance(map_descriptor.sensor, str):
+         raise ValueError(
+             "Invalid map_descriptor. Sensor attribute must be of type str "
+             "and be either '45' or '90'"
+         )
+
+     l2_ds = sky_map.build_cdf_dataset(
+         "hi",
+         "l2",
+         map_descriptor.frame_descriptor,
+         descriptor,
+         sensor=map_descriptor.sensor,
+     )
 
      return [l2_ds]
 
@@ -68,12 +84,12 @@ def generate_hi_map(
      psets: list[str | Path],
      geometric_factors_path: str | Path,
      esa_energies_path: str | Path,
+     output_map: AbstractSkyMap,
      cg_corrected: bool = False,
-     direction: Literal["ram", "anti-ram", "full"] = "full",
-     map_spacing: int = 4,
- ) -> RectangularSkyMap:
+     spin_phase: str = "full",
+ ) -> AbstractSkyMap:
      """
-     Project Hi PSET data into a rectangular sky map.
+     Project Hi PSET data into a sky map.
 
      Parameters
      ----------
@@ -83,34 +99,28 @@
          Where to get the geometric factors from.
      esa_energies_path : str or pathlib.Path
          Where to get the energies from.
+     output_map : AbstractSkyMap
+         The map object to collect data into. Determines pixel spacing,
+         coordinate system, etc.
      cg_corrected : bool, Optional
          Whether to apply Compton-Getting correction to the energies. Defaults to
          False.
-     direction : str, Optional
+     spin_phase : str, Optional
          Apply filtering to PSET data include ram or anti-ram or full spin data.
          Defaults to "full".
-     map_spacing : int, Optional
-         Pixel spacing, in degrees, of the output map in degrees. Defaults to 4.
 
      Returns
      -------
-     sky_map : RectangularSkyMap
+     sky_map : AbstractSkyMap
          The sky map with all the PSET data projected into the map.
      """
-     rect_map = RectangularSkyMap(
-         spacing_deg=map_spacing, spice_frame=SpiceFrame.ECLIPJ2000
-     )
-
      # TODO: Implement Compton-Getting correction
      if cg_corrected:
          raise NotImplementedError
-     # TODO: Implement directional filtering
-     if direction != "full":
-         raise NotImplementedError
 
      for pset_path in psets:
          logger.info(f"Processing {pset_path}")
-         pset = HiPointingSet(pset_path)
+         pset = HiPointingSet(pset_path, spin_phase=spin_phase)
 
          # Background rate and uncertainty are exposure time weighted means in
          # the map.
@@ -118,7 +128,7 @@
              pset.data[var] *= pset.data["exposure_factor"]
 
          # Project (bin) the PSET variables into the map pixels
-         rect_map.project_pset_values_to_map(
+         output_map.project_pset_values_to_map(
              pset,
              ["counts", "exposure_factor", "bg_rates", "bg_rates_unc", "obs_date"],
          )
@@ -127,35 +137,34 @@
      # Allow divide by zero to fill set pixels with zero exposure time to NaN
      with np.errstate(divide="ignore"):
          for var in VARS_TO_EXPOSURE_TIME_AVERAGE:
-             rect_map.data_1d[var] /= rect_map.data_1d["exposure_factor"]
+             output_map.data_1d[var] /= output_map.data_1d["exposure_factor"]
 
-     rect_map.data_1d.update(calculate_ena_signal_rates(rect_map.data_1d))
-     rect_map.data_1d.update(
+     output_map.data_1d.update(calculate_ena_signal_rates(output_map.data_1d))
+     output_map.data_1d.update(
          calculate_ena_intensity(
-             rect_map.data_1d, geometric_factors_path, esa_energies_path
+             output_map.data_1d, geometric_factors_path, esa_energies_path
          )
      )
 
-     rect_map.data_1d["obs_date"].data = rect_map.data_1d["obs_date"].data.astype(
+     output_map.data_1d["obs_date"].data = output_map.data_1d["obs_date"].data.astype(
          np.int64
      )
      # TODO: Figure out how to compute obs_date_range (stddev of obs_date)
-     rect_map.data_1d["obs_date_range"] = xr.zeros_like(rect_map.data_1d["obs_date"])
+     output_map.data_1d["obs_date_range"] = xr.zeros_like(output_map.data_1d["obs_date"])
 
      # Rename and convert coordinate from esa_energy_step energy
-     # TODO: the correct conversion from esa_energy_step to esa_energy
-     esa_energy_step_conversion = (np.arange(10, dtype=float) + 1) * 1000
-     rect_map.data_1d = rect_map.data_1d.rename({"esa_energy_step": "energy"})
-     rect_map.data_1d = rect_map.data_1d.assign_coords(
-         energy=esa_energy_step_conversion[rect_map.data_1d["energy"].values]
+     esa_energies = esa_energy_lookup(
+         esa_energies_path, output_map.data_1d["esa_energy_step"].data
      )
+     output_map.data_1d = output_map.data_1d.rename({"esa_energy_step": "energy"})
+     output_map.data_1d = output_map.data_1d.assign_coords(energy=esa_energies)
      # Set the energy_step_delta values
      # TODO: get the correct energy delta values (they are set to NaN) in
-     # rect_map.build_cdf_dataset()
+     # output_map.build_cdf_dataset()
 
-     rect_map.data_1d = rect_map.data_1d.drop("esa_energy_step_label")
+     output_map.data_1d = output_map.data_1d.drop("esa_energy_step_label")
 
-     return rect_map
+     return output_map
 
 
  def calculate_ena_signal_rates(map_ds: xr.Dataset) -> dict[str, xr.DataArray]:
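
The exposure handling in generate_hi_map is a weighted mean computed in two passes: the selected variables are multiplied by exposure_factor before being projected into the map, and the binned sums are later divided by the binned exposure, leaving zero-exposure pixels as NaN. The sketch below reproduces that weighted-mean-by-accumulation idea with np.bincount and made-up values; the actual binning is done by project_pset_values_to_map:

import numpy as np

# Made-up PSET samples: a background rate, an exposure time, and the map
# pixel each sample falls into.
bg_rates = np.array([2.0, 4.0, 6.0])
exposure = np.array([1.0, 3.0, 0.0])
pixel = np.array([0, 0, 1])

n_pix = 2
# Accumulate exposure-weighted values and exposure per pixel (the "project
# to map" step), then divide; zero-exposure pixels become NaN.
weighted_sum = np.bincount(pixel, weights=bg_rates * exposure, minlength=n_pix)
exposure_sum = np.bincount(pixel, weights=exposure, minlength=n_pix)
with np.errstate(divide="ignore", invalid="ignore"):
    weighted_mean = weighted_sum / exposure_sum
print(weighted_mean)  # [3.5 nan]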
@@ -228,19 +237,16 @@
          np.ones((map_ds["esa_energy_step"].size, map_ds["calibration_prod"].size)),
          coords=[map_ds["esa_energy_step"], map_ds["calibration_prod"]],
      )
-     # TODO: Implement esa energies lookup
-     if esa_energies_path:
-         raise NotImplementedError
-     esa_energy = xr.ones_like(map_ds["esa_energy_step"])
+
+     esa_energy = esa_energy_lookup(esa_energies_path, map_ds["esa_energy_step"].data)
 
      # Convert ENA Signal Rate to Flux
+     flux_conversion_divisor = geometric_factor * esa_energy[:, np.newaxis]
      intensity_vars = {
-         "ena_intensity": map_ds["ena_signal_rates"] / (geometric_factor * esa_energy),
+         "ena_intensity": map_ds["ena_signal_rates"] / flux_conversion_divisor,
          "ena_intensity_stat_unc": map_ds["ena_signal_rate_stat_unc"]
-         / geometric_factor
-         / esa_energy,
-         "ena_intensity_sys_err": map_ds["bg_rates_unc"]
-         / (geometric_factor * esa_energy),
+         / flux_conversion_divisor,
+         "ena_intensity_sys_err": map_ds["bg_rates_unc"] / flux_conversion_divisor,
      }
 
      # TODO: Correctly implement combining of calibration products. For now, just sum
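
In the flux conversion above, geometric_factor varies with both ESA energy step and calibration product while esa_energy varies only with energy step, so the divisor is formed by broadcasting esa_energy[:, np.newaxis] across the product axis. A small numpy-only sketch with made-up values (the real code divides xarray DataArrays aligned on named dimensions):

import numpy as np

# Made-up shapes: 3 esa_energy_steps x 2 calibration products.
geometric_factor = np.array([[1.0, 2.0],
                             [2.0, 4.0],
                             [5.0, 10.0]])
esa_energy = np.array([100.0, 200.0, 500.0])  # one energy per step
signal_rates = np.array([[10.0, 40.0],
                         [40.0, 160.0],
                         [250.0, 1000.0]])

# esa_energy[:, np.newaxis] has shape (3, 1) and broadcasts across the
# calibration-product axis, so every product at a given step is divided
# by the same energy.
flux_conversion_divisor = geometric_factor * esa_energy[:, np.newaxis]
ena_intensity = signal_rates / flux_conversion_divisor
print(ena_intensity)  # [[0.1 0.2]
                      #  [0.1 0.2]
                      #  [0.1 0.2]]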
@@ -259,3 +265,30 @@
      )
 
      return intensity_vars
+
+
+ def esa_energy_lookup(
+     esa_energies_path: str | Path, esa_energy_steps: np.ndarray
+ ) -> np.ndarray:
+     """
+     Lookup the nominal central energy values for given esa energy steps.
+
+     Parameters
+     ----------
+     esa_energies_path : str or pathlib.Path
+         Location of the calibration csv file containing the lookup data.
+     esa_energy_steps : numpy.ndarray
+         The ESA energy steps to get energies for.
+
+     Returns
+     -------
+     esa_energies: numpy.ndarray
+         The nominal central energy for the given esa energy steps.
+     """
+     esa_energies_lut = pd.read_csv(
+         esa_energies_path, comment="#", index_col="esa_energy_step"
+     )
+     esa_energies = esa_energies_lut.loc[esa_energy_steps][
+         "nominal_central_energy"
+     ].values
+     return esa_energies
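
The new esa_energy_lookup leans on pandas indexing: reading the ancillary CSV with index_col="esa_energy_step" lets .loc accept a repeated, unordered array of steps and return one row per element, so the result lines up with the input array. A self-contained sketch with a made-up two-column CSV (the real ancillary file has more columns and different values):

import io

import numpy as np
import pandas as pd

# Hypothetical stand-in for the esa-energies ancillary csv; only the two
# columns used by the lookup are shown and the values are illustrative.
csv_text = """# comment lines are skipped
esa_energy_step,nominal_central_energy
1,500
2,750
3,1100
"""

lut = pd.read_csv(io.StringIO(csv_text), comment="#", index_col="esa_energy_step")

# .loc with a repeated, unordered array of index values returns one row per
# requested step, so the output is element-wise aligned with the input.
steps = np.array([3, 1, 1, 2])
energies = lut.loc[steps]["nominal_central_energy"].values
print(energies)  # [1100  500  500  750]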