imap-processing 0.18.0-py3-none-any.whl → 0.19.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (104)
  1. imap_processing/_version.py +2 -2
  2. imap_processing/ancillary/ancillary_dataset_combiner.py +161 -1
  3. imap_processing/cdf/config/imap_codice_l1a_variable_attrs.yaml +301 -274
  4. imap_processing/cdf/config/imap_codice_l1b_variable_attrs.yaml +28 -28
  5. imap_processing/cdf/config/imap_codice_l2_variable_attrs.yaml +1044 -203
  6. imap_processing/cdf/config/imap_constant_attrs.yaml +4 -2
  7. imap_processing/cdf/config/imap_glows_l1b_variable_attrs.yaml +12 -0
  8. imap_processing/cdf/config/imap_hi_global_cdf_attrs.yaml +5 -0
  9. imap_processing/cdf/config/imap_hit_global_cdf_attrs.yaml +10 -4
  10. imap_processing/cdf/config/imap_idex_l2a_variable_attrs.yaml +33 -4
  11. imap_processing/cdf/config/imap_idex_l2b_variable_attrs.yaml +8 -91
  12. imap_processing/cdf/config/imap_idex_l2c_variable_attrs.yaml +106 -16
  13. imap_processing/cdf/config/imap_lo_l1a_variable_attrs.yaml +4 -15
  14. imap_processing/cdf/config/imap_lo_l1c_variable_attrs.yaml +189 -98
  15. imap_processing/cdf/config/imap_mag_global_cdf_attrs.yaml +85 -2
  16. imap_processing/cdf/config/imap_mag_l1c_variable_attrs.yaml +24 -1
  17. imap_processing/cdf/config/imap_ultra_l1b_variable_attrs.yaml +12 -4
  18. imap_processing/cdf/config/imap_ultra_l1c_variable_attrs.yaml +50 -7
  19. imap_processing/cli.py +95 -41
  20. imap_processing/codice/codice_l1a.py +131 -31
  21. imap_processing/codice/codice_l2.py +118 -10
  22. imap_processing/codice/constants.py +740 -595
  23. imap_processing/decom.py +1 -4
  24. imap_processing/ena_maps/ena_maps.py +32 -25
  25. imap_processing/ena_maps/utils/naming.py +8 -2
  26. imap_processing/glows/ancillary/imap_glows_exclusions-by-instr-team_20250923_v002.dat +10 -0
  27. imap_processing/glows/ancillary/imap_glows_map-of-excluded-regions_20250923_v002.dat +393 -0
  28. imap_processing/glows/ancillary/imap_glows_map-of-uv-sources_20250923_v002.dat +593 -0
  29. imap_processing/glows/ancillary/imap_glows_pipeline_settings_20250923_v002.json +54 -0
  30. imap_processing/glows/ancillary/imap_glows_suspected-transients_20250923_v002.dat +10 -0
  31. imap_processing/glows/l1b/glows_l1b.py +99 -9
  32. imap_processing/glows/l1b/glows_l1b_data.py +350 -38
  33. imap_processing/glows/l2/glows_l2.py +11 -0
  34. imap_processing/hi/hi_l1a.py +124 -3
  35. imap_processing/hi/hi_l1b.py +154 -71
  36. imap_processing/hi/hi_l2.py +84 -51
  37. imap_processing/hi/utils.py +153 -8
  38. imap_processing/hit/l0/constants.py +3 -0
  39. imap_processing/hit/l0/decom_hit.py +3 -6
  40. imap_processing/hit/l1a/hit_l1a.py +311 -21
  41. imap_processing/hit/l1b/hit_l1b.py +54 -126
  42. imap_processing/hit/l2/hit_l2.py +6 -6
  43. imap_processing/ialirt/calculate_ingest.py +219 -0
  44. imap_processing/ialirt/constants.py +12 -2
  45. imap_processing/ialirt/generate_coverage.py +15 -2
  46. imap_processing/ialirt/l0/ialirt_spice.py +5 -2
  47. imap_processing/ialirt/l0/parse_mag.py +293 -42
  48. imap_processing/ialirt/l0/process_hit.py +5 -3
  49. imap_processing/ialirt/l0/process_swapi.py +41 -25
  50. imap_processing/ialirt/process_ephemeris.py +70 -14
  51. imap_processing/idex/idex_l0.py +2 -2
  52. imap_processing/idex/idex_l1a.py +2 -3
  53. imap_processing/idex/idex_l1b.py +2 -3
  54. imap_processing/idex/idex_l2a.py +130 -4
  55. imap_processing/idex/idex_l2b.py +158 -143
  56. imap_processing/idex/idex_utils.py +1 -3
  57. imap_processing/lo/l0/lo_science.py +25 -24
  58. imap_processing/lo/l1b/lo_l1b.py +3 -3
  59. imap_processing/lo/l1c/lo_l1c.py +116 -50
  60. imap_processing/lo/l2/lo_l2.py +29 -29
  61. imap_processing/lo/lo_ancillary.py +55 -0
  62. imap_processing/mag/l1a/mag_l1a.py +1 -0
  63. imap_processing/mag/l1a/mag_l1a_data.py +26 -0
  64. imap_processing/mag/l1b/mag_l1b.py +3 -2
  65. imap_processing/mag/l1c/interpolation_methods.py +14 -15
  66. imap_processing/mag/l1c/mag_l1c.py +23 -6
  67. imap_processing/mag/l1d/mag_l1d.py +57 -14
  68. imap_processing/mag/l1d/mag_l1d_data.py +167 -30
  69. imap_processing/mag/l2/mag_l2_data.py +10 -2
  70. imap_processing/quality_flags.py +9 -1
  71. imap_processing/spice/geometry.py +76 -33
  72. imap_processing/spice/pointing_frame.py +0 -6
  73. imap_processing/spice/repoint.py +29 -2
  74. imap_processing/spice/spin.py +28 -8
  75. imap_processing/spice/time.py +12 -22
  76. imap_processing/swapi/l1/swapi_l1.py +10 -4
  77. imap_processing/swapi/l2/swapi_l2.py +15 -17
  78. imap_processing/swe/l1b/swe_l1b.py +1 -2
  79. imap_processing/ultra/constants.py +1 -24
  80. imap_processing/ultra/l0/ultra_utils.py +9 -11
  81. imap_processing/ultra/l1a/ultra_l1a.py +1 -2
  82. imap_processing/ultra/l1b/cullingmask.py +6 -3
  83. imap_processing/ultra/l1b/de.py +81 -23
  84. imap_processing/ultra/l1b/extendedspin.py +13 -10
  85. imap_processing/ultra/l1b/lookup_utils.py +281 -28
  86. imap_processing/ultra/l1b/quality_flag_filters.py +10 -1
  87. imap_processing/ultra/l1b/ultra_l1b_culling.py +161 -3
  88. imap_processing/ultra/l1b/ultra_l1b_extended.py +253 -47
  89. imap_processing/ultra/l1c/helio_pset.py +97 -24
  90. imap_processing/ultra/l1c/l1c_lookup_utils.py +256 -0
  91. imap_processing/ultra/l1c/spacecraft_pset.py +83 -16
  92. imap_processing/ultra/l1c/ultra_l1c.py +6 -2
  93. imap_processing/ultra/l1c/ultra_l1c_culling.py +85 -0
  94. imap_processing/ultra/l1c/ultra_l1c_pset_bins.py +385 -277
  95. imap_processing/ultra/l2/ultra_l2.py +0 -1
  96. imap_processing/ultra/utils/ultra_l1_utils.py +28 -3
  97. imap_processing/utils.py +3 -4
  98. {imap_processing-0.18.0.dist-info → imap_processing-0.19.0.dist-info}/METADATA +2 -2
  99. {imap_processing-0.18.0.dist-info → imap_processing-0.19.0.dist-info}/RECORD +102 -95
  100. imap_processing/idex/idex_l2c.py +0 -84
  101. imap_processing/spice/kernels.py +0 -187
  102. {imap_processing-0.18.0.dist-info → imap_processing-0.19.0.dist-info}/LICENSE +0 -0
  103. {imap_processing-0.18.0.dist-info → imap_processing-0.19.0.dist-info}/WHEEL +0 -0
  104. {imap_processing-0.18.0.dist-info → imap_processing-0.19.0.dist-info}/entry_points.txt +0 -0
imap_processing/lo/l1c/lo_l1c.py
@@ -4,12 +4,26 @@ from dataclasses import Field
 from enum import Enum
 
 import numpy as np
-import pandas as pd
 import xarray as xr
 from scipy.stats import binned_statistic_dd
 
 from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
-from imap_processing.spice.time import met_to_ttj2000ns
+from imap_processing.lo import lo_ancillary
+from imap_processing.spice.repoint import get_pointing_times
+from imap_processing.spice.spin import get_spin_number
+from imap_processing.spice.time import met_to_ttj2000ns, ttj2000ns_to_met
+
+N_ESA_ENERGY_STEPS = 7
+N_SPIN_ANGLE_BINS = 3600
+N_OFF_ANGLE_BINS = 40
+# 1 time, 7 energy steps, 3600 spin angle bins, and 40 off angle bins
+PSET_SHAPE = (1, N_ESA_ENERGY_STEPS, N_SPIN_ANGLE_BINS, N_OFF_ANGLE_BINS)
+PSET_DIMS = ["epoch", "esa_energy_step", "spin_angle", "off_angle"]
+ESA_ENERGY_STEPS = np.arange(N_ESA_ENERGY_STEPS) + 1  # 1 to 7 inclusive
+SPIN_ANGLE_BIN_EDGES = np.linspace(0, 360, N_SPIN_ANGLE_BINS + 1)
+SPIN_ANGLE_BIN_CENTERS = (SPIN_ANGLE_BIN_EDGES[:-1] + SPIN_ANGLE_BIN_EDGES[1:]) / 2
+OFF_ANGLE_BIN_EDGES = np.linspace(-2, 2, N_OFF_ANGLE_BINS + 1)
+OFF_ANGLE_BIN_CENTERS = (OFF_ANGLE_BIN_EDGES[:-1] + OFF_ANGLE_BIN_EDGES[1:]) / 2
 
 
 class FilterType(str, Enum):
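
The new module-level constants replace the magic numbers (7, 3600, 40) previously scattered through the binning code. A quick sketch of the grids the `linspace` definitions above produce — 0.1°-wide bins in both angle dimensions:

```python
import numpy as np

spin_edges = np.linspace(0, 360, 3600 + 1)        # SPIN_ANGLE_BIN_EDGES
spin_centers = (spin_edges[:-1] + spin_edges[1:]) / 2
print(spin_centers[:2], spin_centers[-1])         # [0.05 0.15] 359.95

off_edges = np.linspace(-2, 2, 40 + 1)            # OFF_ANGLE_BIN_EDGES
off_centers = (off_edges[:-1] + off_edges[1:]) / 2
print(off_centers[0], off_centers[-1])            # -1.95 1.95
```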
@@ -52,10 +66,45 @@ def lo_l1c(sci_dependencies: dict, anc_dependencies: list) -> list[xr.Dataset]:
     if "imap_lo_l1b_de" in sci_dependencies:
         logical_source = "imap_lo_l1c_pset"
         l1b_de = sci_dependencies["imap_lo_l1b_de"]
-
         l1b_goodtimes_only = filter_goodtimes(l1b_de, anc_dependencies)
         pset = initialize_pset(l1b_goodtimes_only, attr_mgr, logical_source)
         full_counts = create_pset_counts(l1b_goodtimes_only)
+
+        # Set the pointing start and end times based on the first epoch
+        pointing_start_met, pointing_end_met = get_pointing_times(
+            ttj2000ns_to_met(l1b_goodtimes_only["epoch"][0].item())
+        )
+
+        pset["pointing_start_met"] = xr.DataArray(
+            np.array([pointing_start_met]),
+            dims="epoch",
+            attrs=attr_mgr.get_variable_attributes("pointing_start_met"),
+        )
+        pset["pointing_end_met"] = xr.DataArray(
+            np.array([pointing_end_met]),
+            dims="epoch",
+            attrs=attr_mgr.get_variable_attributes("pointing_end_met"),
+        )
+
+        # Set the epoch to the start of the pointing
+        pset["epoch"] = xr.DataArray(
+            met_to_ttj2000ns(pset["pointing_start_met"].values),
+            attrs=attr_mgr.get_variable_attributes("epoch"),
+        )
+
+        # Get the start and end spin numbers based on the pointing start and end MET
+        pset["start_spin_number"] = xr.DataArray(
+            [get_spin_number(pset["pointing_start_met"].item())],
+            dims="epoch",
+            attrs=attr_mgr.get_variable_attributes("start_spin_number"),
+        )
+        pset["end_spin_number"] = xr.DataArray(
+            [get_spin_number(pset["pointing_end_met"].item())],
+            dims="epoch",
+            attrs=attr_mgr.get_variable_attributes("end_spin_number"),
+        )
+
+        # Set the counts
         pset["triples_counts"] = create_pset_counts(
             l1b_goodtimes_only, FilterType.TRIPLES
         )
@@ -64,19 +113,18 @@ def lo_l1c(sci_dependencies: dict, anc_dependencies: list) -> list[xr.Dataset]:
         )
         pset["h_counts"] = create_pset_counts(l1b_goodtimes_only, FilterType.HYDROGEN)
         pset["o_counts"] = create_pset_counts(l1b_goodtimes_only, FilterType.OXYGEN)
+
+        # Set the exposure time
         pset["exposure_time"] = calculate_exposure_times(
             full_counts, l1b_goodtimes_only
         )
         pset.attrs = attr_mgr.get_global_attributes(logical_source)
-        # TODO: Temp fix before adding attribute variables.
-        # CDF won't open if DEPEND_0 is not deleted currently.
-        del pset["epoch"].attrs["DEPEND_0"]
 
         pset = pset.assign_coords(
             {
-                "energy": np.arange(1, 8),
-                "longitude": np.arange(3600),
-                "latitude": np.arange(40),
+                "esa_energy_step": ESA_ENERGY_STEPS,
+                "spin_angle": SPIN_ANGLE_BIN_CENTERS,
+                "off_angle": OFF_ANGLE_BIN_CENTERS,
             }
         )
 
@@ -141,7 +189,7 @@ def filter_goodtimes(l1b_de: xr.Dataset, anc_dependencies: list) -> xr.Dataset:
         Filtered L1B Direct Event dataset.
     """
     # the goodtimes are currently the only ancillary file needed for L1C processing
-    goodtimes_table_df = pd.read_csv(anc_dependencies[0])
+    goodtimes_table_df = lo_ancillary.read_ancillary_file(anc_dependencies[0])
 
     # convert goodtimes from MET to TTJ2000
     goodtimes_start = met_to_ttj2000ns(goodtimes_table_df["GoodTime_strt"])
@@ -151,7 +199,7 @@ def filter_goodtimes(l1b_de: xr.Dataset, anc_dependencies: list) -> xr.Dataset:
     goodtimes_mask = np.zeros_like(l1b_de["epoch"], dtype=bool)
 
     # Iterate over the good times and create a mask
-    for start, end in zip(goodtimes_start, goodtimes_end):
+    for start, end in zip(goodtimes_start, goodtimes_end, strict=False):
        goodtimes_mask |= (l1b_de["epoch"] >= start) & (l1b_de["epoch"] < end)
 
     # Filter the dataset using the mask
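
The `strict` keyword to `zip` exists on Python 3.10+; passing `strict=False` simply makes the long-standing truncation behavior explicit (likely to satisfy a linter rule such as B905). A minimal illustration:

```python
starts = [10, 20, 30]
ends = [15, 25]  # one entry short

# strict=False keeps the default behavior: stop at the shorter iterable
print(list(zip(starts, ends, strict=False)))  # [(10, 15), (20, 25)]

# strict=True would raise instead:
# ValueError: zip() argument 2 is shorter than argument 1
```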
@@ -249,7 +297,7 @@ def create_pset_counts(
 
     counts = xr.DataArray(
         data=hist.astype(np.int16),
-        dims=["epoch", "energy", "longitude", "latitude"],
+        dims=PSET_DIMS,
     )
 
     return counts
@@ -275,11 +323,6 @@ def calculate_exposure_times(counts: xr.DataArray, l1b_de: xr.Dat
     exposure_time : xarray.DataArray
         The exposure times for the L1B Direct Event dataset.
     """
-    # Create bin edges
-    lon_edges = np.arange(3601)
-    lat_edges = np.arange(41)
-    energy_edges = np.arange(8)
-
     data = np.column_stack(
         (l1b_de["esa_step"], l1b_de["pointing_bin_lon"], l1b_de["pointing_bin_lat"])
     )
@@ -289,14 +332,19 @@
         # exposure time equation from Lo Alg Document 10.1.1.4
         4 * l1b_de["avg_spin_durations"].to_numpy() / 3600,
         statistic="mean",
-        bins=[energy_edges, lon_edges, lat_edges],
+        # NOTE: The l1b pointing_bin_lon is bin number, not actual angle
+        bins=[
+            np.arange(N_ESA_ENERGY_STEPS + 1),
+            np.arange(N_SPIN_ANGLE_BINS + 1),
+            np.arange(N_OFF_ANGLE_BINS + 1),
+        ],
     )
 
     stat = result.statistic[np.newaxis, :, :, :]
 
     exposure_time = xr.DataArray(
         data=stat.astype(np.float16),
-        dims=["epoch", "energy", "longitude", "latitude"],
+        dims=PSET_DIMS,
     )
 
     return exposure_time
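
For context, `scipy.stats.binned_statistic_dd` averages the per-event values that fall into each (energy step, spin-angle bin, off-angle bin) cell, and empty cells come back as NaN. A minimal sketch on a much coarser grid than the real 7×3600×40 one:

```python
import numpy as np
from scipy.stats import binned_statistic_dd

# Three events as (esa_step, lon_bin, lat_bin) bin-index triples,
# mirroring the column_stack of esa_step/pointing_bin_lon/pointing_bin_lat
sample = np.array([[0, 1, 0], [0, 1, 0], [2, 0, 1]])
values = np.array([0.5, 0.7, 0.9])  # per-event exposure contributions

result = binned_statistic_dd(
    sample,
    values,
    statistic="mean",
    bins=[np.arange(4), np.arange(3), np.arange(3)],  # 3 x 2 x 2 grid
)
print(result.statistic[0, 1, 0])            # 0.6 - mean of the two co-binned events
print(np.isnan(result.statistic[1, 0, 0]))  # True - empty bins are NaN
```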
@@ -328,8 +376,6 @@ def create_datasets(
     # can be used direction
     epoch_converted_time = [1]
 
-    # Create a data array for the epoch time
-    # TODO: might need to update the attrs to use new YAML file
     epoch_time = xr.DataArray(
         data=epoch_converted_time,
         name="epoch",
@@ -338,38 +384,54 @@ def create_datasets(
     )
 
     if logical_source == "imap_lo_l1c_pset":
-        esa_step = xr.DataArray(
-            data=[1, 2, 3, 4, 5, 6, 7],
-            name="esa_step",
-            dims=["esa_step"],
-            attrs=attr_mgr.get_variable_attributes("esa_step"),
-        )
-        pointing_bins = xr.DataArray(
-            data=np.arange(3600),
-            name="pointing_bins",
-            dims=["pointing_bins"],
-            attrs=attr_mgr.get_variable_attributes("pointing_bins"),
+        esa_energy_step = xr.DataArray(
+            data=ESA_ENERGY_STEPS,
+            name="esa_energy_step",
+            dims=["esa_energy_step"],
+            attrs=attr_mgr.get_variable_attributes("esa_energy_step"),
         )
-
-        esa_step_label = xr.DataArray(
-            esa_step.values.astype(str),
+        esa_energy_step_label = xr.DataArray(
+            esa_energy_step.values.astype(str),
             name="esa_step_label",
             dims=["esa_step_label"],
             attrs=attr_mgr.get_variable_attributes("esa_step_label"),
         )
-        pointing_bins_label = xr.DataArray(
-            pointing_bins.values.astype(str),
-            name="pointing_bins_label",
-            dims=["pointing_bins_label"],
-            attrs=attr_mgr.get_variable_attributes("pointing_bins_label"),
+
+        spin_angle = xr.DataArray(
+            data=SPIN_ANGLE_BIN_CENTERS,
+            name="spin_angle",
+            dims=["spin_angle"],
+            attrs=attr_mgr.get_variable_attributes("spin_angle"),
+        )
+        spin_angle_label = xr.DataArray(
+            spin_angle.values.astype(str),
+            name="spin_angle_label",
+            dims=["spin_angle_label"],
+            attrs=attr_mgr.get_variable_attributes("spin_angle_label"),
+        )
+
+        off_angle = xr.DataArray(
+            data=OFF_ANGLE_BIN_CENTERS,
+            name="off_angle",
+            dims=["off_angle"],
+            attrs=attr_mgr.get_variable_attributes("off_angle"),
+        )
+        off_angle_label = xr.DataArray(
+            off_angle.values.astype(str),
+            name="off_angle_label",
+            dims=["off_angle_label"],
+            attrs=attr_mgr.get_variable_attributes("off_angle_label"),
         )
+
         dataset = xr.Dataset(
             coords={
                 "epoch": epoch_time,
-                "pointing_bins": pointing_bins,
-                "pointing_bins_label": pointing_bins_label,
-                "esa_step": esa_step,
-                "esa_step_label": esa_step_label,
+                "esa_energy_step": esa_energy_step,
+                "esa_energy_step_label": esa_energy_step_label,
+                "spin_angle": spin_angle,
+                "spin_angle_label": spin_angle_label,
+                "off_angle": off_angle,
+                "off_angle_label": off_angle_label,
             },
             attrs=attr_mgr.get_global_attributes(logical_source),
         )
@@ -389,30 +451,34 @@ def create_datasets(
 
         # Create a data array for the current field and add it to the dataset
         # TODO: TEMPORARY. need to update to use l1b data once that's available.
-        if field in ["pointing_start", "pointing_end", "mode", "pivot_angle"]:
+        if field in [
+            "pointing_start_met",
+            "pointing_end_met",
+            "esa_mode",
+            "pivot_angle",
+        ]:
             dataset[field] = xr.DataArray(
                 data=[1],
                 dims=dims,
                 attrs=attr_mgr.get_variable_attributes(field),
             )
         # TODO: This is temporary.
-        # The data type will be set in the data class when that's created
         elif field == "exposure_time":
             dataset[field] = xr.DataArray(
-                data=np.ones((1, 7), dtype=np.float16),
+                data=np.ones((1, 7, 3600, 40), dtype=np.float16),
                 dims=dims,
                 attrs=attr_mgr.get_variable_attributes(field),
             )
 
-        elif "rate" in field:
+        elif "rates" in field:
             dataset[field] = xr.DataArray(
-                data=np.ones((1, 3600, 7), dtype=np.float16),
+                data=np.ones(PSET_SHAPE, dtype=np.float16),
                 dims=dims,
                 attrs=attr_mgr.get_variable_attributes(field),
             )
         else:
             dataset[field] = xr.DataArray(
-                data=np.ones((1, 3600, 7), dtype=np.int16),
+                data=np.ones(PSET_SHAPE, dtype=np.int16),
                 dims=dims,
                 attrs=attr_mgr.get_variable_attributes(field),
             )
imap_processing/lo/l2/lo_l2.py
@@ -5,12 +5,13 @@ import xarray as xr
 
 from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
 from imap_processing.ena_maps import ena_maps
-from imap_processing.ena_maps.ena_maps import RectangularSkyMap
-from imap_processing.spice import geometry
-from imap_processing.spice.geometry import SpiceFrame
+from imap_processing.ena_maps.ena_maps import AbstractSkyMap, RectangularSkyMap
+from imap_processing.ena_maps.utils.naming import MapDescriptor
 
 
-def lo_l2(sci_dependencies: dict, anc_dependencies: list) -> list[xr.Dataset]:
+def lo_l2(
+    sci_dependencies: dict, anc_dependencies: list, descriptor: str
+) -> list[xr.Dataset]:
     """
     Will process IMAP-Lo L1C data into Le CDF data products.
@@ -20,6 +21,8 @@ def lo_l2(sci_dependencies: dict, anc_dependencies: list) -> list[xr.Dataset]:
         Dictionary of datasets needed for L2 data product creation in xarray Datasets.
     anc_dependencies : list
         Ancillary files needed for L2 data product creation.
+    descriptor : str
+        The map descriptor to be produced.
 
     Returns
     -------
@@ -37,18 +40,19 @@ def lo_l2(sci_dependencies: dict, anc_dependencies: list) -> list[xr.Dataset]:
     logical_source = "imap_lo_l2_l090-ena-h-sf-nsp-ram-hae-6deg-3mo"
     psets = sci_dependencies["imap_lo_l1c_pset"]
 
-    # Create the rectangular sky map from the pointing set.
-    lo_rect_map = project_pset_to_rect_map(
-        psets, spacing_deg=6, spice_frame=geometry.SpiceFrame.ECLIPJ2000
-    )
+    # Create an AbstractSkyMap (Rectangular or HEALPIX) from the pointing set
+    lo_sky_map = project_pset_to_sky_map(psets, descriptor)
+    if not isinstance(lo_sky_map, RectangularSkyMap):
+        raise NotImplementedError("HEALPix map output not supported for Lo")
+
     # Add the hydrogen rates to the rectangular map dataset.
-    lo_rect_map.data_1d["h_rate"] = calculate_rates(
-        lo_rect_map.data_1d["h_counts"], lo_rect_map.data_1d["exposure_time"]
+    lo_sky_map.data_1d["h_rate"] = calculate_rates(
+        lo_sky_map.data_1d["h_counts"], lo_sky_map.data_1d["exposure_time"]
     )
     # Add the hydrogen flux to the rectangular map dataset.
-    lo_rect_map.data_1d["h_flux"] = calculate_fluxes(lo_rect_map.data_1d["h_rate"])
+    lo_sky_map.data_1d["h_flux"] = calculate_fluxes(lo_sky_map.data_1d["h_rate"])
     # Create the dataset from the rectangular map.
-    lo_rect_map_ds = lo_rect_map.to_dataset()
+    lo_rect_map_ds = lo_sky_map.to_dataset()
     # Add the attributes to the dataset.
     lo_rect_map_ds = add_attributes(
         lo_rect_map_ds, attr_mgr, logical_source=logical_source
@@ -57,41 +61,37 @@ def lo_l2(sci_dependencies: dict, anc_dependencies: list) -> list[xr.Dataset]:
     return [lo_rect_map_ds]
 
 
-def project_pset_to_rect_map(
-    psets: list[xr.Dataset], spacing_deg: int, spice_frame: SpiceFrame
-) -> RectangularSkyMap:
+def project_pset_to_sky_map(psets: list[xr.Dataset], descriptor: str) -> AbstractSkyMap:
     """
-    Project the pointing set to a rectangular sky map.
+    Project the pointing set to a sky map.
 
-    This function is used to create a rectangular sky map from the pointing set
+    This function is used to create a sky map from the pointing set
     data in the L1C dataset.
 
     Parameters
     ----------
     psets : list[xr.Dataset]
         List of pointing sets in xarray Dataset format.
-    spacing_deg : int
-        The spacing in degrees for the rectangular sky map.
-    spice_frame : SpiceFrame
-        The SPICE frame to use for the rectangular sky map projection.
+    descriptor : str
+        The map descriptor for the map to be produced,
+        contains details about the map projection.
 
     Returns
     -------
-    RectangularSkyMap
-        The rectangular sky map created from the pointing set data.
+    AbstractSkyMap
+        The sky map created from the pointing set data.
     """
-    lo_rect_map = ena_maps.RectangularSkyMap(
-        spacing_deg=spacing_deg,
-        spice_frame=spice_frame,
-    )
+    map_descriptor = MapDescriptor.from_string(descriptor)
+    output_map = map_descriptor.to_empty_map()
+
     for pset in psets:
         lo_pset = ena_maps.LoPointingSet(pset)
-        lo_rect_map.project_pset_values_to_map(
+        output_map.project_pset_values_to_map(
             pointing_set=lo_pset,
             value_keys=["h_counts", "exposure_time"],
             index_match_method=ena_maps.IndexMatchMethod.PUSH,
         )
-    return lo_rect_map
+    return output_map
 
 
 def calculate_rates(counts: xr.DataArray, exposure_time: xr.DataArray) -> xr.DataArray:
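
The bodies of `calculate_rates` and `calculate_fluxes` are not part of this diff; conceptually a rate is counts divided by exposure time. A hedged xarray sketch of that step (the zero-exposure guard is an assumption for illustration, not the released implementation):

```python
import numpy as np
import xarray as xr

def rates_sketch(counts: xr.DataArray, exposure_time: xr.DataArray) -> xr.DataArray:
    # Counts per second; pixels with no exposure come out NaN rather than inf
    return counts / exposure_time.where(exposure_time > 0)

counts = xr.DataArray(np.array([4.0, 3.0]), dims="pixel")
exposure = xr.DataArray(np.array([2.0, 0.0]), dims="pixel")
print(rates_sketch(counts, exposure).values)  # [ 2. nan]
```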
imap_processing/lo/lo_ancillary.py (new file)
@@ -0,0 +1,55 @@
+"""Ancillary file reading for IMAP-Lo processing."""
+
+from pathlib import Path
+
+import pandas as pd
+
+# convert the YYYYDDD datetime format directly upon reading
+_CONVERTERS = {
+    "YYYYDDD": lambda x: pd.to_datetime(str(x), format="%Y%j"),
+    "#YYYYDDD": lambda x: pd.to_datetime(str(x), format="%Y%j"),
+    "YYYYDDD_strt": lambda x: pd.to_datetime(str(x), format="%Y%j"),
+    "YYYYDDD_end": lambda x: pd.to_datetime(str(x), format="%Y%j"),
+}
+
+# Columns in the csv files to rename for consistency
+_RENAME_COLUMNS = {
+    "YYYYDDD": "Date",
+    "#YYYYDDD": "Date",
+    "#Comments": "Comments",
+    "YYYYDDD_strt": "StartDate",
+    "YYYYDDD_end": "EndDate",
+}
+
+
+def read_ancillary_file(ancillary_file: str | Path) -> pd.DataFrame:
+    """
+    Read a generic ancillary CSV file into a pandas DataFrame.
+
+    Parameters
+    ----------
+    ancillary_file : str or Path
+        Path to the ancillary CSV file.
+
+    Returns
+    -------
+    pd.DataFrame
+        DataFrame containing the ancillary data.
+    """
+    skiprows = None
+    if "esa-mode-lut" in str(ancillary_file):
+        # skip the first row which is a comment
+        skiprows = [0]
+    elif "geometric-factor" in str(ancillary_file):
+        # skip the rows with comment headers indicating Hi_Res and Hi_Thr
+        skiprows = [1, 38]
+    df = pd.read_csv(ancillary_file, converters=_CONVERTERS, skiprows=skiprows)
+    df = df.rename(columns=_RENAME_COLUMNS)
+
+    if "geometric-factor" in str(ancillary_file):
+        # Add an ESA mode column based on the known structure of the file.
+        # The first 36 rows are ESA mode 0 (HiRes), the second 36 are ESA mode 1 (HiThr)
+        df["esa_mode"] = 0
+        df.loc[36:, "esa_mode"] = 1
+
+    return df
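
A usage sketch of the new reader, exercising the converter/rename logic on an in-memory table (the column layout is inferred from the `GoodTime_strt` usage in `filter_goodtimes` and is an assumption; real files live in the ancillary store):

```python
import io
import pandas as pd

csv_text = (
    "YYYYDDD_strt,YYYYDDD_end,GoodTime_strt,GoodTime_end\n"
    "2025266,2025267,100000.0,186400.0\n"
)

# The same treatment read_ancillary_file applies: parse YYYYDDD
# day-of-year dates on read, then rename columns to friendlier names.
converters = {
    c: lambda x: pd.to_datetime(str(x), format="%Y%j")
    for c in ("YYYYDDD_strt", "YYYYDDD_end")
}
df = pd.read_csv(io.StringIO(csv_text), converters=converters)
df = df.rename(columns={"YYYYDDD_strt": "StartDate", "YYYYDDD_end": "EndDate"})
print(df["StartDate"].iloc[0])  # 2025-09-23 00:00:00
```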
imap_processing/mag/l1a/mag_l1a.py
@@ -328,6 +328,7 @@ def generate_dataset(
     global_attributes = attribute_manager.get_global_attributes(logical_file_id)
     global_attributes["is_mago"] = str(bool(single_file_l1a.is_mago))
     global_attributes["is_active"] = str(bool(single_file_l1a.is_active))
+    global_attributes["all_vectors_primary"] = single_file_l1a.all_vectors_primary()
     global_attributes["vectors_per_second"] = (
         single_file_l1a.vectors_per_second_attribute()
     )
imap_processing/mag/l1a/mag_l1a_data.py
@@ -15,6 +15,7 @@ from imap_processing.mag.constants import (
     MAX_COMPRESSED_VECTOR_BITS,
     MAX_FINE_TIME,
     RANGE_BIT_WIDTH,
+    PrimarySensor,
 )
 from imap_processing.spice.time import met_to_ttj2000ns
 
@@ -241,6 +242,7 @@ class MagL1a:
     twos_complement()
     update_compression_array()
     vectors_per_second_attribute()
+    all_vectors_primary()
     """
 
     is_mago: bool
@@ -1117,3 +1119,27 @@ class MagL1a:
             last_vectors_per_second = vecsec
 
         return output_str
+
+    def all_vectors_primary(self) -> bool:
+        """
+        Check if all vectors in the file are from the primary sensor.
+
+        For MAGO datasets, this checks if MAGO was consistently the primary sensor
+        across all packets. For MAGI datasets, this checks if MAGI was consistently
+        the primary sensor across all packets.
+
+        Returns
+        -------
+        bool
+            True if all vectors are from the primary sensor across all packets,
+            False otherwise.
+        """
+        expected_primary_value = (
+            PrimarySensor.MAGO.value if self.is_mago else PrimarySensor.MAGI.value
+        )
+
+        for _, packet in self.packet_definitions.items():
+            if packet.mago_is_primary != expected_primary_value:
+                return False
+
+        return True
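
Behaviorally, the new method is an `all()` over the per-packet primary-sensor flags. An equivalent one-liner sketch (with integer stand-ins for the `PrimarySensor` enum values, which are not shown in this diff):

```python
def all_vectors_primary_sketch(packet_definitions, is_mago, mago=1, magi=0):
    # mago/magi stand in for PrimarySensor.MAGO.value / PrimarySensor.MAGI.value
    expected = mago if is_mago else magi
    return all(p.mago_is_primary == expected for p in packet_definitions.values())
```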
imap_processing/mag/l1b/mag_l1b.py
@@ -74,7 +74,6 @@ def mag_l1b(
     calibration_matrix, time_shift = retrieve_matrix_from_l1b_calibration(
         calibration_dataset, day_to_process, is_mago
     )
-    print(f"Using calibration matrix: {calibration_matrix}")
 
     output_dataset = mag_l1b_processing(
         input_dataset, calibration_matrix, time_shift, mag_attributes, source
@@ -182,6 +181,9 @@ def mag_l1b_processing(
     try:
         global_attributes["is_mago"] = input_dataset.attrs["is_mago"]
         global_attributes["is_active"] = input_dataset.attrs["is_active"]
+        global_attributes["all_vectors_primary"] = input_dataset.attrs[
+            "all_vectors_primary"
+        ]
         global_attributes["vectors_per_second"] = timeshift_vectors_per_second(
             input_dataset.attrs["vectors_per_second"], time_shift
         )
@@ -245,7 +247,6 @@ def retrieve_matrix_from_l1b_calibration(
        The calibration matrix and time shift. These can be passed directly into
        update_vector, calibrate_vector, and shift_time.
    """
-    print(f"Finding data for day {day}")
    if is_mago:
        calibration_matrix = calibration_dataset.sel(epoch=day)["MFOTOURFO"]
        time_shift = calibration_dataset.sel(epoch=day)["OTS"]
imap_processing/mag/l1c/interpolation_methods.py
@@ -3,7 +3,6 @@
 
 import logging
 from enum import Enum
-from typing import Optional
 
 import numpy as np
 from scipy.interpolate import make_interp_spline
@@ -44,8 +43,8 @@ def linear(
     input_vectors: np.ndarray,
     input_timestamps: np.ndarray,
     output_timestamps: np.ndarray,
-    input_rate: Optional[VecSec] = None,
-    output_rate: Optional[VecSec] = None,
+    input_rate: VecSec | None = None,
+    output_rate: VecSec | None = None,
 ) -> np.ndarray:
     """
     Linear interpolation of input vectors to output timestamps.
@@ -80,8 +79,8 @@ def quadratic(
     input_vectors: np.ndarray,
     input_timestamps: np.ndarray,
     output_timestamps: np.ndarray,
-    input_rate: Optional[VecSec] = None,
-    output_rate: Optional[VecSec] = None,
+    input_rate: VecSec | None = None,
+    output_rate: VecSec | None = None,
 ) -> np.ndarray:
     """
     Quadratic interpolation of input vectors to output timestamps.
@@ -115,8 +114,8 @@ def cubic(
     input_vectors: np.ndarray,
     input_timestamps: np.ndarray,
     output_timestamps: np.ndarray,
-    input_rate: Optional[VecSec] = None,
-    output_rate: Optional[VecSec] = None,
+    input_rate: VecSec | None = None,
+    output_rate: VecSec | None = None,
 ) -> np.ndarray:
     """
     Cubic interpolation of input vectors to output timestamps.
@@ -175,8 +174,8 @@ def cic_filter(
     input_vectors: np.ndarray,
     input_timestamps: np.ndarray,
     output_timestamps: np.ndarray,
-    input_rate: Optional[VecSec],
-    output_rate: Optional[VecSec],
+    input_rate: VecSec | None,
+    output_rate: VecSec | None,
 ):
     """
     Apply CIC filter to data before interpolating.
@@ -242,8 +241,8 @@ def linear_filtered(
     input_vectors: np.ndarray,
     input_timestamps: np.ndarray,
     output_timestamps: np.ndarray,
-    input_rate: Optional[VecSec] = None,
-    output_rate: Optional[VecSec] = None,
+    input_rate: VecSec | None = None,
+    output_rate: VecSec | None = None,
 ) -> np.ndarray:
     """
     Linear filtered interpolation of input vectors to output timestamps.
@@ -281,8 +280,8 @@ def quadratic_filtered(
     input_vectors: np.ndarray,
     input_timestamps: np.ndarray,
     output_timestamps: np.ndarray,
-    input_rate: Optional[VecSec] = None,
-    output_rate: Optional[VecSec] = None,
+    input_rate: VecSec | None = None,
+    output_rate: VecSec | None = None,
 ) -> np.ndarray:
     """
     Quadratic filtered interpolation of input vectors to output timestamps.
@@ -320,8 +319,8 @@ def cubic_filtered(
     input_vectors: np.ndarray,
     input_timestamps: np.ndarray,
     output_timestamps: np.ndarray,
-    input_rate: Optional[VecSec] = None,
-    output_rate: Optional[VecSec] = None,
+    input_rate: VecSec | None = None,
+    output_rate: VecSec | None = None,
 ) -> np.ndarray:
     """
     Cubic filtered interpolation of input vectors to output timestamps.
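
The `Optional[VecSec]` → `VecSec | None` rewrites across all seven interpolators are purely syntactic: PEP 604 union syntax, valid at runtime on Python 3.10+. The two spellings produce the same annotation:

```python
from typing import Optional

def f_old(rate: Optional[float] = None) -> float:
    return 1.0 if rate is None else rate

def f_new(rate: float | None = None) -> float:  # PEP 604, Python 3.10+
    return 1.0 if rate is None else rate

assert f_old() == f_new() == 1.0
```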