dkist-processing-common 10.9.0__py3-none-any.whl → 11.0.0__py3-none-any.whl

This diff compares the contents of two package versions that have been publicly released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
@@ -0,0 +1,27 @@
+ """Return hard-coded EarthLocation of the DKIST.
+
+ Cartesian geocentric coordinates of DKIST on Earth as retrieved from
+ https://github.com/astropy/astropy-data/blob/gh-pages/coordinates/sites.json#L838
+ """
+ import astropy.units as u
+ from astropy.coordinates import EarthLocation
+
+
+ _dkist_site_info = {
+     "aliases": ["DKIST", "ATST"],
+     "name": "Daniel K. Inouye Solar Telescope",
+     "elevation": 3067,
+     "elevation_unit": "meter",
+     "latitude": 20.7067,
+     "latitude_unit": "degree",
+     "longitude": 203.7436,
+     "longitude_unit": "degree",
+     "timezone": "US/Hawaii",
+     "source": "DKIST website: https://www.nso.edu/telescopes/dki-solar-telescope/",
+ }
+
+ location_of_dkist: EarthLocation = EarthLocation.from_geodetic(
+     _dkist_site_info["longitude"] * u.Unit(_dkist_site_info["longitude_unit"]),
+     _dkist_site_info["latitude"] * u.Unit(_dkist_site_info["latitude_unit"]),
+     _dkist_site_info["elevation"] * u.Unit(_dkist_site_info["elevation_unit"]),
+ )
@@ -23,3 +23,4 @@ class MetricCode(StrEnum):
      range = "RANGE"
      sensitivity = "SENSITIVITY"
      task_types = "TASK_TYPES"
+     wavecal_fit = "WAVECAL_FIT"
@@ -2,6 +2,9 @@
  from typing import Any

  from pydantic import BaseModel
+ from pydantic import Field
+ from pydantic import field_validator
+ from pydantic_core.core_schema import ValidationInfo


  class Plot2D(BaseModel):
@@ -13,6 +16,50 @@ class Plot2D(BaseModel):
      series_name: str | None = None
      ylabel_horizontal: bool = False
      ylim: tuple[float, float] | None = None
+     plot_kwargs: dict[str, dict[str, Any]] = Field(default_factory=dict)
+     sort_series: bool = True
+
+
+ class VerticalMultiPanePlot2D(BaseModel):
+     """
+     Support class to hold a multi-pane plot with plots stacked vertically.
+
+     This type of metric is really geared towards plots that share an X axis and have no gap between them. If you just
+     want two separate plots it's probably better to use a list of `Plot2D` objects.
+     """
+
+     top_to_bottom_plot_list: list[Plot2D]
+     match_x_axes: bool = True
+     no_gap: bool = True
+     top_to_bottom_height_ratios: list[float] | None = None
+
+     @field_validator("top_to_bottom_height_ratios")
+     @classmethod
+     def ensure_same_number_of_height_ratios_and_plots(
+         cls, height_ratios: list[float] | None, info: ValidationInfo
+     ) -> list[float]:
+         """
+         Make sure that the number of height ratios is the same as the number of plots.
+
+         Also populates default, same-size ratios if no ratios were given.
+         """
+         try:
+             plot_list = info.data["top_to_bottom_plot_list"]
+         except KeyError:
+             # The plot list didn't validate for some reason. We're about to error anyway.
+             return [1.0]
+
+         num_plots = len(plot_list)
+         if height_ratios is None:
+             return [1.0] * num_plots
+
+         if len(height_ratios) != num_plots:
+             raise ValueError(
+                 f"The number of items in `top_to_bottom_height_ratios` list ({len(height_ratios)}) is not "
+                 f"the same as the number of plots ({num_plots})"
+             )
+
+         return height_ratios


  class SimpleTable(BaseModel):
@@ -57,11 +104,7 @@ class PlotRaincloud(BaseModel):


  class ReportMetric(BaseModel):
-     """
-     A Quality Report is made up of a list of metrics with the schema defined by this class.
-
-     Additionally, this class can produce a Flowable or List of Flowables to be render the metric in the PDF Report
-     """
+     """A Quality Report is made up of a list of metrics with the schema defined by this class."""

      name: str
      description: str
@@ -69,6 +112,7 @@ class ReportMetric(BaseModel):
      facet: str | None = None
      statement: str | list[str] | None = None
      plot_data: Plot2D | list[Plot2D] | None = None
+     vertical_multi_pane_plot_data: VerticalMultiPanePlot2D | None = None
      histogram_data: PlotHistogram | list[PlotHistogram] | None = None
      table_data: SimpleTable | list[SimpleTable] | None = None
      modmat_data: ModulationMatrixHistograms | None = None
@@ -17,7 +17,7 @@ class TotalDspsRepeatsBud(TaskUniqueBud):
          super().__init__(
              constant_name=BudName.num_dsps_repeats.value,
              metadata_key="num_dsps_repeats",
-             ip_task_type=TaskName.observe.value,
+             ip_task_types=TaskName.observe.value,
          )


@@ -15,7 +15,7 @@ class IdBud(TaskUniqueBud):
          super().__init__(
              constant_name=constant_name,
              metadata_key=metadata_key,
-             ip_task_type=TaskName.observe.value,
+             ip_task_types=TaskName.observe.value,
          )


@@ -74,7 +74,7 @@ class NearFloatBud(Stem):

  class TaskNearFloatBud(NearFloatBud):
      """
-     Subclass of `NearFloatBud` that only considers objects that have a specific task type.
+     Subclass of `NearFloatBud` that only considers objects that have specific task types.

      Parameters
      ----------
@@ -84,8 +84,8 @@ class TaskNearFloatBud(NearFloatBud):
      metadata_key
          The metadata key associated with the constant

-     ip_task_type
-         Only consider objects whose parsed header IP task type matches this string
+     ip_task_types
+         Only consider objects whose parsed header IP task type matches a string in this list

      task_type_parsing_function
          The function used to convert a header into an IP task type
@@ -98,7 +98,7 @@ class TaskNearFloatBud(NearFloatBud):
          self,
          constant_name: str,
          metadata_key: str,
-         ip_task_type: str,
+         ip_task_types: str | list[str],
          tolerance: float,
          task_type_parsing_function: Callable = passthrough_header_ip_task,
      ):
@@ -106,14 +106,16 @@ class TaskNearFloatBud(NearFloatBud):
              constant_name=constant_name, metadata_key=metadata_key, tolerance=tolerance
          )

-         self.ip_task_type = ip_task_type.casefold()
+         if isinstance(ip_task_types, str):
+             ip_task_types = [ip_task_types]
+         self.ip_task_types = [task.casefold() for task in ip_task_types]
          self.parsing_function = task_type_parsing_function

      def setter(self, fits_obj: L0FitsAccess):
          """Ingest an object only if its parsed IP task type matches what's desired."""
          task = self.parsing_function(fits_obj)

-         if task.casefold() == self.ip_task_type:
+         if task.casefold() in self.ip_task_types:
              return super().setter(fits_obj)

          return SpilledDirt
@@ -20,7 +20,7 @@ class RetarderNameBud(TaskUniqueBud):
          super().__init__(
              constant_name=BudName.retarder_name.value,
              metadata_key="gos_retarder_status",
-             ip_task_type=TaskName.polcal.value,
+             ip_task_types=TaskName.polcal.value,
          )

      def getter(self, key) -> str:
@@ -29,7 +29,7 @@ class ObsIpStartTimeBud(TaskUniqueBud):
          super().__init__(
              constant_name=BudName.obs_ip_start_time.value,
              metadata_key="ip_start_time",
-             ip_task_type=TaskName.observe.value,
+             ip_task_types=TaskName.observe.value,
          )


@@ -187,31 +187,48 @@ class ReadoutExpTimeFlower(TimeFlowerBase):

  class TaskTimeBudBase(Stem):
      """
-     Base class for making time-related buds that are computed for a specific task type.
+     Base class for making time-related buds that are computed for specific task types.

      By "time-related" we mean values that generally need rounding when ingested into the database.

      Complicated parsing of the header into a task type can be achieved by passing in a different
      header task parsing function.
+
+     Parameters
+     ----------
+     constant_name
+         The name for the constant to be defined
+
+     metadata_key
+         The metadata key associated with the constant
+
+     ip_task_types
+         Only consider objects whose parsed header IP task type matches a string in this list
+
+     header_task_parsing_func
+         The function used to convert a header into an IP task type
      """

      def __init__(
          self,
          stem_name: str,
          metadata_key: str,
-         ip_task_type: str,
+         ip_task_types: str | list[str],
          header_task_parsing_func: Callable = passthrough_header_ip_task,
      ):
          super().__init__(stem_name=stem_name)
+
+         if isinstance(ip_task_types, str):
+             ip_task_types = [ip_task_types]
          self.metadata_key = metadata_key
-         self.ip_task_type = ip_task_type
+         self.ip_task_types = [task.casefold() for task in ip_task_types]
          self.header_parsing_function = header_task_parsing_func

      def setter(self, fits_obj: L0FitsAccess):
          """Return the desired metadata key only if the parsed task type matches the Bud's task type."""
          task = self.header_parsing_function(fits_obj)

-         if task.casefold() == self.ip_task_type.casefold():
+         if task.casefold() in self.ip_task_types:
              raw_value = getattr(fits_obj, self.metadata_key)
              return round(raw_value, EXP_TIME_ROUND_DIGITS)

@@ -229,13 +246,13 @@ class TaskExposureTimesBud(TaskTimeBudBase):
      def __init__(
          self,
          stem_name: str,
-         ip_task_type: str,
+         ip_task_types: str | list[str],
          header_task_parsing_func: Callable = passthrough_header_ip_task,
      ):
          super().__init__(
              stem_name=stem_name,
              metadata_key="fpa_exposure_time_ms",
-             ip_task_type=ip_task_type,
+             ip_task_types=ip_task_types,
              header_task_parsing_func=header_task_parsing_func,
          )

@@ -246,12 +263,12 @@ class TaskReadoutExpTimesBud(TaskTimeBudBase):
      def __init__(
          self,
          stem_name: str,
-         ip_task_type: str,
+         ip_task_types: str | list[str],
          header_task_parsing_func: Callable = passthrough_header_ip_task,
      ):
          super().__init__(
              stem_name=stem_name,
              metadata_key="sensor_readout_exposure_time_ms",
-             ip_task_type=ip_task_type,
+             ip_task_types=ip_task_types,
              header_task_parsing_func=header_task_parsing_func,
          )
@@ -64,7 +64,7 @@ class UniqueBud(Stem):

  class TaskUniqueBud(UniqueBud):
      """
-     Subclass of `UniqueBud` that only considers objects that have a specific task type.
+     Subclass of `UniqueBud` that only considers objects that have specific task types.

      Parameters
      ----------
@@ -74,8 +74,8 @@ class TaskUniqueBud(UniqueBud):
      metadata_key
          The metadata key associated with the constant

-     ip_task_type
-         Only consider objects whose parsed header IP task type matches this string
+     ip_task_types
+         Only consider objects whose parsed header IP task type matches a string in this list

      task_type_parsing_function
          The function used to convert a header into an IP task type
@@ -85,19 +85,21 @@ class TaskUniqueBud(UniqueBud):
          self,
          constant_name: str,
          metadata_key: str,
-         ip_task_type: str,
+         ip_task_types: str | list[str],
          task_type_parsing_function: Callable = passthrough_header_ip_task,
      ):
          super().__init__(constant_name=constant_name, metadata_key=metadata_key)

-         self.ip_task_type = ip_task_type.casefold()
+         if isinstance(ip_task_types, str):
+             ip_task_types = [ip_task_types]
+         self.ip_task_types = [task.casefold() for task in ip_task_types]
          self.parsing_function = task_type_parsing_function

      def setter(self, fits_obj: L0FitsAccess):
          """Ingest an object only if its parsed IP task type matches what's desired."""
          task = self.parsing_function(fits_obj)

-         if task.casefold() == self.ip_task_type:
+         if task.casefold() in self.ip_task_types:
              return super().setter(fits_obj)

          return SpilledDirt
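
With the rename from `ip_task_type` to `ip_task_types`, a bud now accepts a single task type or a list of them; both forms are casefolded so header task types match case-insensitively. A short sketch (the import path and the second bud's constant name are illustrative, not taken from the package):

    # Import path assumed for illustration; use wherever TaskUniqueBud is defined.
    from dkist_processing_common.parsers.unique_bud import TaskUniqueBud

    # A single string still works and is wrapped into a one-element list.
    single_task_bud = TaskUniqueBud(
        constant_name="wavelength", metadata_key="wavelength", ip_task_types="observe"
    )
    print(single_task_bud.ip_task_types)  # ["observe"]

    # A list lets one bud accept frames from several task types.
    multi_task_bud = TaskUniqueBud(
        constant_name="exposure_conditions",
        metadata_key="fpa_exposure_time_ms",
        ip_task_types=["DARK", "Gain"],
    )
    print(multi_task_bud.ip_task_types)  # ["dark", "gain"] -- matching is case-insensitive
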
@@ -11,5 +11,5 @@ class ObserveWavelengthBud(TaskUniqueBud):
          super().__init__(
              constant_name=BudName.wavelength.value,
              metadata_key="wavelength",
-             ip_task_type=TaskName.observe.value,
+             ip_task_types=TaskName.observe.value,
          )
@@ -11,10 +11,15 @@ from dkist_processing_common.tasks.mixin.quality._metrics import _PolcalQualityM
  from dkist_processing_common.tasks.mixin.quality._metrics import _SimplePlotQualityMixin
  from dkist_processing_common.tasks.mixin.quality._metrics import _SimpleQualityMixin
  from dkist_processing_common.tasks.mixin.quality._metrics import _TableQualityMixin
+ from dkist_processing_common.tasks.mixin.quality._metrics import _WavecalQualityMixin


  class QualityMixin(
-     _SimpleQualityMixin, _SimplePlotQualityMixin, _TableQualityMixin, _PolcalQualityMixin
+     _SimpleQualityMixin,
+     _SimplePlotQualityMixin,
+     _TableQualityMixin,
+     _PolcalQualityMixin,
+     _WavecalQualityMixin,
  ):
      """Mixin class supporting the generation of the quality reports."""

@@ -81,6 +86,7 @@ class QualityMixin(
              "HISTORICAL": self.quality_build_historical,
              "AO_STATUS": self.quality_build_ao_status,
              "RANGE": self.quality_build_range,
+             "WAVECAL_FIT": self.quality_build_wavecal_results,
          }

      @property
@@ -12,14 +12,18 @@ from typing import Any
  from typing import Iterable
  from typing import Literal

+ import astropy.units as u
  import numpy as np
+ from astropy.wcs import WCS
  from dkist_processing_pac.fitter.fitter_parameters import CU_PARAMS
  from dkist_processing_pac.fitter.fitter_parameters import GLOBAL_PARAMS
  from dkist_processing_pac.fitter.fitter_parameters import TELESCOPE_PARAMS
  from dkist_processing_pac.fitter.fitting_core import compare_I
  from dkist_processing_pac.fitter.polcal_fitter import PolcalFitter
  from pandas import DataFrame
+ from solar_wavelength_calibration.fitter.wavelength_fitter import FitResult

+ from dkist_processing_common.codecs.json import json_decoder
  from dkist_processing_common.models.fried_parameter import r0_valid
  from dkist_processing_common.models.metric_code import MetricCode
  from dkist_processing_common.models.quality import EfficiencyHistograms
@@ -29,6 +33,7 @@ from dkist_processing_common.models.quality import PlotHistogram
  from dkist_processing_common.models.quality import PlotRaincloud
  from dkist_processing_common.models.quality import ReportMetric
  from dkist_processing_common.models.quality import SimpleTable
+ from dkist_processing_common.models.quality import VerticalMultiPanePlot2D
  from dkist_processing_common.models.tags import Tag

  logger = logging.getLogger(__name__)
@@ -1324,3 +1329,127 @@ class _PolcalQualityMixin:
          base_str += " bins."

          return base_str
+
+
+ class _WavecalQualityMixin:
+     """Mixin class supporting the recording and building of wavecal-related metrics."""
+
+     def quality_store_wavecal_results(
+         self,
+         *,
+         input_wavelength: u.Quantity,
+         input_spectrum: np.ndarray,
+         fit_result: FitResult,
+         weights: None | np.ndarray = None,
+     ):
+         """
+         Store the results of a wavelength solution fit.
+
+         Namely, save the:
+
+         * Input spectrum and wavelength
+         * Best-fit combined atlas spectrum
+         * Best-fit wavelength vector
+         * Fit residuals
+
+         Note that the residuals are the *unweighted* residuals.
+         """
+         weight_data = np.ones(input_wavelength.size) if weights is None else weights
+         prepared_weights = np.sqrt(weight_data / np.sum(weight_data))
+         residuals = fit_result.minimizer_result.residual / prepared_weights
+         residuals[~np.isfinite(residuals)] = 0.0
+         best_fit_atlas = input_spectrum - residuals
+         normalized_residuals = residuals / input_spectrum
+
+         best_fit_header = fit_result.wavelength_parameters.to_header(axis_num=1)
+         wcs = WCS(best_fit_header)
+         best_fit_wavelength = wcs.spectral.pixel_to_world(np.arange(input_spectrum.size))
+
+         data = {
+             "input_wavelength_nm": input_wavelength.to_value(u.nm).tolist(),
+             "input_spectrum": input_spectrum.tolist(),
+             "best_fit_wavelength_nm": best_fit_wavelength.to_value(u.nm).tolist(),
+             "best_fit_atlas": best_fit_atlas.tolist(),
+             "normalized_residuals": normalized_residuals.tolist(),
+             "weights": None if weights is None else weight_data.tolist(),
+         }
+
+         self._record_values(values=data, tags=[Tag.quality(MetricCode.wavecal_fit)])
+
+     def quality_build_wavecal_results(self) -> dict:
+         """Build a ReportMetric containing a multi-pane plot showing the fit spectra and residuals."""
+         data = next(self.read(tags=[Tag.quality(MetricCode.wavecal_fit)], decoder=json_decoder))
+
+         input_wave_list = data["input_wavelength_nm"]
+         input_spectrum_list = data["input_spectrum"]
+         best_fit_wave_list = data["best_fit_wavelength_nm"]
+         best_fit_atlas_list = data["best_fit_atlas"]
+         residuals_list = data["normalized_residuals"]
+         weights = data["weights"]
+
+         fit_series = {
+             "Best Fit Observations": [best_fit_wave_list, input_spectrum_list],
+             "Input Spectrum": [input_wave_list, input_spectrum_list],
+             "Best Fit Atlas": [best_fit_wave_list, best_fit_atlas_list],
+         }
+         fit_plot_kwargs = {
+             "Best Fit Observations": {"ls": "-", "lw": 4, "alpha": 0.8, "ms": 0},
+             "Input Spectrum": {"ls": "-", "alpha": 0.4, "ms": 0},
+             "Best Fit Atlas": {"color": "k", "ls": "-", "ms": 0},
+         }
+
+         fit_plot = Plot2D(
+             xlabel="Wavelength [nm]",
+             ylabel="Signal",
+             series_data=fit_series,
+             plot_kwargs=fit_plot_kwargs,
+             sort_series=False,
+         )
+
+         residuals_series = {"Residuals": [best_fit_wave_list, residuals_list]}
+         residuals_plot_kwargs = {"Residuals": {"ls": "-", "color": "k", "ms": 0}}
+
+         y_min = np.nanpercentile(residuals_list, 2)
+         y_max = np.nanpercentile(residuals_list, 98)
+         y_range = y_max - y_min
+         y_min -= 0.1 * y_range
+         y_max += 0.1 * y_range
+         residuals_plot = Plot2D(
+             xlabel="Wavelength [nm]",
+             ylabel=r"$\frac{\mathrm{Obs - Atlas}}{\mathrm{Obs}}$",
+             series_data=residuals_series,
+             plot_kwargs=residuals_plot_kwargs,
+             ylim=(y_min, y_max),
+         )
+
+         plot_list = [fit_plot, residuals_plot]
+         height_ratios = [1.5, 1.0]
+         if weights is not None:
+             weight_series = {"Weights": [best_fit_wave_list, weights]}
+             weight_plot_kwargs = {"Weights": {"ls": "-", "color": "k", "ms": 0}}
+             weight_plot = Plot2D(
+                 xlabel="Wavelength [nm]",
+                 ylabel="Fit Weights",
+                 series_data=weight_series,
+                 plot_kwargs=weight_plot_kwargs,
+             )
+             plot_list.append(weight_plot)
+             height_ratios.append(1.0)
+
+         full_plot = VerticalMultiPanePlot2D(
+             top_to_bottom_plot_list=plot_list,
+             match_x_axes=True,
+             no_gap=True,
+             top_to_bottom_height_ratios=height_ratios,
+         )
+
+         metric = ReportMetric(
+             name="Wavelength Calibration Results",
+             description="These plots show the wavelength solution computed based on fits to a Solar FTS atlas. "
+             "The top plot shows the input and best-fit spectra along with the best-fit atlas, which is "
+             "a combination of Solar and Telluric spectra. The bottom plot shows the fit residuals.",
+             metric_code=MetricCode.wavecal_fit,
+             vertical_multi_pane_plot_data=full_plot,
+         )
+
+         return metric.model_dump()
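
The residual handling in `quality_store_wavecal_results` divides the fitter's stored residual by the prepared weights to recover raw (unweighted) residuals, from which the best-fit atlas is reconstructed. A minimal numpy sketch of that bookkeeping, assuming an lmfit-style minimizer that stores `prepared_weights * (observed - atlas)` as its residual, which is what the code above implies:

    import numpy as np

    rng = np.random.default_rng(42)
    observed = 1.0 + 0.05 * rng.standard_normal(100)  # stand-in input spectrum
    atlas = np.ones(100)                              # stand-in best-fit atlas
    weights = rng.uniform(0.5, 1.0, size=100)         # per-pixel fit weights

    # What the minimizer would have stored: weighted residuals.
    prepared_weights = np.sqrt(weights / np.sum(weights))
    weighted_residual = prepared_weights * (observed - atlas)

    # Divide the prepared weights back out to recover the raw residuals...
    residuals = weighted_residual / prepared_weights
    best_fit_atlas = observed - residuals           # ...and reconstruct the atlas
    normalized_residuals = residuals / observed     # this is what gets plotted

    assert np.allclose(best_fit_atlas, atlas)
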
@@ -20,7 +20,6 @@ that makes the rest of the pipeline easy to write.
  In other words, we can find exactly the frame we need (tags) and, once we have it, we never need to look
  at a different frame to get information (constants).
  """
- import json
  import logging
  from abc import ABC
  from abc import abstractmethod
@@ -70,8 +69,10 @@ def default_constant_bud_factory() -> list[S]:
          MaximumCadenceBud(),
          MinimumCadenceBud(),
          VarianceCadenceBud(),
-         TaskExposureTimesBud(stem_name=BudName.dark_exposure_times.value, ip_task_type="dark"),
-         TaskReadoutExpTimesBud(stem_name=BudName.dark_readout_exp_times.value, ip_task_type="dark"),
+         TaskExposureTimesBud(stem_name=BudName.dark_exposure_times.value, ip_task_types="dark"),
+         TaskReadoutExpTimesBud(
+             stem_name=BudName.dark_readout_exp_times.value, ip_task_types="dark"
+         ),
      ]


@@ -11,7 +11,6 @@ from typing import Literal

  import astropy.units as u
  import numpy as np
- from astropy.coordinates import EarthLocation
  from astropy.io import fits
  from astropy.time import Time
  from dkist_fits_specifications import __version__ as spec_version
@@ -29,6 +28,7 @@ from sunpy.coordinates import Helioprojective

  from dkist_processing_common.codecs.fits import fits_access_decoder
  from dkist_processing_common.codecs.fits import fits_hdulist_encoder
+ from dkist_processing_common.models.dkist_location import location_of_dkist
  from dkist_processing_common.models.fried_parameter import r0_valid
  from dkist_processing_common.models.tags import Tag
  from dkist_processing_common.models.wavelength import WavelengthRange
@@ -251,45 +251,17 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
          For spectrographs, this is the wavelengths covered by the spectral axis of the data.
          """

-     @property
-     def location_of_dkist(self) -> EarthLocation:
-         """Return hard-coded EarthLocation of the DKIST.
-
-         Cartesian geocentric coordinates of DKIST on Earth as retrieved from
-         https://github.com/astropy/astropy-data/blob/gh-pages/coordinates/sites.json#L838
-         """
-         _dkist_site_info = {
-             "aliases": ["DKIST", "ATST"],
-             "name": "Daniel K. Inouye Solar Telescope",
-             "elevation": 3067,
-             "elevation_unit": "meter",
-             "latitude": 20.7067,
-             "latitude_unit": "degree",
-             "longitude": 203.7436,
-             "longitude_unit": "degree",
-             "timezone": "US/Hawaii",
-             "source": "DKIST website: https://www.nso.edu/telescopes/dki-solar-telescope/",
-         }
-         location_of_dkist = EarthLocation.from_geodetic(
-             _dkist_site_info["longitude"] * u.Unit(_dkist_site_info["longitude_unit"]),
-             _dkist_site_info["latitude"] * u.Unit(_dkist_site_info["latitude_unit"]),
-             _dkist_site_info["elevation"] * u.Unit(_dkist_site_info["elevation_unit"]),
-         )
-
-         return location_of_dkist
-
      def add_solarnet_headers(self, header: fits.Header) -> fits.Header:
          """Add headers recommended by solarnet that haven't already been added."""
          header["DATE-AVG"] = self.calculate_date_avg(header=header)
          header["TELAPSE"] = self.calculate_telapse(header=header)
          header["DATEREF"] = header["DATE-BEG"]
-         dkist_loc = self.location_of_dkist
-         header["OBSGEO-X"] = dkist_loc.x.to_value(unit=u.m)
-         header["OBSGEO-Y"] = dkist_loc.y.to_value(unit=u.m)
-         header["OBSGEO-Z"] = dkist_loc.z.to_value(unit=u.m)
+         header["OBSGEO-X"] = location_of_dkist.x.to_value(unit=u.m)
+         header["OBSGEO-Y"] = location_of_dkist.y.to_value(unit=u.m)
+         header["OBSGEO-Z"] = location_of_dkist.z.to_value(unit=u.m)
          obstime = Time(header["DATE-AVG"])
          header["OBS_VR"] = (
-             dkist_loc.get_gcrs(obstime=obstime)
+             location_of_dkist.get_gcrs(obstime=obstime)
              .transform_to(HeliocentricInertial(obstime=obstime))
              .d_distance.to_value(unit=u.m / u.s)
          )  # relative velocity of observer with respect to the sun in m/s
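
Because `location_of_dkist` is now a module-level constant, the OBS_VR calculation above can be reproduced outside the task. A small standalone sketch using the same astropy/sunpy calls (the timestamp is an arbitrary example):

    import astropy.units as u
    from astropy.time import Time
    from sunpy.coordinates import HeliocentricInertial

    from dkist_processing_common.models.dkist_location import location_of_dkist

    obstime = Time("2024-01-01T18:00:00")  # arbitrary example time
    obs_vr = (
        location_of_dkist.get_gcrs(obstime=obstime)
        .transform_to(HeliocentricInertial(obstime=obstime))
        .d_distance.to_value(unit=u.m / u.s)
    )  # observer's velocity with respect to the Sun, in m/s
    print(obs_vr)
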
@@ -514,7 +486,7 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
          as seen by an observer located at the DKIST site at the given time of observation.
          """
          dummy_theta_coord = 0 * u.arcsec
-         dkist_at_obstime = self.location_of_dkist.get_itrs(obstime=obstime)
+         dkist_at_obstime = location_of_dkist.get_itrs(obstime=obstime)
          sun_coordinate = Helioprojective(
              Tx=dummy_theta_coord, Ty=dummy_theta_coord, observer=dkist_at_obstime
          )
@@ -535,7 +507,6 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
          If the spectral line rest wavelength in air does not fall in the wavelength range of the data,
          do not populate the keyword.
          """
-         print(wavelength_range)
          closest_line = get_closest_spectral_line(wavelength=wavelength)
          rest_wavelength = closest_line.rest_wavelength_in_air
          if rest_wavelength < wavelength_range.min or rest_wavelength > wavelength_range.max: