dkist-header-validator 5.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,114 @@
1
+ import pytest
2
+ from astropy.io import fits
3
+
4
+ from dkist_header_validator.translator import remove_spec_122_keys_and_spec_214_l0_keys
5
+ from dkist_header_validator.translator import sanitize_to_spec214_level1
6
+ from dkist_header_validator.translator import translate_spec122_to_spec214_l0
7
+
8
+
9
def test_spec122_to_214_l0_valid(valid_translator_object):
    """
    Given: a valid SPEC-0122 object
    When: translating its headers to SPEC-214 level 0
    Then: translation completes without raising
    """
    translate_spec122_to_spec214_l0(valid_translator_object)
17
+
18
+
19
def test_spec122_to_214_l0_missing_required_keys(invalid_spec_122_object):
    """
    Given: a SPEC-0122 object that is missing required keywords
    When: translating its headers to SPEC-214 level 0
    Then: a KeyError is raised
    """
    with pytest.raises(KeyError):
        translate_spec122_to_spec214_l0(invalid_spec_122_object)
29
+
30
+
31
@pytest.mark.parametrize("instrument", ["vbi"])
def test_spec122_to_214_l0_missing_required_instrument_keys(
    invalid_instrument_table_spec_122_object,
):
    """
    Given: a SPEC-0122 object missing a keyword required by its instrument table
    When: translating its headers to SPEC-214 level 0
    Then: a KeyError is raised
    """
    # The fixture yields a (header object, metadata) pair; only the header matters here.
    invalid_header, _ = invalid_instrument_table_spec_122_object
    with pytest.raises(KeyError):
        translate_spec122_to_spec214_l0(invalid_header)
43
+
44
+
45
def test_translate_to_214_l0_required_only_headers(valid_translator_object_required_only):
    """
    Translate a SPEC-0122-compliant header carrying only the keywords the DC requires.

    Given: a spec122-compliant header with only required keywords
    When: translating headers
    Then: for a fits file input, return an HDUList and do not raise;
          for a dict, HDUList, or header input, return a dictionary and do not raise
    """
    translate_spec122_to_spec214_l0(valid_translator_object_required_only)
54
+
55
+
56
def test_translate_to_214_l0_expected_only_headers(valid_translator_object_expected_only):
    """
    Translate a SPEC-0122-compliant header carrying only the keywords the DC expects.

    Given: a spec122-compliant header with only expected keywords
    When: translating headers
    Then: for a fits file input, return an HDUList and do not raise;
          for a dict, HDUList, or header input, return a dictionary and do not raise
    """
    # Docstring previously said "required" keywords — copy-paste from the
    # required-only test above; this test exercises the *expected*-only fixture.
    translate_spec122_to_spec214_l0(valid_translator_object_expected_only)
65
+
66
+
67
+ # I removed all of the compressed tests because a file (compressed or not) cannot go directly into the translator.
68
+ # It has to go into the validator first, which will hand it to the translator in the right format
69
+
70
+
71
def test_sanitize_s122(valid_translator_object):
    """
    Given: a valid SPEC-0122 object
    When: stripping headers down to SPEC-214 level 1 (no 122 headers)
    Then: sanitization completes without raising
    """
    sanitize_to_spec214_level1(valid_translator_object)
79
+
80
+
81
def test_sanitize_translated_s214l0(valid_translator_object):
    """
    Sanitize a translated SPEC-214 L0 object.

    Given: a valid SPEC-0122 object
    When: translating and stripping headers down to 214 L1 (no 122 headers)
    Then: returns a fits.Header (or an HDUList when the input carried data)
    """
    s214_l0_header = translate_spec122_to_spec214_l0(valid_translator_object)
    # The original called isinstance() without `assert`, so the type check
    # was a no-op. Sanitization returns a Header for header-only input and
    # an HDUList when the input carried data — accept either.
    result = sanitize_to_spec214_level1(s214_l0_header)
    assert isinstance(result, (fits.Header, fits.HDUList))
90
+
91
+
92
def test_sanitize(valid_spec_214_no_file):
    """
    Sanitize a SPEC-214 L1 object.

    Given: a valid SPEC-214 L1 object
    When: stripping headers down to 214 L1 (no 122 headers)
    Then: for a header or a dict, return a fits.Header and do not raise;
          for an HDUList or a file, return an HDUList and do not raise
    """
    # The original called isinstance() without `assert`, so the type check
    # was a no-op. Per the contract above the result is one of the two
    # FITS container types — assert that explicitly.
    result = sanitize_to_spec214_level1(valid_spec_214_no_file)
    assert isinstance(result, (fits.Header, fits.HDUList))
101
+
102
+
103
def test_remove_spec_122_keys():
    """
    Given: a FITS header containing a 'level0_only' keyword
    When: removing the 'level0_only' keywords
    Then: that keyword is gone and unrelated keywords survive
    """
    header = fits.header.Header()
    header["VALID"] = "valid"
    header["IPTASK"] = "should be removed"
    processed = remove_spec_122_keys_and_spec_214_l0_keys(header)
    # Membership on a Header tests its keywords directly.
    assert "VALID" in processed
    assert "IPTASK" not in processed
@@ -0,0 +1,251 @@
1
+ import logging
2
+ from datetime import datetime
3
+ from functools import reduce
4
+ from typing import Any
5
+ from typing import IO
6
+
7
+ from astropy.io import fits
8
+ from astropy.io.fits.hdu.hdulist import HDUList
9
+ from dkist_fits_specifications.spec122 import load_spec122
10
+ from dkist_fits_specifications.spec214 import get_214_l0_only_keys
11
+ from dkist_fits_specifications.spec214 import level0
12
+ from dkist_fits_specifications.spec214 import load_processed_spec214
13
+ from dkist_fits_specifications.spec214 import load_spec214
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
# Public API of this module.
__all__ = [
    "translate_spec122_to_spec214_l0",
    "sanitize_to_spec214_level1",
    "remove_extra_axis_keys",
    # Fixed: the old entry "remove_spec_122_keys" named a function that does
    # not exist in this module; the function defined below is:
    "remove_spec_122_keys_and_spec_214_l0_keys",
]
23
+
24
+ type_map = {"int": int, "float": float, "str": str, "bool": bool}
25
+
26
+
27
def translate_spec122_to_spec214_l0(
    spec122_input: HDUList | dict | fits.header.Header | str | IO | list,
) -> dict | HDUList:
    """
    Convert SPEC-0122 headers to SPEC-214 level-0 headers.

    Parameters
    ----------
    spec122_input
        SPEC-0122 headers, or headers plus data, in any supported form.

    Returns
    -------
    SPEC-214 l0 headers as a dict or, when the input carried data, an HDUList.
    """
    headers, data = _parse_fits_like_input(spec122_input)
    translated = _add_214_l0_headers(headers)
    # Stamp the moment the translated headers were produced.
    translated["DATE"] = datetime.now().isoformat()
    if data is None:
        # Header-only input: hand back just the translated headers.
        return translated
    return _format_output_hdu(translated, data)
50
+
51
+
52
def sanitize_to_spec214_level1(
    input_headers: HDUList | dict | fits.header.Header | str | IO | list,
) -> dict | HDUList:
    """
    Remove every header value that is not SPEC-214 compliant.

    Parameters
    ----------
    input_headers
        SPEC-214 headers, or headers plus data, in any supported form.

    Returns
    -------
    SPEC-214 l1 headers or, when the input carried data, an HDUList.
    """
    input_headers, input_data = _parse_fits_like_input(input_headers)
    # Work on a copy so the caller's header object stays untouched.
    header = fits.Header(input_headers)

    # Expand the 214 schema against the actual header values to learn every legal key.
    allowed_keys = {
        key
        for section in load_processed_spec214(**dict(input_headers)).values()
        for key in section
    }

    # Snapshot the keys before mutating the header.
    for keyword in tuple(header.keys()):
        if keyword not in allowed_keys:
            header.remove(keyword)

    if input_data is None:
        # Header-only input: hand back just the sanitized header.
        return header
    return _format_output_hdu(header, input_data)
80
+
81
+
82
def remove_spec_122_keys_and_spec_214_l0_keys(
    input_headers: HDUList | dict | fits.header.Header | str | IO | list,
) -> dict | HDUList:
    """
    Remove all spec 122 keys that are not in spec 214, along with all keys marked as spec 214 l0.

    Parameters
    ----------
    input_headers
        Spec 214 headers or headers + data to convert

    Returns
    -------
    spec214 l1 headers and (possibly) data
    """
    # extract headers and data
    input_headers, input_data = _parse_fits_like_input(input_headers)
    # NOTE(review): this mutates the parsed header in place (no copy), unlike
    # sanitize_to_spec214_level1 which copies — presumably intentional; confirm.
    header = input_headers
    # Get "level0_only" keys and remove them from the header
    for keyword in get_214_l0_only_keys():
        header.remove(keyword=keyword, ignore_missing=True)
    # Get header schemas
    expanded_122 = load_spec122()
    all_122_keys = reduce(list.__add__, map(list, expanded_122.values()))
    expanded_214 = load_processed_spec214(**dict(input_headers))
    all_214_keys = reduce(list.__add__, map(list, expanded_214.values()))

    # BUG FIX: snapshot the keys before removal. The original iterated
    # header.keys() while calling header.remove() inside the loop, which
    # mutates the mapping during iteration and can skip entries.
    # (sanitize_to_spec214_level1 already does it this way.)
    for keyword in tuple(header.keys()):
        # Only remove the keys that are in spec 122 AND not in spec 214
        if (keyword in all_122_keys) and (keyword not in all_214_keys):
            header.remove(keyword)

    if input_data is not None:  # return hdu list if the input had data
        return _format_output_hdu(header, input_data)
    return header  # return headers if only headers were given
116
+
117
+
118
def remove_extra_axis_keys(
    input_headers: HDUList | dict | fits.header.Header | str | IO | list,
) -> dict | HDUList:
    """
    Remove all keywords that refer to axes that don't exist in the data array.

    Parameters
    ----------
    input_headers
        SPEC-214 headers, or headers plus data, in any supported form.

    Returns
    -------
    Stripped headers or, when the input carried data, an HDUList.
    """
    input_headers, input_data = _parse_fits_like_input(input_headers)
    # Copy so the caller's header object stays untouched.
    header = fits.Header(input_headers)

    # Unexpanded schema keywords containing the axis placeholder "n".
    schema_keys = reduce(list.__add__, map(list, load_spec214().values()))
    axis_templates = [key for key in schema_keys if "n" in key]

    # Drop every concrete keyword whose axis number exceeds NAXIS
    # (axis numbers up to 5 may appear in the expanded schema).
    for template in axis_templates:
        for axis in range(header["NAXIS"] + 1, 6):
            header.pop(template.replace("n", str(axis)), None)

    if input_data is None:
        return header
    return _format_output_hdu(header, input_data)
149
+
150
+
151
def _parse_fits_like_input(
    spec122_input: HDUList | dict | fits.header.Header | str | IO | list,
) -> tuple[fits.Header, bytes | None]:
    """
    Parse out a header and optional data from the various types of input.

    Returns a ``(header, data)`` pair; ``data`` is ``None`` when the input
    carried headers only (dict or Header). For HDUList/file inputs the second
    HDU is preferred (compressed FITS layout) with a fallback to the primary
    HDU when there is only one.
    """
    if isinstance(spec122_input, dict):
        # Plain mapping of keyword -> value: wrap in a Header, no data payload.
        return fits.Header(spec122_input), None
    if isinstance(spec122_input, fits.header.Header):
        # Already a Header: pass through as-is, no data payload.
        return spec122_input, None
    if isinstance(spec122_input, HDUList):
        try:
            # Compressed files keep their payload in HDU 1.
            return spec122_input[1].header, spec122_input[1].data
        except IndexError:  # non-compressed
            return spec122_input[0].header, spec122_input[0].data

    # If headers are of any other type, see if it is a file and try to open that
    try:  # compressed
        with fits.open(spec122_input) as hdus:
            # .data is accessed (and therefore loaded) before the file closes.
            return hdus[1].header, hdus[1].data
    except IndexError:  # non-compressed
        with fits.open(spec122_input) as hdus:
            return hdus[0].header, hdus[0].data
174
+
175
+
176
def _format_output_hdu(hdr, data) -> HDUList:
    """Wrap *data* in a single-HDU HDUList carrying every card from *hdr*."""
    hdu_list = fits.HDUList([fits.PrimaryHDU(data)])
    primary_header = hdu_list[0].header
    # Copy the header cards one by one onto the new primary HDU.
    for key, value in hdr.items():
        primary_header[key] = value
    return hdu_list
182
+
183
+
184
def _add_214_l0_headers(header: dict[str, Any] | fits.Header) -> dict:
    """
    Translate 122 keywords to 214 l0 keywords and return a dictionary.

    Keys covered by the flattened 214 l0 schema are translated via
    ``_translate_key``; any remaining input keys are passed through unchanged.
    """
    result = {}  # output headers

    # We need an extra layer of sanitation here because `load_level0_spec214` is a cached function
    # and the weird header cards are not hashable
    # (NOTE: relies on the private astropy type _HeaderCommentaryCards).
    sanitized_header = {
        k: v for k, v in header.items() if not isinstance(v, fits.header._HeaderCommentaryCards)
    }

    # Passing only INSTRUME=header["INSTRUME"] is a hack for speed. We know that right now the only schema processor
    # that gets applied to 122 or 214 level0 schema (this happens in `load_level0_spec214`) is the instrument requiredness
    # processor. Thus, we only need to pass a header consisting of `{INSTRUME: sanitized_header["INSTRUME"]}`.
    # This results in a massive speed increase during translation because `load_level0_spec214` is cached. If we pass
    # the whole header then each file will result in a new call to this function, but if we only pass the instrument
    # then we only make new calls to `load_level0_spec214` at most 5 times (for each DKIST instrument).
    spec214_l0_schema_sections = level0.load_level0_spec214(
        INSTRUME=sanitized_header.get("INSTRUME", "None")
    ).values()
    # Flatten the per-section schemas into one {keyword: schema} mapping.
    flat_spec214_l0_schema = {
        key: schema
        for definition in spec214_l0_schema_sections
        for (key, schema) in definition.items()
    }

    # translate 122 -> 214 headers
    for key, key_schema in flat_spec214_l0_schema.items():
        result.update(_translate_key(key, key_schema, header))

    # add remaining header values to result (untranslated keys pass through)
    hdr_keys_not_translated = {k: v for k, v in header.items() if k not in result}
    result.update(hdr_keys_not_translated)
    return result
219
+
220
+
221
def _translate_key(key, key_schema, hdr) -> dict:
    """
    Translate a single schema keyword against the input header *hdr*.

    Returns a dict with at most one entry ({key: value}), or an empty dict
    when there is nothing to translate. Raises KeyError when a required,
    copied keyword cannot be found under its own name or its rename source.
    """
    default_values = {"str": "default", "int": -999, "float": -999.9, "bool": False}
    copied = key_schema.get("copy")
    schema_only = copied == "schema"
    rename_source = key_schema.get("rename")
    rename_in_header = rename_source and (rename_source in hdr)
    required = key_schema.get("required")

    if key in hdr:
        if schema_only:
            # copy="schema": keep the key but replace its value with the type default.
            return {key: default_values[key_schema["type"]]}
        if copied or required:
            # Carry the existing value through unchanged.
            return {key: hdr[key]}
        return {}

    # Key absent from the header.
    if copied:
        if rename_in_header:
            # Pull the value from its SPEC-0122 name.
            return {key: hdr[rename_source]}
        if required:
            raise KeyError(f" Required keyword {key!r} not found.")
        return {}
    if required:
        # Required but not copied and missing: fall back to the type default.
        return {key: default_values[key_schema["type"]]}
    # nothing to translate
    return {}
File without changes
@@ -0,0 +1,18 @@
1
+ from dkist_fits_specifications.utils import schema_type_hint
2
+ from dkist_fits_specifications.utils.spec_processors.expansion import expand_schema
3
+ from dkist_fits_specifications.utils.spec_processors.expansion import ExpansionIndex
4
+
5
+
6
def expand_naxis(naxis: int, schema: schema_type_hint) -> schema_type_hint:
    """Expand the axis placeholders (n, i, j) over 1..naxis and the two-digit
    "pp" placeholder over its fixed percentile-style values."""
    axis_values = range(1, naxis + 1)
    expansions = [
        ExpansionIndex(index="n", size=1, values=axis_values),
        ExpansionIndex(index="i", size=1, values=axis_values),
        ExpansionIndex(index="j", size=1, values=axis_values),
        # Fixed two-digit values — presumably percentile levels; confirm against spec.
        ExpansionIndex(index="pp", size=2, values=[1, 10, 25, 75, 90, 95, 98, 99]),
    ]
    return expand_schema(schema=schema, expansions=expansions)
14
+
15
+
16
def expand_index_d(schema: schema_type_hint, *, DNAXIS: int, **kwargs) -> schema_type_hint:
    """Expand the dataset-axis placeholder "d" over 1..DNAXIS; extra kwargs are ignored."""
    d_values = range(1, DNAXIS + 1)
    return expand_schema(
        schema=schema,
        expansions=[ExpansionIndex(index="d", size=1, values=d_values)],
    )
@@ -0,0 +1,8 @@
1
# Note that we need to fall back to the hard-coded version if either
# setuptools_scm can't be imported or setuptools_scm can't determine the
# version, so we catch the generic 'Exception'.
try:
    from setuptools_scm import get_version
    # Derive the version from SCM metadata, resolved relative to this file.
    __version__ = get_version(root='..', relative_to=__file__)
except Exception:
    # Installed from a wheel/sdist without SCM metadata: use the release version.
    __version__ = '5.2.0'
@@ -0,0 +1,151 @@
1
+ Metadata-Version: 2.4
2
+ Name: dkist-header-validator
3
+ Version: 5.2.0
4
+ Summary: DKIST data validator
5
+ Home-page: https://bitbucket.org/dkistdc/dkist-header-validator/src/main/
6
+ Author: NSO / AURA
7
+ Author-email: "aderks@nso.edu"
8
+ License: MIT
9
+ Classifier: Programming Language :: Python
10
+ Classifier: Programming Language :: Python :: 3
11
+ Classifier: Programming Language :: Python :: 3.10
12
+ Classifier: Programming Language :: Python :: 3.11
13
+ Requires-Python: >=3.10
14
+ Requires-Dist: astropy>=5.0
15
+ Requires-Dist: voluptuous<1.0.0,>=0.11.7
16
+ Requires-Dist: pyyaml>=6.0
17
+ Requires-Dist: dkist-fits-specifications>=4.1.0
18
+ Provides-Extra: test
19
+ Requires-Dist: pytest; extra == "test"
20
+ Requires-Dist: pytest-cov; extra == "test"
21
+ Requires-Dist: pytest-xdist; extra == "test"
22
+ Requires-Dist: deepdiff; extra == "test"
23
+ Requires-Dist: tox; extra == "test"
24
+ Requires-Dist: dkist-data-simulator; extra == "test"
25
+ Provides-Extra: cli
26
+ Requires-Dist: typer; extra == "cli"
27
+ Provides-Extra: docs
28
+ Requires-Dist: sphinx; extra == "docs"
29
+ Requires-Dist: sphinx-astropy; extra == "docs"
30
+ Requires-Dist: sphinx-changelog; extra == "docs"
31
+ Requires-Dist: sphinx-autoapi; extra == "docs"
32
+ Requires-Dist: pytest; extra == "docs"
33
+ Requires-Dist: towncrier; extra == "docs"
34
+ Requires-Dist: dkist-sphinx-theme; extra == "docs"
35
+
36
+ DKIST Data Validator
37
+ ====================
38
+
39
+ |codecov|
40
+
41
+ An interface containing a validator and a spec translator for DKIST specs (specifically corresponding to SPEC122 RevE and SPEC214 RevA
42
+ data):
43
+
44
+ - SPEC-0122: Data received from the summit
45
+
46
+ - SPEC-0214 l0: Data ingested and parsed by the Data Center
47
+
48
+ - SPEC-0214: Data published by the Data Center (incomplete)
49
+
50
+ The validator references a machine readable version of the DKIST Header Specifications which are defined `here <https://pypi.org/project/dkist-fits-specifications/>`__, in the dkist-fits-specifications. Unless otherwise specified, the latest version of dkist-fits-specifications package is installed along with the validator. Data can be validated against past revisions of the DKIST Header Specifications if a previous version of the dkist-fits-specifications package is installed. If you are receiving validation errors, please reference the version of the DKIST Header Specifications identified in the dkist-fits-specifications package to make sure that your headers are expected to be compliant with that version.
51
+
52
+
53
+ Features
54
+ --------
55
+
56
+ - Uses `voluptuous <https://pypi.org/project/voluptuous/>`__ schemas to
57
+ validate a given input header against dkist specifications
58
+
59
+ - 3 keyword validations: type validation, required-ness validation, and value validation
60
+
61
+ - Failure exceptions include a dictionary of validation failure causes
62
+
63
+ - SPEC-0122 to SPEC-0214 l0 translation
64
+
65
+
66
+ Installation
67
+ ------------
68
+
69
+ .. code:: bash
70
+
71
+ pip install dkist-header-validator
72
+
73
+
74
+ Usage
75
+ --------
76
+ Currently, this package can be used to validate SPEC122 data or SPEC214 data. Please import the
77
+ corresponding methods (spec122_validator and Spec122ValidationException, or spec214_validator and Spec214ValidationException, or spec214_l0_validator and Spec214ValidationException).
78
+
79
+ This package can be used for validating data, or for validating and translating data (SPEC122 input only).
80
+
81
+ Input data can be one of:
82
+ - string file path
83
+ - File like object
84
+ - HDUList object
85
+ - fits.header.Header object
86
+ - Dictionary of header keys and values
87
+
88
+ To validate data:
89
+
90
+ .. code:: python
91
+
92
+ >>> from dkist_header_validator import spec122_validator, Spec122ValidationException
93
+
94
+ >>> spec122_validator.validate('dkist_rosa0181200000_observation.fits')
95
+
96
+ The cli can also be used to validate data:
97
+
98
+ .. code:: bash
99
+
100
+ >>> dkist-header-validator validate122 "path/to/file.fits"
101
+
102
+ >>> dkist-header-validator validate214 "path/to/file.fits"
103
+
104
+ To validate and translate data to spec214 l0:
105
+
106
+ .. code:: python
107
+
108
+ >>> from dkist_header_validator import spec122_validator, Spec122ValidationException
109
+
110
+ >>> spec122_validator.validate_and_translate_to_214_l0('dkist_rosa0181200000_observation.fits')
111
+
112
+
113
+ Within the validate and validate_and_translate methods, a series of flags can be set, otherwise they will take their default values:
114
+ - extra: Default value is true (allow extra keys). This flag determines if extra keywords are allowed in the schema to be validated. Ingest validation should allow extra keys.
115
+ - return_type: Default value is HDUList. This flag will determine the return type. Can be one of dict, Path, BytesIO, fits.header.Header, HDUList.
116
+
117
+
118
+ Examples
119
+ --------
120
+ 1. Validate a file:
121
+
122
+ .. code:: python
123
+
124
+ >>> from dkist_header_validator import spec122_validator, Spec122ValidationException
125
+ >>> spec122_validator.validate('dkist_rosa0181200000_observation.fits', return_type=dict)
126
+
127
+ >>> from pathlib import Path
128
+ >>> spec122_validator.validate('dkist_rosa0181200000_observation.fits', return_type=Path)
129
+
130
+ 2. Validate and translate a file:
131
+
132
+ .. code:: python
133
+
134
+ >>> from dkist_header_validator import spec122_validator, Spec122ValidationException
135
+ >>> spec122_validator.validate_and_translate_to_214_l0('dkist_rosa0181200000_observation.fits')
136
+
137
+
138
+ 3. Validate headers:
139
+
140
+ .. code:: python
141
+
142
+ >>> from dkist_header_validator import spec122_validator, Spec122ValidationException
143
+ >>> from astropy.io import fits
144
+ >>> hdus = fits.open('dkist_rosa0181200000_observation.fits')
145
+ >>> spec122_validator.validate(hdus[0].header, return_type=dict)
146
+
147
+
148
+ This project is Copyright (c) AURA/NSO.
149
+
150
+ .. |codecov| image:: https://codecov.io/bb/dkistdc/dkist-header-validator/graph/badge.svg?token=OZK0ZPO2JH
151
+ :target: https://codecov.io/bb/dkistdc/dkist-header-validator
@@ -0,0 +1,24 @@
1
+ dkist_header_validator/__init__.py,sha256=J4wfa3VQLsODIn0TDh_7KUwHcON8EofogFDTdctxFMw,421
2
+ dkist_header_validator/base_validator.py,sha256=0WvVQfjDyoONQLWMg3GBdkj04hOVxTrR777V-mpIk0Y,27990
3
+ dkist_header_validator/exceptions.py,sha256=8IA3xwntGjcJRyvzzgbD8Om0GxWtDY5kLnE3y8l_0Fw,1031
4
+ dkist_header_validator/spec_validators.py,sha256=OUIRJ5xurRhBnxZ-8JhmuVcIusvK63gKSwPfwfGjJZE,1369
5
+ dkist_header_validator/translator.py,sha256=gYjEQAF8IOB_pgWR9ZOlMlYUIl3P274gMaCI86qeqyw,9501
6
+ dkist_header_validator/version.py,sha256=lxYXDAy7pRI_OFyrqEbD-imjq1dV35QX6WSrKzzluqY,345
7
+ dkist_header_validator/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
8
+ dkist_header_validator/api/validate.py,sha256=iU-alEp9rkV9oXAFmH253VFWbLyE8l-1K87XuJym1Ek,2219
9
+ dkist_header_validator/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
+ dkist_header_validator/tests/conftest.py,sha256=7ipuZ21rGEix9G5VXbMw1JXMGcpU4EqAuHAk3MpzKf4,22362
11
+ dkist_header_validator/tests/test_base_validator.py,sha256=sSVeSLsjy6GnZK2vJ2O1BvZtnLsrOvOF6OVNVUGAARA,16213
12
+ dkist_header_validator/tests/test_spec122_translation.py,sha256=D3molsOeMx4Dt6f4YAQKBJ2eZhEa1OV_PhM2zh5LmJ8,8670
13
+ dkist_header_validator/tests/test_spec122_validation+.py,sha256=jxowHmQ6dwQt20r1vzgSsLgMXLlHGSg27AFv6fB8Bx0,4981
14
+ dkist_header_validator/tests/test_spec122_validation-.py,sha256=Seg58VRwuswUj3-9wHwjQf3qXZ1aoNAaWposiPR7ALo,4242
15
+ dkist_header_validator/tests/test_spec214_validation+.py,sha256=giuUlBcypMIptjOk0UdzeotzaImBRHld7SDSxH1lXxA,13832
16
+ dkist_header_validator/tests/test_spec214_validation-.py,sha256=lzRGDZTF2H4MEG4b8mglGhit7jGpBCLWtf1qv1R7oNM,7475
17
+ dkist_header_validator/tests/test_translator.py,sha256=6qiIxXJW1fq1UFHRljkwxEie6ya0QXEqtDz9PqJqbc4,4299
18
+ dkist_header_validator/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
19
+ dkist_header_validator/utils/expansions.py,sha256=cqT49UC0cqiP94jL7uZ5tnNNJAJ67BVNf8P82W4ipPA,1031
20
+ dkist_header_validator-5.2.0.dist-info/METADATA,sha256=K9bpLHdzBBrP6b03dSDXnn01Rydl6Z7Wgch43OZeKPo,5608
21
+ dkist_header_validator-5.2.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
22
+ dkist_header_validator-5.2.0.dist-info/entry_points.txt,sha256=H2575-kiHoXDKLmJxUHvzH9LK8qwG2ZW0TgH-67gU5Y,84
23
+ dkist_header_validator-5.2.0.dist-info/top_level.txt,sha256=Xg2Mfa3AKhLk6lld6tmbUCP7HhME2_gsTOs6DYC08fQ,23
24
+ dkist_header_validator-5.2.0.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (78.1.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ dkist-header-validator = dkist_header_validator.api.validate:main
@@ -0,0 +1 @@
1
+ dkist_header_validator