spectre-core 0.0.11__py3-none-any.whl → 0.0.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- spectre_core/_file_io/file_handlers.py +12 -12
- spectre_core/batches/__init__.py +22 -0
- spectre_core/batches/_base.py +146 -0
- spectre_core/batches/_batches.py +197 -0
- spectre_core/batches/_factory.py +27 -0
- spectre_core/{chunks → batches}/_register.py +5 -5
- spectre_core/{chunks → batches}/library/_callisto.py +31 -33
- spectre_core/{chunks → batches}/library/_fixed_center_frequency.py +43 -38
- spectre_core/{chunks → batches}/library/_swept_center_frequency.py +22 -20
- spectre_core/capture_configs/_capture_templates.py +6 -6
- spectre_core/capture_configs/_parameters.py +3 -6
- spectre_core/capture_configs/_ptemplates.py +3 -3
- spectre_core/capture_configs/_pvalidators.py +4 -4
- spectre_core/config/__init__.py +2 -2
- spectre_core/config/_paths.py +5 -5
- spectre_core/config/_time_formats.py +5 -3
- spectre_core/exceptions.py +2 -2
- spectre_core/logging/_configure.py +1 -1
- spectre_core/logging/_log_handlers.py +1 -1
- spectre_core/plotting/_panels.py +1 -1
- spectre_core/post_processing/__init__.py +2 -2
- spectre_core/post_processing/_base.py +5 -5
- spectre_core/post_processing/_factory.py +3 -3
- spectre_core/post_processing/_post_processor.py +5 -5
- spectre_core/post_processing/library/_fixed_center_frequency.py +24 -25
- spectre_core/post_processing/library/_swept_center_frequency.py +68 -83
- spectre_core/receivers/gr/_base.py +1 -1
- spectre_core/receivers/gr/_rsp1a.py +3 -3
- spectre_core/receivers/gr/_rspduo.py +4 -4
- spectre_core/receivers/gr/_test.py +3 -3
- spectre_core/receivers/library/_test.py +3 -3
- spectre_core/spectrograms/_analytical.py +0 -6
- spectre_core/spectrograms/_spectrogram.py +113 -79
- spectre_core/spectrograms/_transform.py +19 -36
- spectre_core/wgetting/_callisto.py +20 -24
- {spectre_core-0.0.11.dist-info → spectre_core-0.0.12.dist-info}/METADATA +1 -1
- spectre_core-0.0.12.dist-info/RECORD +64 -0
- spectre_core/chunks/__init__.py +0 -22
- spectre_core/chunks/_base.py +0 -116
- spectre_core/chunks/_chunks.py +0 -200
- spectre_core/chunks/_factory.py +0 -25
- spectre_core-0.0.11.dist-info/RECORD +0 -64
- {spectre_core-0.0.11.dist-info → spectre_core-0.0.12.dist-info}/LICENSE +0 -0
- {spectre_core-0.0.11.dist-info → spectre_core-0.0.12.dist-info}/WHEEL +0 -0
- {spectre_core-0.0.11.dist-info → spectre_core-0.0.12.dist-info}/top_level.txt +0 -0
spectre_core/{chunks → batches}/library/_fixed_center_frequency.py
CHANGED
@@ -11,24 +11,29 @@ from astropy.io.fits.hdu.image import PrimaryHDU
 from astropy.io.fits.hdu.table import BinTableHDU
 from astropy.io.fits.hdu.hdulist import HDUList

+from spectre_core.config import TimeFormats
 from spectre_core.spectrograms import Spectrogram
 from spectre_core.capture_configs import CaptureModes
-from .._register import
-from .._base import
+from .._register import register_batch
+from .._base import BaseBatch, BatchFile

-
-
-
-
-
-
-
-        self.add_file(
+
+@register_batch(CaptureModes.FIXED_CENTER_FREQUENCY)
+class _Batch(BaseBatch):
+    def __init__(self,
+                 start_time: str,
+                 tag: str):
+        super().__init__(start_time, tag)
+        self.add_file( BinFile(self.parent_dir_path, self.name) )
+        self.add_file( HdrFile(self.parent_dir_path, self.name) )
+        self.add_file( FitsFile(self.parent_dir_path, self.name))


-class
-    def __init__(self,
-
+class BinFile(BatchFile):
+    def __init__(self,
+                 parent_dir_path: str,
+                 base_file_name: str):
+        super().__init__(parent_dir_path, base_file_name, "bin")

     def read(self) -> np.ndarray:
         with open(self.file_path, "rb") as fh:
@@ -36,9 +41,11 @@ class BinChunk(ChunkFile):



-class
-    def __init__(self,
-
+class HdrFile(BatchFile):
+    def __init__(self,
+                 parent_dir_path: str,
+                 base_file_name: str):
+        super().__init__(parent_dir_path, base_file_name, "hdr")


     def read(self) -> int:
@@ -63,11 +70,11 @@ class HdrChunk(ChunkFile):
         return int(millisecond_correction_as_float)


-
-
-
-
-        super().__init__(
+class FitsFile(BatchFile):
+    def __init__(self,
+                 parent_dir_path: str,
+                 base_file_name: str):
+        super().__init__(parent_dir_path, base_file_name, "fits")


     @property
@@ -75,25 +82,24 @@ class FitsChunk(ChunkFile):
         with fits.open(self.file_path, mode='readonly') as hdulist:
             bintable_data = hdulist[1].data
             times = bintable_data['TIME'][0]
-            return [self.
+            return [self.start_datetime + timedelta(seconds=t) for t in times]


     def read(self) -> Spectrogram:
         with fits.open(self.file_path, mode='readonly') as hdulist:
-            primary_hdu
-            dynamic_spectra
-            spectrum_type
-
-            bintable_hdu
-            times, frequencies
+            primary_hdu = self._get_primary_hdu(hdulist)
+            dynamic_spectra = self._get_dynamic_spectra(primary_hdu)
+            spectrum_type = self._get_spectrum_type(primary_hdu)
+            spectrogram_start_datetime = self._get_spectrogram_start_datetime(primary_hdu)
+            bintable_hdu = self._get_bintable_hdu(hdulist)
+            times, frequencies = self._get_time_and_frequency(bintable_hdu)

             return Spectrogram(dynamic_spectra,
                                times,
                                frequencies,
-                               self.tag,
-
-
-                               spectrum_type = spectrum_type)
+                               self.tag,
+                               spectrogram_start_datetime,
+                               spectrum_type)


     def _get_primary_hdu(self, hdulist: HDUList) -> PrimaryHDU:
@@ -105,14 +111,13 @@ class FitsChunk(ChunkFile):


     def _get_spectrum_type(self, primary_hdu: PrimaryHDU) -> str:
-        return primary_hdu.header
+        return primary_hdu.header['BUNIT']


-    def
-        date_obs = primary_hdu.header
-        time_obs = primary_hdu.header
-
-        return datetime_obs.microsecond
+    def _get_spectrogram_start_datetime(self, primary_hdu: PrimaryHDU) -> datetime:
+        date_obs = primary_hdu.header['DATE-OBS']
+        time_obs = primary_hdu.header['TIME-OBS']
+        return datetime.strptime(f"{date_obs}T{time_obs}", TimeFormats.PRECISE_DATETIME)


     def _get_bintable_hdu(self, hdulist: HDUList) -> BinTableHDU:
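For context on the new `_get_spectrogram_start_datetime`, the FITS primary header's `DATE-OBS` and `TIME-OBS` cards are joined and parsed with the precise datetime format introduced elsewhere in this release. Below is a minimal, self-contained sketch of that parsing using an in-memory astropy HDU rather than spectre_core's own classes; the header values are invented.

```python
from datetime import datetime
from astropy.io import fits

# Build a throwaway primary HDU carrying the two header cards the reader expects.
primary_hdu = fits.PrimaryHDU()
primary_hdu.header['DATE-OBS'] = '2024-01-02'
primary_hdu.header['TIME-OBS'] = '03:04:05.678901'

# Equivalent of TimeFormats.PRECISE_DATETIME as defined later in this diff.
PRECISE_DATETIME = "%Y-%m-%dT%H:%M:%S.%f"

date_obs = primary_hdu.header['DATE-OBS']
time_obs = primary_hdu.header['TIME-OBS']
start = datetime.strptime(f"{date_obs}T{time_obs}", PRECISE_DATETIME)
print(start)  # 2024-01-02 03:04:05.678901
```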
spectre_core/{chunks → batches}/library/_swept_center_frequency.py
CHANGED
@@ -9,14 +9,14 @@ import numpy as np

 from spectre_core.exceptions import InvalidSweepMetadataError
 from spectre_core.capture_configs import CaptureModes
-from ._fixed_center_frequency import
-from .._register import
-from .._base import
+from ._fixed_center_frequency import BinFile, FitsFile
+from .._register import register_batch
+from .._base import BaseBatch, BatchFile


 @dataclass
 class SweepMetadata:
-    """Wrapper for metadata required to assign center frequencies to each IQ sample in the
+    """Wrapper for metadata required to assign center frequencies to each IQ sample in the batch.

     center_frequencies is an ordered list containing all the center frequencies that the IQ samples
     were collected at. Typically, these will be ordered in "steps", where each step corresponds to
@@ -25,10 +25,8 @@ class SweepMetadata:
     (freq_0, freq_1, ..., freq_M, freq_0, freq_1, ..., freq_M, ...), freq_0 < freq_1 < ... < freq_M

     The n'th element of the num_samples list, tells us how many samples were collected at the n'th
-    element of center_frequencies
-
-    BinChunk, FitsChunk
-)
+    element of center_frequencies.
+
     Number of samples: (num_samples_at_freq_0, num_samples_at_freq_1, ...)

     Both these lists together allow us to map for each IQ sample, the center frequency it was collected at.
@@ -37,19 +35,23 @@ chunks.library.fixed_center_frequency.chunk import (
     num_samples: np.ndarray


-@
-class
-    def __init__(self,
-
-
-
-        self.add_file(
-
-
+@register_batch(CaptureModes.SWEPT_CENTER_FREQUENCY)
+class _Batch(BaseBatch):
+    def __init__(self,
+                 start_time: str,
+                 tag: str):
+        super().__init__(start_time, tag)
+        self.add_file( HdrFile(self.parent_dir_path, self.name) )
+        # reuse the binary and fits batch from the fixed center frequency case.
+        self.add_file( BinFile(self.parent_dir_path, self.name) )
+        self.add_file( FitsFile(self.parent_dir_path, self.name))
+

-class
-    def __init__(self,
-
+class HdrFile(BatchFile):
+    def __init__(self,
+                 parent_dir_path: str,
+                 base_file_name: str):
+        super().__init__(parent_dir_path, base_file_name, "hdr")

     def read(self) -> Tuple[int, SweepMetadata]:
         hdr_contents = self._read_file_contents()
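The `SweepMetadata` docstring above fully specifies how the two arrays map IQ samples to center frequencies, so the mapping can be illustrated with plain NumPy; the values below are invented for the example.

```python
import numpy as np

# Two sweeps over three steps, ordered as described in the docstring:
# (freq_0, freq_1, freq_2, freq_0, freq_1, freq_2), freq_0 < freq_1 < freq_2
center_frequencies = np.array([95e6, 96e6, 97e6, 95e6, 96e6, 97e6])
# num_samples[n] is how many IQ samples were collected at center_frequencies[n].
num_samples = np.array([4, 4, 3, 4, 4, 3])

# Expand to one center frequency per IQ sample.
per_sample_frequencies = np.repeat(center_frequencies, num_samples)
assert len(per_sample_frequencies) == num_samples.sum()
```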
spectre_core/capture_configs/_capture_templates.py
CHANGED
@@ -138,7 +138,7 @@ def _make_fixed_frequency_capture_template(
     capture_template = make_base_capture_template(
         PNames.BATCH_SIZE,
         PNames.CENTER_FREQUENCY,
-        PNames.
+        PNames.BATCH_KEY,
         PNames.EVENT_HANDLER_KEY,
         PNames.FREQUENCY_RESOLUTION,
         PNames.INSTRUMENT,
@@ -158,12 +158,12 @@ def _make_fixed_frequency_capture_template(
     )
     capture_template.set_defaults(
         (PNames.EVENT_HANDLER_KEY, CaptureModes.FIXED_CENTER_FREQUENCY),
-        (PNames.
+        (PNames.BATCH_KEY, CaptureModes.FIXED_CENTER_FREQUENCY),
         (PNames.WATCH_EXTENSION, "bin")
     )
     capture_template.enforce_defaults(
         PNames.EVENT_HANDLER_KEY,
-        PNames.
+        PNames.BATCH_KEY,
         PNames.WATCH_EXTENSION
     )
     return capture_template
@@ -173,7 +173,7 @@ def _make_swept_frequency_capture_template(
     """The absolute minimum required parameters for any swept frequency capture template."""
     capture_template = make_base_capture_template(
         PNames.BATCH_SIZE,
-        PNames.
+        PNames.BATCH_KEY,
         PNames.EVENT_HANDLER_KEY,
         PNames.FREQUENCY_RESOLUTION,
         PNames.FREQUENCY_STEP,
@@ -196,12 +196,12 @@ def _make_swept_frequency_capture_template(
         PNames.WINDOW_TYPE)
     capture_template.set_defaults(
         (PNames.EVENT_HANDLER_KEY, CaptureModes.SWEPT_CENTER_FREQUENCY),
-        (PNames.
+        (PNames.BATCH_KEY, CaptureModes.SWEPT_CENTER_FREQUENCY),
         (PNames.WATCH_EXTENSION, "bin")
     )
     capture_template.enforce_defaults(
         PNames.EVENT_HANDLER_KEY,
-        PNames.
+        PNames.BATCH_KEY,
         PNames.WATCH_EXTENSION
     )
     return capture_template
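The `set_defaults`/`enforce_defaults` calls above suggest that `batch_key`, `event_handler_key`, and `watch_extension` are filled in automatically and pinned to their defaults. Their real semantics live inside spectre_core and are not shown in this diff; the following is only a standalone sketch of one plausible reading, with invented key values.

```python
from typing import Any

def apply_defaults(user_params: dict[str, Any],
                   defaults: dict[str, Any],
                   enforced: set[str]) -> dict[str, Any]:
    """Fill in missing defaults, and reject user overrides of enforced keys."""
    merged = {**defaults, **user_params}
    for name in enforced:
        if name in user_params and user_params[name] != defaults[name]:
            raise ValueError(f"'{name}' is fixed to '{defaults[name]}' and cannot be overridden")
        merged[name] = defaults[name]
    return merged

# Hypothetical fixed-center-frequency capture parameters.
params = apply_defaults(
    {"center_frequency": 95e6},
    defaults={"batch_key": "fixed-center-frequency",
              "event_handler_key": "fixed-center-frequency",
              "watch_extension": "bin"},
    enforced={"batch_key", "event_handler_key", "watch_extension"})
```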
spectre_core/capture_configs/_parameters.py
CHANGED
@@ -47,14 +47,11 @@ class Parameters:

     def add_parameter(self,
                       name: str,
-                      value: Optional[T] = None
-                      force: bool = False) -> None:
+                      value: Optional[T] = None) -> None:
         """Add a new parameter."""
-        if name in self._parameters
+        if name in self._parameters:
             raise ValueError(f"Cannot add a parameter with name '{name}', "
-                             f"since a parameter already exists with that name. "
-                             f"You can overrride this functionality with 'force', "
-                             f"to overwrite the existing parameter.")
+                             f"since a parameter already exists with that name. ")
         self._parameters[name] = Parameter(name, value)


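The simplification above drops the `force` escape hatch, so adding a parameter under an existing name now always raises. A standalone sketch of that behaviour (the classes here are stand-ins, not spectre_core's own):

```python
from dataclasses import dataclass
from typing import Any, Optional

@dataclass
class Parameter:
    name: str
    value: Optional[Any] = None

class Parameters:
    def __init__(self) -> None:
        self._parameters: dict[str, Parameter] = {}

    def add_parameter(self, name: str, value: Optional[Any] = None) -> None:
        """Add a new parameter; duplicate names are rejected outright."""
        if name in self._parameters:
            raise ValueError(f"Cannot add a parameter with name '{name}', "
                             f"since a parameter already exists with that name.")
        self._parameters[name] = Parameter(name, value)

params = Parameters()
params.add_parameter("batch_size", 4.0)
# params.add_parameter("batch_size", 8.0)  # would raise ValueError
```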
spectre_core/capture_configs/_ptemplates.py
CHANGED
@@ -178,7 +178,7 @@ class PNames:
     WINDOW_SIZE : str = "window_size"
     EVENT_HANDLER_KEY : str = "event_handler_key"
     WATCH_EXTENSION : str = "watch_extension"
-
+    BATCH_KEY : str = "batch_key"
     SAMPLES_PER_STEP : str = "samples_per_step"
     MIN_SAMPLES_PER_STEP : str = "min_samples_per_step"
     MAX_SAMPLES_PER_STEP : str = "max_samples_per_step"
@@ -271,10 +271,10 @@ _base_ptemplates = {
                                            Identifies which post-processing functions to invoke
                                            on newly created files.
                                            """),
-    PNames.
+    PNames.BATCH_KEY: PTemplate(PNames.BATCH_KEY,
                                str,
                                help = """
-                                      Identifies the type of data is stored in each
+                                      Identifies the type of data is stored in each batch.
                                      """,
                                ),
     PNames.WINDOW_SIZE: PTemplate(PNames.WINDOW_SIZE,
spectre_core/capture_configs/_pvalidators.py
CHANGED
@@ -26,9 +26,9 @@ def _validate_window(

     window_interval = window_size*(1 / sample_rate)
     if window_interval > batch_size:
-        raise ValueError((f"The windowing interval must be strictly less than the
+        raise ValueError((f"The windowing interval must be strictly less than the batch size. "
                           f"Computed the windowing interval to be {window_interval} [s], "
-                          f"but the
+                          f"but the batch size is {batch_size} [s]"))

     try:
         _ = get_window(window_type, window_size)
@@ -90,9 +90,9 @@ def _validate_sweep_interval(
     num_samples_per_sweep = num_steps_per_sweep * samples_per_step
     sweep_interval = num_samples_per_sweep * 1/sample_rate
     if sweep_interval > batch_size:
-        raise ValueError((f"Sweep interval must be less than the
+        raise ValueError((f"Sweep interval must be less than the batch size. "
                           f"The computed sweep interval is {sweep_interval} [s], "
-                          f"but the given
+                          f"but the given batch size is {batch_size} [s]"))


 def _validate_num_samples_per_step(
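A worked example of the two checks above, with invented numbers: a 1024-sample window at a 2 MHz sample rate spans 0.512 ms, and a sweep of 16 steps at 100,000 samples per step spans 0.8 s; both must fit within the batch size.

```python
sample_rate = 2_000_000      # [Hz], invented for the example
window_size = 1024           # [samples]
batch_size = 3.0             # [s]

window_interval = window_size * (1 / sample_rate)   # 0.000512 [s]
assert window_interval <= batch_size

num_steps_per_sweep = 16
samples_per_step = 100_000
sweep_interval = num_steps_per_sweep * samples_per_step / sample_rate   # 0.8 [s]
assert sweep_interval <= batch_size
```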
spectre_core/config/__init__.py
CHANGED
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: GPL-3.0-or-later

 from ._paths import (
-    get_spectre_data_dir_path,
+    get_spectre_data_dir_path, get_batches_dir_path, get_configs_dir_path, get_logs_dir_path
 )
 from ._time_formats import (
     TimeFormats
@@ -11,7 +11,7 @@ from ._time_formats import (

 __all__ = [
     "get_spectre_data_dir_path",
-    "
+    "get_batches_dir_path",
     "get_configs_dir_path",
     "get_logs_dir_path",
     "DEFAULT_DATE_FORMAT",
spectre_core/config/_paths.py
CHANGED
@@ -12,9 +12,9 @@ _SPECTRE_DATA_DIR_PATH = os.environ.get("SPECTRE_DATA_DIR_PATH")
 if _SPECTRE_DATA_DIR_PATH is None:
     raise ValueError("The environment variable SPECTRE_DATA_DIR_PATH has not been set")

-
-                  os.path.join(_SPECTRE_DATA_DIR_PATH, '
-os.makedirs(
+_BATCHES_DIR_PATH = os.environ.get("SPECTRE_BATCHES_DIR_PATH",
+                                   os.path.join(_SPECTRE_DATA_DIR_PATH, 'batches'))
+os.makedirs(_BATCHES_DIR_PATH,
             exist_ok=True)

 _LOGS_DIR_PATH = os.environ.get("SPECTRE_LOGS_DIR_PATH",
@@ -52,11 +52,11 @@ def _get_date_based_dir_path(base_dir: str, year: int = None,
     return os.path.join(base_dir, *date_dir_components)


-def
+def get_batches_dir_path(year: int = None,
                          month: int = None,
                          day: int = None
                          ) -> str:
-    return _get_date_based_dir_path(
+    return _get_date_based_dir_path(_BATCHES_DIR_PATH,
                                     year,
                                     month,
                                     day)
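The renamed helper resolves a date-nested directory under the batches root. A rough sketch of the layout it appears to produce, assuming `_get_date_based_dir_path` simply joins zero-padded year/month/day components (the exact padding is not visible in this diff):

```python
import os

def get_batches_dir_path(base_dir: str, year=None, month=None, day=None) -> str:
    # Keep only the components that were supplied, most significant first.
    components = [f"{part:02d}" for part in (year, month, day) if part is not None]
    return os.path.join(base_dir, *components)

print(get_batches_dir_path("/data/spectre/batches"))              # /data/spectre/batches
print(get_batches_dir_path("/data/spectre/batches", 2024, 1, 2))  # /data/spectre/batches/2024/01/02
```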
spectre_core/config/_time_formats.py
CHANGED
@@ -10,6 +10,8 @@ from dataclasses import dataclass

 @dataclass(frozen=True)
 class TimeFormats:
-
-
-
+    DATE             = "%Y-%m-%d"
+    TIME             = "%H:%M:%S"
+    PRECISE_TIME     = "%H:%M:%S.%f"
+    DATETIME         = f"{DATE}T{TIME}"
+    PRECISE_DATETIME = f"{DATE}T{PRECISE_TIME}"
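The new constants are ordinary `strftime`/`strptime` format strings and compose as shown above; for example (timestamp invented):

```python
from datetime import datetime

DATE             = "%Y-%m-%d"
TIME             = "%H:%M:%S"
PRECISE_TIME     = "%H:%M:%S.%f"
DATETIME         = f"{DATE}T{TIME}"
PRECISE_DATETIME = f"{DATE}T{PRECISE_TIME}"

stamp = datetime(2024, 1, 2, 3, 4, 5, 678901)
print(stamp.strftime(DATETIME))          # 2024-01-02T03:04:05
print(stamp.strftime(PRECISE_DATETIME))  # 2024-01-02T03:04:05.678901
print(datetime.strptime("2024-01-02T03:04:05.678901", PRECISE_DATETIME))
```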
spectre_core/exceptions.py
CHANGED
@@ -6,8 +6,8 @@
 SPECTRE custom exceptions.
 """

-class
-class
+class BatchNotFoundError(FileNotFoundError): ...
+class BatchFileNotFoundError(FileNotFoundError): ...
 class SpectrogramNotFoundError(FileNotFoundError): ...
 class ModeNotFoundError(KeyError): ...
 class EventHandlerNotFoundError(KeyError): ...
spectre_core/logging/_configure.py
CHANGED
@@ -16,7 +16,7 @@ def configure_root_logger(process_type: str,
     datetime_stamp = system_datetime.strftime(TimeFormats.DATETIME)
     pid = os.getpid()
     log_handler = LogHandler(datetime_stamp, pid, process_type)
-    log_handler.
+    log_handler.make_parent_dir_path()

     # configure the root logger
     logger = logging.getLogger()
spectre_core/logging/_log_handlers.py
CHANGED
@@ -66,8 +66,8 @@ class LogHandlers:
                  year: Optional[int] = None,
                  month: Optional[int] = None,
                  day: Optional[int] = None):
-        self._log_handler_map: dict[str, LogHandler] = OrderedDict()
         self._process_type = process_type
+        self._log_handler_map: dict[str, LogHandler] = OrderedDict()
         self.set_date(year, month, day)


spectre_core/plotting/_panels.py
CHANGED
@@ -170,7 +170,7 @@ class _SpectrogramPanel(BaseTimeSeriesPanel):


     def draw(self):
-        dynamic_spectra = self._spectrogram.
+        dynamic_spectra = self._spectrogram.dynamic_spectra_dBb if self._dBb else self._spectrogram.dynamic_spectra

         norm = LogNorm(vmin=np.nanmin(dynamic_spectra[dynamic_spectra > 0]),
                        vmax=np.nanmax(dynamic_spectra)) if self._log_norm else None
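The `vmin` choice in the changed `draw` method matters because `LogNorm` cannot scale zeros or negatives, so the minimum is taken over strictly positive entries only. Illustrated on synthetic data:

```python
import numpy as np
from matplotlib.colors import LogNorm

dynamic_spectra = np.array([[0.0, 1e-3, 2e-2],
                            [5e-4, 0.0, 7e-1]])

# Smallest strictly positive value, so the log scaling never sees zero.
norm = LogNorm(vmin=np.nanmin(dynamic_spectra[dynamic_spectra > 0]),
               vmax=np.nanmax(dynamic_spectra))
print(norm.vmin, norm.vmax)  # 0.0005 0.7
```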
spectre_core/post_processing/__init__.py
CHANGED
@@ -6,9 +6,9 @@
 from .library._fixed_center_frequency import _EventHandler
 from .library._swept_center_frequency import _EventHandler

-from ._factory import
+from ._factory import get_event_handler_cls_from_tag
 from ._post_processor import PostProcessor

 __all__ = [
-    "PostProcessor", "
+    "PostProcessor", "get_event_handler_cls_from_tag"
 ]
spectre_core/post_processing/_base.py
CHANGED
@@ -12,7 +12,7 @@ from scipy.signal import ShortTimeFFT, get_window
 from watchdog.events import FileSystemEventHandler, FileCreatedEvent

 from spectre_core.capture_configs import CaptureConfig, PNames
-from spectre_core.
+from spectre_core.batches import BaseBatch, get_batch_cls_from_tag
 from spectre_core.spectrograms import Spectrogram, join_spectrograms


@@ -35,8 +35,8 @@ class BaseEventHandler(ABC, FileSystemEventHandler):
                  tag: str):
         self._tag = tag

-        # the tag tells us 'what type' of data is stored in the files for each
-        self.
+        # the tag tells us 'what type' of data is stored in the files for each batch
+        self._Batch = get_batch_cls_from_tag(tag)
         # load the capture config corresponding to the tag
         self._capture_config = CaptureConfig(tag)

@@ -112,8 +112,8 @@ class BaseEventHandler(ABC, FileSystemEventHandler):

     def _flush_cache(self) -> None:
         if self._cached_spectrogram:
-            _LOGGER.info(f"Flushing spectrogram to file with
-                         f"'{self._cached_spectrogram.
+            _LOGGER.info(f"Flushing spectrogram to file with start time "
+                         f"'{self._cached_spectrogram.format_start_time(precise=True)}'")
             self._cached_spectrogram.save()
             _LOGGER.info("Flush successful, resetting spectrogram cache")
             self._cached_spectrogram = None # reset the cache
spectre_core/post_processing/_factory.py
CHANGED
@@ -8,7 +8,7 @@ from spectre_core.capture_configs import CaptureConfig, PNames
 from spectre_core.exceptions import EventHandlerNotFoundError


-def
+def _get_event_handler_cls(event_handler_key: str) -> BaseEventHandler:
     EventHandler = event_handler_map.get(event_handler_key)
     if EventHandler is None:
         valid_event_handler_keys = list(event_handler_map.keys())
@@ -17,7 +17,7 @@ def _get_event_handler(event_handler_key: str) -> BaseEventHandler:
     return EventHandler


-def
+def get_event_handler_cls_from_tag(tag: str) -> BaseEventHandler:
     capture_config = CaptureConfig(tag)
     event_handler_key = capture_config.get_parameter_value(PNames.EVENT_HANDLER_KEY)
-    return
+    return _get_event_handler_cls(event_handler_key)
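The renamed factory performs a two-step lookup: tag → capture config → event handler key → handler class. A standalone sketch with a stand-in registry and invented keys (spectre_core's real registry is populated by `@register_event_handler` and its keys are not shown here):

```python
class FixedEventHandler: ...
class SweptEventHandler: ...

# Stand-in for the module-level registry populated by @register_event_handler.
event_handler_map = {
    "fixed-center-frequency": FixedEventHandler,
    "swept-center-frequency": SweptEventHandler,
}

def _get_event_handler_cls(event_handler_key: str):
    EventHandler = event_handler_map.get(event_handler_key)
    if EventHandler is None:
        valid_keys = list(event_handler_map.keys())
        raise KeyError(f"No event handler found for '{event_handler_key}'. "
                       f"Valid keys are: {valid_keys}")
    return EventHandler

def get_event_handler_cls_from_tag(tag: str, capture_configs: dict[str, dict]):
    # In spectre_core the capture config is loaded from disk by tag;
    # here it is passed in as a plain dict to keep the sketch self-contained.
    event_handler_key = capture_configs[tag]["event_handler_key"]
    return _get_event_handler_cls(event_handler_key)

Handler = get_event_handler_cls_from_tag(
    "my-tag", {"my-tag": {"event_handler_key": "fixed-center-frequency"}})
```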
spectre_core/post_processing/_post_processor.py
CHANGED
@@ -8,8 +8,8 @@ _LOGGER = getLogger(__name__)
 from watchdog.observers import Observer
 from watchdog.events import FileCreatedEvent

-from ._factory import
-from spectre_core.config import
+from ._factory import get_event_handler_cls_from_tag
+from spectre_core.config import get_batches_dir_path

 class PostProcessor:
     def __init__(self,
@@ -17,14 +17,14 @@ class PostProcessor:

         self._observer = Observer()

-        EventHandler =
+        EventHandler = get_event_handler_cls_from_tag(tag)
         self._event_handler = EventHandler(tag)


     def start(self):
-        """Start an observer to process newly created files in the
+        """Start an observer to process newly created files in the batches directory"""
         self._observer.schedule(self._event_handler,
-
+                                get_batches_dir_path(),
                                 recursive=True,
                                 event_filter=[FileCreatedEvent])

spectre_core/post_processing/library/_fixed_center_frequency.py
CHANGED
@@ -7,11 +7,12 @@ _LOGGER = getLogger(__name__)

 import numpy as np
 from typing import Tuple
+from datetime import timedelta

 import os

 from spectre_core.capture_configs import CaptureConfig, PNames, CaptureModes
-from spectre_core.
+from spectre_core.batches import BaseBatch
 from spectre_core.spectrograms import Spectrogram, time_average, frequency_average
 from .._base import BaseEventHandler, make_sft_instance
 from .._register import register_event_handler
@@ -51,16 +52,13 @@ def _do_stfft(iq_data: np.array,
     return times, frequencies, dynamic_spectra


-def _build_spectrogram(
+def _build_spectrogram(batch: BaseBatch,
                        capture_config: CaptureConfig) -> Spectrogram:
-    """Create a spectrogram by performing a Short Time FFT on the IQ samples for this
+    """Create a spectrogram by performing a Short Time FFT on the IQ samples for this batch."""

-    # read the data from the
-    millisecond_correction =
-    iq_data =
-
-    # units conversion
-    microsecond_correction = millisecond_correction * 1e3
+    # read the data from the batch
+    millisecond_correction = batch.read_file("hdr")
+    iq_data = batch.read_file("bin")

     times, frequencies, dynamic_spectra = _do_stfft(iq_data,
                                                     capture_config)
@@ -70,12 +68,13 @@ def _build_spectrogram(chunk: BaseChunk,
     frequencies = np.array(frequencies, dtype = 'float32')
     dynamic_spectra = np.array(dynamic_spectra, dtype = 'float32')

-
-
-
-
-
-
+    # compute the start datetime for the spectrogram by adding the millisecond component to the batch start time
+    spectrogram_start_datetime = batch.start_datetime + timedelta(milliseconds=millisecond_correction)
+    return Spectrogram(dynamic_spectra,
+                       times,
+                       frequencies,
+                       batch.tag,
+                       spectrogram_start_datetime,
                        spectrum_type = "amplitude")


@@ -89,13 +88,13 @@ class _EventHandler(BaseEventHandler):
         _LOGGER.info(f"Processing: {absolute_file_path}")
         file_name = os.path.basename(absolute_file_path)
         base_file_name, _ = os.path.splitext(file_name)
-
+        batch_start_time, tag = base_file_name.split('_')

-        # create an instance of the current
-
+        # create an instance of the current batch being processed
+        batch = self._Batch(batch_start_time, tag)

         _LOGGER.info("Creating spectrogram")
-        spectrogram = _build_spectrogram(
+        spectrogram = _build_spectrogram(batch,
                                          self._capture_config)

         spectrogram = time_average(spectrogram,
@@ -106,10 +105,10 @@ class _EventHandler(BaseEventHandler):

         self._cache_spectrogram(spectrogram)

-
-        _LOGGER.info(f"Deleting {
-
+        bin_file = batch.get_file('bin')
+        _LOGGER.info(f"Deleting {bin_file.file_path}")
+        bin_file.delete()

-
-        _LOGGER.info(f"Deleting {
-
+        hdr_file = batch.get_file('hdr')
+        _LOGGER.info(f"Deleting {hdr_file.file_path}")
+        hdr_file.delete()
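Two details from the hunks above worth spelling out: batch file names encode the start time and the tag separated by an underscore, and the header file's millisecond correction is added to the batch start time to obtain the spectrogram start time. A standalone illustration with an invented file name and values; the timestamp format in real batch names is assumed here to match `TimeFormats.DATETIME`.

```python
import os
from datetime import datetime, timedelta

# A created file such as '2024-01-02T03:04:05_my-tag.bin' splits into start time and tag.
file_name = os.path.basename("/data/spectre/batches/2024/01/02/2024-01-02T03:04:05_my-tag.bin")
base_file_name, _ = os.path.splitext(file_name)
batch_start_time, tag = base_file_name.split('_')

# The .hdr file stores a millisecond correction to add to the batch start time.
millisecond_correction = 250
batch_start_datetime = datetime.strptime(batch_start_time, "%Y-%m-%dT%H:%M:%S")
spectrogram_start_datetime = batch_start_datetime + timedelta(milliseconds=millisecond_correction)
print(spectrogram_start_datetime)  # 2024-01-02 03:04:05.250000
```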