spectre-core 0.0.10-py3-none-any.whl → 0.0.12-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. spectre_core/_file_io/file_handlers.py +12 -12
  2. spectre_core/batches/__init__.py +22 -0
  3. spectre_core/batches/_base.py +146 -0
  4. spectre_core/batches/_batches.py +197 -0
  5. spectre_core/batches/_factory.py +27 -0
  6. spectre_core/{chunks → batches}/_register.py +5 -5
  7. spectre_core/{chunks → batches}/library/_callisto.py +31 -33
  8. spectre_core/{chunks → batches}/library/_fixed_center_frequency.py +43 -38
  9. spectre_core/{chunks → batches}/library/_swept_center_frequency.py +22 -20
  10. spectre_core/capture_configs/_capture_templates.py +6 -6
  11. spectre_core/capture_configs/_parameters.py +3 -6
  12. spectre_core/capture_configs/_ptemplates.py +3 -3
  13. spectre_core/capture_configs/_pvalidators.py +6 -8
  14. spectre_core/config/__init__.py +2 -2
  15. spectre_core/config/_paths.py +5 -5
  16. spectre_core/config/_time_formats.py +5 -3
  17. spectre_core/exceptions.py +2 -2
  18. spectre_core/logging/_configure.py +1 -1
  19. spectre_core/logging/_log_handlers.py +1 -1
  20. spectre_core/plotting/_panels.py +1 -1
  21. spectre_core/post_processing/__init__.py +2 -2
  22. spectre_core/post_processing/_base.py +6 -6
  23. spectre_core/post_processing/_factory.py +3 -3
  24. spectre_core/post_processing/_post_processor.py +5 -5
  25. spectre_core/post_processing/library/_fixed_center_frequency.py +24 -25
  26. spectre_core/post_processing/library/_swept_center_frequency.py +68 -83
  27. spectre_core/receivers/__init__.py +1 -0
  28. spectre_core/receivers/_base.py +1 -173
  29. spectre_core/receivers/gr/_base.py +1 -1
  30. spectre_core/receivers/gr/_rsp1a.py +8 -8
  31. spectre_core/receivers/gr/_rspduo.py +227 -0
  32. spectre_core/receivers/gr/_test.py +5 -5
  33. spectre_core/receivers/library/_rsp1a.py +4 -4
  34. spectre_core/receivers/library/_rspduo.py +69 -0
  35. spectre_core/receivers/library/_sdrplay_receiver.py +185 -0
  36. spectre_core/receivers/library/_test.py +3 -3
  37. spectre_core/spectrograms/_analytical.py +0 -6
  38. spectre_core/spectrograms/_spectrogram.py +113 -79
  39. spectre_core/spectrograms/_transform.py +19 -36
  40. spectre_core/wgetting/_callisto.py +20 -24
  41. {spectre_core-0.0.10.dist-info → spectre_core-0.0.12.dist-info}/METADATA +1 -1
  42. spectre_core-0.0.12.dist-info/RECORD +64 -0
  43. spectre_core/chunks/__init__.py +0 -22
  44. spectre_core/chunks/_base.py +0 -116
  45. spectre_core/chunks/_chunks.py +0 -200
  46. spectre_core/chunks/_factory.py +0 -25
  47. spectre_core-0.0.10.dist-info/RECORD +0 -63
  48. {spectre_core-0.0.10.dist-info → spectre_core-0.0.12.dist-info}/LICENSE +0 -0
  49. {spectre_core-0.0.10.dist-info → spectre_core-0.0.12.dist-info}/WHEEL +0 -0
  50. {spectre_core-0.0.10.dist-info → spectre_core-0.0.12.dist-info}/top_level.txt +0 -0

spectre_core/{chunks → batches}/library/_fixed_center_frequency.py
@@ -11,24 +11,29 @@ from astropy.io.fits.hdu.image import PrimaryHDU
  from astropy.io.fits.hdu.table import BinTableHDU
  from astropy.io.fits.hdu.hdulist import HDUList
 
+ from spectre_core.config import TimeFormats
  from spectre_core.spectrograms import Spectrogram
  from spectre_core.capture_configs import CaptureModes
- from .._register import register_chunk
- from .._base import BaseChunk, ChunkFile
+ from .._register import register_batch
+ from .._base import BaseBatch, BatchFile
 
- @register_chunk(CaptureModes.FIXED_CENTER_FREQUENCY)
- class _Chunk(BaseChunk):
-     def __init__(self, *args, **kwargs):
-         super().__init__(*args, **kwargs)
-
-         self.add_file(BinChunk(self.chunk_parent_path, self.chunk_name))
-         self.add_file(FitsChunk(self.chunk_parent_path, self.chunk_name))
-         self.add_file(HdrChunk(self.chunk_parent_path, self.chunk_name))
+
+ @register_batch(CaptureModes.FIXED_CENTER_FREQUENCY)
+ class _Batch(BaseBatch):
+     def __init__(self,
+                  start_time: str,
+                  tag: str):
+         super().__init__(start_time, tag)
+         self.add_file( BinFile(self.parent_dir_path, self.name) )
+         self.add_file( HdrFile(self.parent_dir_path, self.name) )
+         self.add_file( FitsFile(self.parent_dir_path, self.name))
 
 
- class BinChunk(ChunkFile):
-     def __init__(self, chunk_parent_path: str, chunk_name: str):
-         super().__init__(chunk_parent_path, chunk_name, "bin")
+ class BinFile(BatchFile):
+     def __init__(self,
+                  parent_dir_path: str,
+                  base_file_name: str):
+         super().__init__(parent_dir_path, base_file_name, "bin")
 
      def read(self) -> np.ndarray:
          with open(self.file_path, "rb") as fh:
@@ -36,9 +41,11 @@ class BinChunk(ChunkFile):
 
 
 
- class HdrChunk(ChunkFile):
-     def __init__(self, chunk_parent_path: str, chunk_name: str):
-         super().__init__(chunk_parent_path, chunk_name, "hdr")
+ class HdrFile(BatchFile):
+     def __init__(self,
+                  parent_dir_path: str,
+                  base_file_name: str):
+         super().__init__(parent_dir_path, base_file_name, "hdr")
 
 
      def read(self) -> int:
@@ -63,11 +70,11 @@ class HdrChunk(ChunkFile):
          return int(millisecond_correction_as_float)
 
 
-
-
- class FitsChunk(ChunkFile):
-     def __init__(self, chunk_parent_path: str, chunk_name: str):
-         super().__init__(chunk_parent_path, chunk_name, "fits")
+ class FitsFile(BatchFile):
+     def __init__(self,
+                  parent_dir_path: str,
+                  base_file_name: str):
+         super().__init__(parent_dir_path, base_file_name, "fits")
 
 
      @property
@@ -75,25 +82,24 @@ class FitsChunk(ChunkFile):
          with fits.open(self.file_path, mode='readonly') as hdulist:
              bintable_data = hdulist[1].data
             times = bintable_data['TIME'][0]
-             return [self.chunk_start_datetime + timedelta(seconds=t) for t in times]
+             return [self.start_datetime + timedelta(seconds=t) for t in times]
 
 
      def read(self) -> Spectrogram:
          with fits.open(self.file_path, mode='readonly') as hdulist:
-             primary_hdu = self._get_primary_hdu(hdulist)
-             dynamic_spectra = self._get_dynamic_spectra(primary_hdu)
-             spectrum_type = self._get_spectrum_type(primary_hdu)
-             microsecond_correction = self._get_microsecond_correction(primary_hdu)
-             bintable_hdu = self._get_bintable_hdu(hdulist)
-             times, frequencies = self._get_time_and_frequency(bintable_hdu)
+             primary_hdu = self._get_primary_hdu(hdulist)
+             dynamic_spectra = self._get_dynamic_spectra(primary_hdu)
+             spectrum_type = self._get_spectrum_type(primary_hdu)
+             spectrogram_start_datetime = self._get_spectrogram_start_datetime(primary_hdu)
+             bintable_hdu = self._get_bintable_hdu(hdulist)
+             times, frequencies = self._get_time_and_frequency(bintable_hdu)
 
              return Spectrogram(dynamic_spectra,
                                 times,
                                 frequencies,
-                                self.tag,
-                                chunk_start_time=self.chunk_start_time,
-                                microsecond_correction=microsecond_correction,
-                                spectrum_type = spectrum_type)
+                                self.tag,
+                                spectrogram_start_datetime,
+                                spectrum_type)
 
 
      def _get_primary_hdu(self, hdulist: HDUList) -> PrimaryHDU:
@@ -105,14 +111,13 @@ class FitsChunk(ChunkFile):
 
 
      def _get_spectrum_type(self, primary_hdu: PrimaryHDU) -> str:
-         return primary_hdu.header.get('BUNIT', None)
+         return primary_hdu.header['BUNIT']
 
 
-     def _get_microsecond_correction(self, primary_hdu: PrimaryHDU) -> int:
-         date_obs = primary_hdu.header.get('DATE-OBS', None)
-         time_obs = primary_hdu.header.get('TIME-OBS', None)
-         datetime_obs = datetime.strptime(f"{date_obs}T{time_obs}", "%Y-%m-%dT%H:%M:%S.%f")
-         return datetime_obs.microsecond
+     def _get_spectrogram_start_datetime(self, primary_hdu: PrimaryHDU) -> datetime:
+         date_obs = primary_hdu.header['DATE-OBS']
+         time_obs = primary_hdu.header['TIME-OBS']
+         return datetime.strptime(f"{date_obs}T{time_obs}", TimeFormats.PRECISE_DATETIME)
 
 
      def _get_bintable_hdu(self, hdulist: HDUList) -> BinTableHDU:
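A pattern that recurs throughout this file: `Spectrogram` is no longer handed a `chunk_start_time` string plus a `microsecond_correction`, but a single `datetime` marking the start of the spectrogram. A minimal sketch of the new call shape, consistent with the call sites above (the tag value, array shapes, and orientation are assumptions, since the `Spectrogram` class itself is not shown in this diff):

    import numpy as np
    from datetime import datetime
    from spectre_core.spectrograms import Spectrogram

    # Assumed orientation: one row per frequency bin, one column per time step.
    dynamic_spectra = np.zeros((128, 64), dtype="float32")
    times = np.linspace(0.0, 1.0, 64, dtype="float32")
    frequencies = np.linspace(95e6, 96e6, 128, dtype="float32")

    spectrogram = Spectrogram(dynamic_spectra,
                              times,
                              frequencies,
                              "my-tag",                        # hypothetical tag
                              datetime(2025, 1, 1, 12, 0, 0),  # start datetime, now one argument
                              spectrum_type="amplitude")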

spectre_core/{chunks → batches}/library/_swept_center_frequency.py
@@ -9,14 +9,14 @@ import numpy as np
 
  from spectre_core.exceptions import InvalidSweepMetadataError
  from spectre_core.capture_configs import CaptureModes
- from ._fixed_center_frequency import BinChunk, FitsChunk
- from .._register import register_chunk
- from .._base import BaseChunk, ChunkFile
+ from ._fixed_center_frequency import BinFile, FitsFile
+ from .._register import register_batch
+ from .._base import BaseBatch, BatchFile
 
 
  @dataclass
  class SweepMetadata:
-     """Wrapper for metadata required to assign center frequencies to each IQ sample in the chunk.
+     """Wrapper for metadata required to assign center frequencies to each IQ sample in the batch.
 
      center_frequencies is an ordered list containing all the center frequencies that the IQ samples
      were collected at. Typically, these will be ordered in "steps", where each step corresponds to
@@ -25,10 +25,8 @@ class SweepMetadata:
      (freq_0, freq_1, ..., freq_M, freq_0, freq_1, ..., freq_M, ...), freq_0 < freq_1 < ... < freq_M
 
      The n'th element of the num_samples list, tells us how many samples were collected at the n'th
-     element of center_frequencies:
-     chunks.library.fixed_center_frequency.chunk import (
-     BinChunk, FitsChunk
-     )
+     element of center_frequencies.
+
      Number of samples: (num_samples_at_freq_0, num_samples_at_freq_1, ...)
 
      Both these lists together allow us to map for each IQ sample, the center frequency it was collected at.
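The mapping the docstring describes can be made concrete in a couple of lines of NumPy. This is purely illustrative and not part of the package; the numbers are made up:

    import numpy as np

    # Two sweeps over three steps, freq_0 < freq_1 < freq_2, as in the docstring.
    center_frequencies = np.array([95e6, 96e6, 97e6, 95e6, 96e6, 97e6])
    num_samples = np.array([4, 4, 4, 4, 4, 4])  # samples collected at each step

    # Repeating the n'th center frequency num_samples[n] times gives, for every
    # IQ sample, the center frequency it was collected at.
    per_sample_frequencies = np.repeat(center_frequencies, num_samples)
    assert per_sample_frequencies.size == num_samples.sum()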
@@ -37,19 +35,23 @@ chunks.library.fixed_center_frequency.chunk import (
      num_samples: np.ndarray
 
 
- @register_chunk(CaptureModes.SWEPT_CENTER_FREQUENCY)
- class _Chunk(BaseChunk):
-     def __init__(self, chunk_start_time, tag):
-         super().__init__(chunk_start_time, tag)
-
-         self.add_file(BinChunk(self.chunk_parent_path, self.chunk_name))
-         self.add_file(FitsChunk(self.chunk_parent_path, self.chunk_name))
-         self.add_file(HdrChunk(self.chunk_parent_path, self.chunk_name))
-
+ @register_batch(CaptureModes.SWEPT_CENTER_FREQUENCY)
+ class _Batch(BaseBatch):
+     def __init__(self,
+                  start_time: str,
+                  tag: str):
+         super().__init__(start_time, tag)
+         self.add_file( HdrFile(self.parent_dir_path, self.name) )
+         # reuse the binary and fits batch from the fixed center frequency case.
+         self.add_file( BinFile(self.parent_dir_path, self.name) )
+         self.add_file( FitsFile(self.parent_dir_path, self.name))
+
 
- class HdrChunk(ChunkFile):
-     def __init__(self, chunk_parent_path: str, chunk_name: str):
-         super().__init__(chunk_parent_path, chunk_name, "hdr")
+ class HdrFile(BatchFile):
+     def __init__(self,
+                  parent_dir_path: str,
+                  base_file_name: str):
+         super().__init__(parent_dir_path, base_file_name, "hdr")
 
      def read(self) -> Tuple[int, SweepMetadata]:
          hdr_contents = self._read_file_contents()

spectre_core/capture_configs/_capture_templates.py
@@ -138,7 +138,7 @@ def _make_fixed_frequency_capture_template(
      capture_template = make_base_capture_template(
          PNames.BATCH_SIZE,
          PNames.CENTER_FREQUENCY,
-         PNames.CHUNK_KEY,
+         PNames.BATCH_KEY,
          PNames.EVENT_HANDLER_KEY,
          PNames.FREQUENCY_RESOLUTION,
          PNames.INSTRUMENT,
@@ -158,12 +158,12 @@
      )
      capture_template.set_defaults(
          (PNames.EVENT_HANDLER_KEY, CaptureModes.FIXED_CENTER_FREQUENCY),
-         (PNames.CHUNK_KEY, CaptureModes.FIXED_CENTER_FREQUENCY),
+         (PNames.BATCH_KEY, CaptureModes.FIXED_CENTER_FREQUENCY),
          (PNames.WATCH_EXTENSION, "bin")
      )
      capture_template.enforce_defaults(
          PNames.EVENT_HANDLER_KEY,
-         PNames.CHUNK_KEY,
+         PNames.BATCH_KEY,
          PNames.WATCH_EXTENSION
      )
      return capture_template
@@ -173,7 +173,7 @@ def _make_swept_frequency_capture_template(
      """The absolute minimum required parameters for any swept frequency capture template."""
      capture_template = make_base_capture_template(
          PNames.BATCH_SIZE,
-         PNames.CHUNK_KEY,
+         PNames.BATCH_KEY,
          PNames.EVENT_HANDLER_KEY,
          PNames.FREQUENCY_RESOLUTION,
          PNames.FREQUENCY_STEP,
@@ -196,12 +196,12 @@
          PNames.WINDOW_TYPE)
      capture_template.set_defaults(
          (PNames.EVENT_HANDLER_KEY, CaptureModes.SWEPT_CENTER_FREQUENCY),
-         (PNames.CHUNK_KEY, CaptureModes.SWEPT_CENTER_FREQUENCY),
+         (PNames.BATCH_KEY, CaptureModes.SWEPT_CENTER_FREQUENCY),
          (PNames.WATCH_EXTENSION, "bin")
      )
      capture_template.enforce_defaults(
          PNames.EVENT_HANDLER_KEY,
-         PNames.CHUNK_KEY,
+         PNames.BATCH_KEY,
          PNames.WATCH_EXTENSION
      )
      return capture_template

spectre_core/capture_configs/_parameters.py
@@ -47,14 +47,11 @@ class Parameters:
 
      def add_parameter(self,
                        name: str,
-                       value: Optional[T] = None,
-                       force: bool = False) -> None:
+                       value: Optional[T] = None) -> None:
          """Add a new parameter."""
-         if name in self._parameters and not force:
+         if name in self._parameters:
              raise ValueError(f"Cannot add a parameter with name '{name}', "
-                              f"since a parameter already exists with that name. "
-                              f"You can overrride this functionality with 'force', "
-                              f"to overwrite the existing parameter.")
+                              f"since a parameter already exists with that name. ")
          self._parameters[name] = Parameter(name, value)
 
 
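Behavioural note on the hunk above: 0.0.10 allowed a duplicate parameter name to be overwritten with `force=True`; in 0.0.12 a duplicate always raises. A quick sketch of the new contract (the import path and no-argument constructor are assumptions, since neither is shown in this diff):

    from spectre_core.capture_configs import Parameters  # import path assumed

    parameters = Parameters()
    parameters.add_parameter("center_frequency", 95e6)
    try:
        parameters.add_parameter("center_frequency", 96e6)  # duplicate name
    except ValueError:
        pass  # always raised in 0.0.12; the 'force' escape hatch is gone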

spectre_core/capture_configs/_ptemplates.py
@@ -178,7 +178,7 @@ class PNames:
      WINDOW_SIZE : str = "window_size"
      EVENT_HANDLER_KEY : str = "event_handler_key"
      WATCH_EXTENSION : str = "watch_extension"
-     CHUNK_KEY : str = "chunk_key"
+     BATCH_KEY : str = "batch_key"
      SAMPLES_PER_STEP : str = "samples_per_step"
      MIN_SAMPLES_PER_STEP : str = "min_samples_per_step"
      MAX_SAMPLES_PER_STEP : str = "max_samples_per_step"
@@ -271,10 +271,10 @@ _base_ptemplates = {
                                  Identifies which post-processing functions to invoke
                                  on newly created files.
                                  """),
-     PNames.CHUNK_KEY: PTemplate(PNames.CHUNK_KEY,
+     PNames.BATCH_KEY: PTemplate(PNames.BATCH_KEY,
                                  str,
                                  help = """
-                                 Identifies the type of data is stored in each chunk.
+                                 Identifies the type of data is stored in each batch.
                                  """,
                                  ),
      PNames.WINDOW_SIZE: PTemplate(PNames.WINDOW_SIZE,

spectre_core/capture_configs/_pvalidators.py
@@ -26,9 +26,9 @@ def _validate_window(
 
      window_interval = window_size*(1 / sample_rate)
      if window_interval > batch_size:
-         raise ValueError((f"The windowing interval must be strictly less than the chunk size. "
+         raise ValueError((f"The windowing interval must be strictly less than the batch size. "
                            f"Computed the windowing interval to be {window_interval} [s], "
-                           f"but the chunk size is {batch_size} [s]"))
+                           f"but the batch size is {batch_size} [s]"))
 
      try:
          _ = get_window(window_type, window_size)
@@ -90,9 +90,9 @@ def _validate_sweep_interval(
      num_samples_per_sweep = num_steps_per_sweep * samples_per_step
      sweep_interval = num_samples_per_sweep * 1/sample_rate
      if sweep_interval > batch_size:
-         raise ValueError((f"Sweep interval must be less than the chunk size. "
+         raise ValueError((f"Sweep interval must be less than the batch size. "
                            f"The computed sweep interval is {sweep_interval} [s], "
-                           f"but the given chunk size is {batch_size} [s]"))
+                           f"but the given batch size is {batch_size} [s]"))
 
 
  def _validate_num_samples_per_step(
@@ -131,10 +131,8 @@ def _validate_step_interval(
 
      step_interval = samples_per_step * 1/ sample_rate # [s]
      if step_interval < api_latency:
-         warning_message = (f"The computed step interval of {step_interval} [s] is of the order of empirically "
-                            f"derived api latency {api_latency} [s]; you may experience undefined behaviour!")
-         warn(warning_message)
-         _LOGGER.warning(warning_message)
+         raise ValueError(f"The computed step interval of {step_interval} [s] is of the order of empirically "
+                          f"derived api latency {api_latency} [s]; you may experience undefined behaviour!")
 
 
  def _validate_fixed_center_frequency_parameters(
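The last hunk above is also a behavioural change: a step interval shorter than the empirically derived API latency now raises instead of warning. The quantity being checked is simple arithmetic; a worked example with illustrative numbers:

    # Step interval = samples per step / sample rate, as computed in the validator.
    sample_rate = 2_000_000      # [Hz]
    samples_per_step = 1_000
    step_interval = samples_per_step * 1 / sample_rate   # 0.0005 [s]
    # If step_interval < api_latency, 0.0.12 raises ValueError where 0.0.10 only warned.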

spectre_core/config/__init__.py
@@ -3,7 +3,7 @@
  # SPDX-License-Identifier: GPL-3.0-or-later
 
  from ._paths import (
-     get_spectre_data_dir_path, get_chunks_dir_path, get_configs_dir_path, get_logs_dir_path
+     get_spectre_data_dir_path, get_batches_dir_path, get_configs_dir_path, get_logs_dir_path
  )
  from ._time_formats import (
      TimeFormats
@@ -11,7 +11,7 @@ from ._time_formats import (
 
  __all__ = [
      "get_spectre_data_dir_path",
-     "get_chunks_dir_path",
+     "get_batches_dir_path",
      "get_configs_dir_path",
      "get_logs_dir_path",
      "DEFAULT_DATE_FORMAT",

spectre_core/config/_paths.py
@@ -12,9 +12,9 @@ _SPECTRE_DATA_DIR_PATH = os.environ.get("SPECTRE_DATA_DIR_PATH")
  if _SPECTRE_DATA_DIR_PATH is None:
      raise ValueError("The environment variable SPECTRE_DATA_DIR_PATH has not been set")
 
- _CHUNKS_DIR_PATH = os.environ.get("SPECTRE_CHUNKS_DIR_PATH",
-                                   os.path.join(_SPECTRE_DATA_DIR_PATH, 'chunks'))
- os.makedirs(_CHUNKS_DIR_PATH,
+ _BATCHES_DIR_PATH = os.environ.get("SPECTRE_BATCHES_DIR_PATH",
+                                    os.path.join(_SPECTRE_DATA_DIR_PATH, 'batches'))
+ os.makedirs(_BATCHES_DIR_PATH,
              exist_ok=True)
 
  _LOGS_DIR_PATH = os.environ.get("SPECTRE_LOGS_DIR_PATH",
@@ -52,11 +52,11 @@ def _get_date_based_dir_path(base_dir: str, year: int = None,
      return os.path.join(base_dir, *date_dir_components)
 
 
- def get_chunks_dir_path(year: int = None,
+ def get_batches_dir_path(year: int = None,
                          month: int = None,
                          day: int = None
                          ) -> str:
-     return _get_date_based_dir_path(_CHUNKS_DIR_PATH,
+     return _get_date_based_dir_path(_BATCHES_DIR_PATH,
                                      year,
                                      month,
                                      day)
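With the rename, batch files resolve relative to `SPECTRE_DATA_DIR_PATH` (or an explicit `SPECTRE_BATCHES_DIR_PATH` override), optionally nested by date. A usage sketch; the exact formatting of the date components comes from `_get_date_based_dir_path`, which this diff only shows in part:

    import os
    os.environ["SPECTRE_DATA_DIR_PATH"] = "/tmp/spectre-data"  # must be set before importing

    from spectre_core.config import get_batches_dir_path

    print(get_batches_dir_path())            # <SPECTRE_DATA_DIR_PATH>/batches
    print(get_batches_dir_path(2025, 1, 1))  # a date-based subdirectory of the above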

spectre_core/config/_time_formats.py
@@ -10,6 +10,8 @@ from dataclasses import dataclass
 
  @dataclass(frozen=True)
  class TimeFormats:
-     TIME = "%H:%M:%S"
-     DATE = "%Y-%m-%d"
-     DATETIME = f"{DATE}T{TIME}"
+     DATE = "%Y-%m-%d"
+     TIME = "%H:%M:%S"
+     PRECISE_TIME = "%H:%M:%S.%f"
+     DATETIME = f"{DATE}T{TIME}"
+     PRECISE_DATETIME = f"{DATE}T{PRECISE_TIME}"
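The new `PRECISE_*` members carry fractional seconds via `%f`; this is the format the FITS reader above now uses to parse `DATE-OBS`/`TIME-OBS`. For example:

    from datetime import datetime

    # TimeFormats.PRECISE_DATETIME expands to "%Y-%m-%dT%H:%M:%S.%f"
    dt = datetime.strptime("2025-01-01T12:34:56.789000", "%Y-%m-%dT%H:%M:%S.%f")
    print(dt.microsecond)  # 789000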

spectre_core/exceptions.py
@@ -6,8 +6,8 @@
  SPECTRE custom exceptions.
  """
 
- class ChunkNotFoundError(FileNotFoundError): ...
- class ChunkFileNotFoundError(FileNotFoundError): ...
+ class BatchNotFoundError(FileNotFoundError): ...
+ class BatchFileNotFoundError(FileNotFoundError): ...
  class SpectrogramNotFoundError(FileNotFoundError): ...
  class ModeNotFoundError(KeyError): ...
  class EventHandlerNotFoundError(KeyError): ...

spectre_core/logging/_configure.py
@@ -16,7 +16,7 @@ def configure_root_logger(process_type: str,
      datetime_stamp = system_datetime.strftime(TimeFormats.DATETIME)
      pid = os.getpid()
      log_handler = LogHandler(datetime_stamp, pid, process_type)
-     log_handler.make_parent_path()
+     log_handler.make_parent_dir_path()
 
      # configure the root logger
      logger = logging.getLogger()

spectre_core/logging/_log_handlers.py
@@ -66,8 +66,8 @@ class LogHandlers:
                   year: Optional[int] = None,
                   month: Optional[int] = None,
                   day: Optional[int] = None):
-         self._log_handler_map: dict[str, LogHandler] = OrderedDict()
          self._process_type = process_type
+         self._log_handler_map: dict[str, LogHandler] = OrderedDict()
          self.set_date(year, month, day)
 
 

spectre_core/plotting/_panels.py
@@ -170,7 +170,7 @@ class _SpectrogramPanel(BaseTimeSeriesPanel):
 
 
      def draw(self):
-         dynamic_spectra = self._spectrogram.dynamic_spectra_as_dBb if self._dBb else self._spectrogram.dynamic_spectra
+         dynamic_spectra = self._spectrogram.dynamic_spectra_dBb if self._dBb else self._spectrogram.dynamic_spectra
 
          norm = LogNorm(vmin=np.nanmin(dynamic_spectra[dynamic_spectra > 0]),
                         vmax=np.nanmax(dynamic_spectra)) if self._log_norm else None

spectre_core/post_processing/__init__.py
@@ -6,9 +6,9 @@
  from .library._fixed_center_frequency import _EventHandler
  from .library._swept_center_frequency import _EventHandler
 
- from ._factory import get_event_handler_from_tag
+ from ._factory import get_event_handler_cls_from_tag
  from ._post_processor import PostProcessor
 
  __all__ = [
-     "PostProcessor", "get_event_handler_from_tag"
+     "PostProcessor", "get_event_handler_cls_from_tag"
  ]

spectre_core/post_processing/_base.py
@@ -12,7 +12,7 @@ from scipy.signal import ShortTimeFFT, get_window
  from watchdog.events import FileSystemEventHandler, FileCreatedEvent
 
  from spectre_core.capture_configs import CaptureConfig, PNames
- from spectre_core.chunks import BaseChunk, get_chunk_from_tag
+ from spectre_core.batches import BaseBatch, get_batch_cls_from_tag
  from spectre_core.spectrograms import Spectrogram, join_spectrograms
 
 
@@ -35,8 +35,8 @@ class BaseEventHandler(ABC, FileSystemEventHandler):
                   tag: str):
          self._tag = tag
 
-         # the tag tells us 'what type' of data is stored in the files for each chunk
-         self._Chunk: BaseChunk = get_chunk_from_tag(tag)
+         # the tag tells us 'what type' of data is stored in the files for each batch
+         self._Batch = get_batch_cls_from_tag(tag)
          # load the capture config corresponding to the tag
          self._capture_config = CaptureConfig(tag)
 
@@ -104,7 +104,7 @@ class BaseEventHandler(ABC, FileSystemEventHandler):
          self._cached_spectrogram = join_spectrograms([self._cached_spectrogram, spectrogram])
 
          # if the time range is not specified
-         time_range = self._capture_config.get_parameter_value(PNames.TIME_RANGE) or self._capture_config.get_parameter_value(PNames.BATCH_SIZE)
+         time_range = self._capture_config.get_parameter_value(PNames.TIME_RANGE) or 0.0
 
          if self._cached_spectrogram.time_range >= time_range:
              self._flush_cache()
@@ -112,8 +112,8 @@
 
      def _flush_cache(self) -> None:
          if self._cached_spectrogram:
-             _LOGGER.info(f"Flushing spectrogram to file with chunk start time "
-                          f"'{self._cached_spectrogram.chunk_start_time}'")
+             _LOGGER.info(f"Flushing spectrogram to file with start time "
+                          f"'{self._cached_spectrogram.format_start_time(precise=True)}'")
              self._cached_spectrogram.save()
              _LOGGER.info("Flush successful, resetting spectrogram cache")
              self._cached_spectrogram = None # reset the cache

spectre_core/post_processing/_factory.py
@@ -8,7 +8,7 @@ from spectre_core.capture_configs import CaptureConfig, PNames
  from spectre_core.exceptions import EventHandlerNotFoundError
 
 
- def _get_event_handler(event_handler_key: str) -> BaseEventHandler:
+ def _get_event_handler_cls(event_handler_key: str) -> BaseEventHandler:
      EventHandler = event_handler_map.get(event_handler_key)
      if EventHandler is None:
          valid_event_handler_keys = list(event_handler_map.keys())
@@ -17,7 +17,7 @@ def _get_event_handler(event_handler_key: str) -> BaseEventHandler:
      return EventHandler
 
 
- def get_event_handler_from_tag(tag: str) -> BaseEventHandler:
+ def get_event_handler_cls_from_tag(tag: str) -> BaseEventHandler:
      capture_config = CaptureConfig(tag)
      event_handler_key = capture_config.get_parameter_value(PNames.EVENT_HANDLER_KEY)
-     return _get_event_handler(event_handler_key)
+     return _get_event_handler_cls(event_handler_key)

spectre_core/post_processing/_post_processor.py
@@ -8,8 +8,8 @@ _LOGGER = getLogger(__name__)
  from watchdog.observers import Observer
  from watchdog.events import FileCreatedEvent
 
- from ._factory import get_event_handler_from_tag
- from spectre_core.config import get_chunks_dir_path
+ from ._factory import get_event_handler_cls_from_tag
+ from spectre_core.config import get_batches_dir_path
 
  class PostProcessor:
      def __init__(self,
@@ -17,14 +17,14 @@ class PostProcessor:
 
          self._observer = Observer()
 
-         EventHandler = get_event_handler_from_tag(tag)
+         EventHandler = get_event_handler_cls_from_tag(tag)
          self._event_handler = EventHandler(tag)
 
 
      def start(self):
-         """Start an observer to process newly created files in the chunks directory"""
+         """Start an observer to process newly created files in the batches directory"""
          self._observer.schedule(self._event_handler,
-                                 get_chunks_dir_path(),
+                                 get_batches_dir_path(),
                                  recursive=True,
                                  event_filter=[FileCreatedEvent])
 
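Pulling the renamed pieces together, a minimal post-processing session might look like the sketch below. It assumes a capture config already exists for the tag and that `PostProcessor.__init__` takes the tag as its argument (its parameter list is truncated in this diff); the lifecycle beyond scheduling the observer is also not shown here:

    from spectre_core.post_processing import PostProcessor

    post_processor = PostProcessor("my-tag")  # event handler class resolved from the tag
    post_processor.start()                    # watches get_batches_dir_path() for new files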

spectre_core/post_processing/library/_fixed_center_frequency.py
@@ -7,11 +7,12 @@ _LOGGER = getLogger(__name__)
 
  import numpy as np
  from typing import Tuple
+ from datetime import timedelta
 
  import os
 
  from spectre_core.capture_configs import CaptureConfig, PNames, CaptureModes
- from spectre_core.chunks import BaseChunk
+ from spectre_core.batches import BaseBatch
  from spectre_core.spectrograms import Spectrogram, time_average, frequency_average
  from .._base import BaseEventHandler, make_sft_instance
  from .._register import register_event_handler
@@ -51,16 +52,13 @@ def _do_stfft(iq_data: np.array,
      return times, frequencies, dynamic_spectra
 
 
- def _build_spectrogram(chunk: BaseChunk,
+ def _build_spectrogram(batch: BaseBatch,
                         capture_config: CaptureConfig) -> Spectrogram:
-     """Create a spectrogram by performing a Short Time FFT on the IQ samples for this chunk."""
+     """Create a spectrogram by performing a Short Time FFT on the IQ samples for this batch."""
 
-     # read the data from the chunk
-     millisecond_correction = chunk.read_file("hdr")
-     iq_data = chunk.read_file("bin")
-
-     # units conversion
-     microsecond_correction = millisecond_correction * 1e3
+     # read the data from the batch
+     millisecond_correction = batch.read_file("hdr")
+     iq_data = batch.read_file("bin")
 
      times, frequencies, dynamic_spectra = _do_stfft(iq_data,
                                                      capture_config)
@@ -70,12 +68,13 @@ def _build_spectrogram(chunk: BaseChunk,
      frequencies = np.array(frequencies, dtype = 'float32')
      dynamic_spectra = np.array(dynamic_spectra, dtype = 'float32')
 
-     return Spectrogram(dynamic_spectra,
-                        times,
-                        frequencies,
-                        chunk.tag,
-                        chunk_start_time = chunk.chunk_start_time,
-                        microsecond_correction = microsecond_correction,
+     # compute the start datetime for the spectrogram by adding the millisecond component to the batch start time
+     spectrogram_start_datetime = batch.start_datetime + timedelta(milliseconds=millisecond_correction)
+     return Spectrogram(dynamic_spectra,
+                        times,
+                        frequencies,
+                        batch.tag,
+                        spectrogram_start_datetime,
                         spectrum_type = "amplitude")
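The hunk above also changes how the sub-second correction is applied: the `.hdr` millisecond value is folded directly into the batch start time rather than being converted to a separate microsecond correction. A worked example of that arithmetic, with illustrative values:

    from datetime import datetime, timedelta

    batch_start_datetime = datetime(2025, 1, 1, 12, 0, 0)   # illustrative batch start
    millisecond_correction = 250                            # as read from the .hdr file
    spectrogram_start_datetime = batch_start_datetime + timedelta(milliseconds=millisecond_correction)
    # -> 2025-01-01 12:00:00.250000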
@@ -89,13 +88,13 @@ class _EventHandler(BaseEventHandler):
          _LOGGER.info(f"Processing: {absolute_file_path}")
          file_name = os.path.basename(absolute_file_path)
          base_file_name, _ = os.path.splitext(file_name)
-         chunk_start_time, tag = base_file_name.split('_')
+         batch_start_time, tag = base_file_name.split('_')
 
-         # create an instance of the current chunk being processed
-         chunk = self._Chunk(chunk_start_time, tag)
+         # create an instance of the current batch being processed
+         batch = self._Batch(batch_start_time, tag)
 
          _LOGGER.info("Creating spectrogram")
-         spectrogram = _build_spectrogram(chunk,
+         spectrogram = _build_spectrogram(batch,
                                           self._capture_config)
 
          spectrogram = time_average(spectrogram,
@@ -106,10 +105,10 @@
 
          self._cache_spectrogram(spectrogram)
 
-         bin_chunk = chunk.get_file('bin')
-         _LOGGER.info(f"Deleting {bin_chunk.file_path}")
-         bin_chunk.delete()
+         bin_file = batch.get_file('bin')
+         _LOGGER.info(f"Deleting {bin_file.file_path}")
+         bin_file.delete()
 
-         hdr_chunk = chunk.get_file('hdr')
-         _LOGGER.info(f"Deleting {hdr_chunk.file_path}")
-         hdr_chunk.delete()
+         hdr_file = batch.get_file('hdr')
+         _LOGGER.info(f"Deleting {hdr_file.file_path}")
+         hdr_file.delete()