spectre-core 0.0.11-py3-none-any.whl → 0.0.12-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. spectre_core/_file_io/file_handlers.py +12 -12
  2. spectre_core/batches/__init__.py +22 -0
  3. spectre_core/batches/_base.py +146 -0
  4. spectre_core/batches/_batches.py +197 -0
  5. spectre_core/batches/_factory.py +27 -0
  6. spectre_core/{chunks → batches}/_register.py +5 -5
  7. spectre_core/{chunks → batches}/library/_callisto.py +31 -33
  8. spectre_core/{chunks → batches}/library/_fixed_center_frequency.py +43 -38
  9. spectre_core/{chunks → batches}/library/_swept_center_frequency.py +22 -20
  10. spectre_core/capture_configs/_capture_templates.py +6 -6
  11. spectre_core/capture_configs/_parameters.py +3 -6
  12. spectre_core/capture_configs/_ptemplates.py +3 -3
  13. spectre_core/capture_configs/_pvalidators.py +4 -4
  14. spectre_core/config/__init__.py +2 -2
  15. spectre_core/config/_paths.py +5 -5
  16. spectre_core/config/_time_formats.py +5 -3
  17. spectre_core/exceptions.py +2 -2
  18. spectre_core/logging/_configure.py +1 -1
  19. spectre_core/logging/_log_handlers.py +1 -1
  20. spectre_core/plotting/_panels.py +1 -1
  21. spectre_core/post_processing/__init__.py +2 -2
  22. spectre_core/post_processing/_base.py +5 -5
  23. spectre_core/post_processing/_factory.py +3 -3
  24. spectre_core/post_processing/_post_processor.py +5 -5
  25. spectre_core/post_processing/library/_fixed_center_frequency.py +24 -25
  26. spectre_core/post_processing/library/_swept_center_frequency.py +68 -83
  27. spectre_core/receivers/gr/_base.py +1 -1
  28. spectre_core/receivers/gr/_rsp1a.py +3 -3
  29. spectre_core/receivers/gr/_rspduo.py +4 -4
  30. spectre_core/receivers/gr/_test.py +3 -3
  31. spectre_core/receivers/library/_test.py +3 -3
  32. spectre_core/spectrograms/_analytical.py +0 -6
  33. spectre_core/spectrograms/_spectrogram.py +113 -79
  34. spectre_core/spectrograms/_transform.py +19 -36
  35. spectre_core/wgetting/_callisto.py +20 -24
  36. {spectre_core-0.0.11.dist-info → spectre_core-0.0.12.dist-info}/METADATA +1 -1
  37. spectre_core-0.0.12.dist-info/RECORD +64 -0
  38. spectre_core/chunks/__init__.py +0 -22
  39. spectre_core/chunks/_base.py +0 -116
  40. spectre_core/chunks/_chunks.py +0 -200
  41. spectre_core/chunks/_factory.py +0 -25
  42. spectre_core-0.0.11.dist-info/RECORD +0 -64
  43. {spectre_core-0.0.11.dist-info → spectre_core-0.0.12.dist-info}/LICENSE +0 -0
  44. {spectre_core-0.0.11.dist-info → spectre_core-0.0.12.dist-info}/WHEEL +0 -0
  45. {spectre_core-0.0.11.dist-info → spectre_core-0.0.12.dist-info}/top_level.txt +0 -0
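
Most of the changes below are a wholesale rename of the chunks subpackage to batches: BaseChunk becomes BaseBatch, PNames.CHUNK_KEY becomes PNames.BATCH_KEY, and get_chunks_dir_path becomes get_batches_dir_path. As a minimal sketch of what that migration looks like for downstream code (only the imports that actually appear in the hunks below are assumed; any other re-exports are not verified here):

# spectre-core 0.0.11
# from spectre_core.chunks import BaseChunk, SweepMetadata
# from spectre_core.config import get_chunks_dir_path

# spectre-core 0.0.12
from spectre_core.batches import BaseBatch, SweepMetadata
from spectre_core.config import get_batches_dir_path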
@@ -16,8 +16,8 @@ from scipy.signal import ShortTimeFFT
 from spectre_core.spectrograms import Spectrogram, time_average, frequency_average
 from spectre_core.config import TimeFormats
 from spectre_core.capture_configs import CaptureConfig, PNames, CaptureModes
-from spectre_core.chunks import BaseChunk
-from spectre_core.chunks import SweepMetadata
+from spectre_core.batches import BaseBatch
+from spectre_core.batches import SweepMetadata
 from spectre_core.exceptions import InvalidSweepMetadataError
 from .._base import BaseEventHandler, make_sft_instance
 from .._register import register_event_handler
@@ -99,12 +99,12 @@ def _fill_stepped_dynamic_spectra(stepped_dynamic_spectra: np.ndarray,
 
 def _compute_num_max_slices_in_step(sft: ShortTimeFFT,
                                     num_samples: np.ndarray) -> int:
-    """Compute the maximum number of slices over all steps, in all sweeps over the chunk."""
+    """Compute the maximum number of slices over all steps, in all sweeps over the batch."""
     return sft.upper_border_begin(np.max(num_samples))[1]
 
 
 def _compute_num_full_sweeps(center_frequencies: np.ndarray) -> int:
-    """Compute the total number of full sweeps over the chunk.
+    """Compute the total number of full sweeps over the batch.
 
     Since the number of each samples in each step is variable, we only know a sweep is complete
     when there is a sweep after it. So we can define the total number of *full* sweeps as the number of
@@ -192,23 +192,12 @@ def _do_stfft(iq_data: np.ndarray,
     return times, frequencies, dynamic_spectra
 
 
-def _correct_timing(chunk_start_datetime: datetime,
-                    millisecond_correction: int,
-                    num_samples_prepended: int,
-                    sample_rate: int):
-    """Correct the start time for this chunk based on the number of samples we prepended reconstructing the initial sweep."""
-    sample_interval = (1 / sample_rate)
-    elapsed_time = num_samples_prepended * sample_interval
-    corrected_datetime = chunk_start_datetime + timedelta(milliseconds = millisecond_correction) - timedelta(seconds = float(elapsed_time))
-    return corrected_datetime.strftime(TimeFormats.DATETIME), corrected_datetime.microsecond * 1e-3
-
-
 def _prepend_num_samples(carryover_num_samples: np.ndarray,
                          num_samples: np.ndarray,
-                         final_step_spans_two_chunks: bool) -> np.ndarray:
-    """Prepend the number of samples from the final sweep of the previous chunk."""
-    if final_step_spans_two_chunks:
-        # ensure the number of samples from the final step in the previous chunk are accounted for
+                         final_step_spans_two_batches: bool) -> np.ndarray:
+    """Prepend the number of samples from the final sweep of the previous batch."""
+    if final_step_spans_two_batches:
+        # ensure the number of samples from the final step in the previous batch are accounted for
         num_samples[0] += carryover_num_samples[-1]
         # and truncate as required
         carryover_num_samples = carryover_num_samples[:-1]
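
For context on the hunk above, only the names change here: when the final step of the previous batch continues into the current one, its sample count is folded into the first step of the current batch and dropped from the carryover before the two arrays are joined. A standalone numpy sketch of that behaviour (the concluding np.concatenate is not visible in this hunk and is assumed by analogy with the sibling _prepend_center_frequencies; the sample counts are illustrative only):

import numpy as np

def prepend_num_samples(carryover_num_samples: np.ndarray,
                        num_samples: np.ndarray,
                        final_step_spans_two_batches: bool) -> np.ndarray:
    # mirror of _prepend_num_samples as shown in the diff
    if final_step_spans_two_batches:
        # fold the split step's samples into the first step of the current batch
        num_samples[0] += carryover_num_samples[-1]
        # and drop that step from the carryover so it is not counted twice
        carryover_num_samples = carryover_num_samples[:-1]
    return np.concatenate((carryover_num_samples, num_samples))

# illustrative values: a three-step sweep whose final step spans two batches
carryover = np.array([1024, 1024, 512])
current   = np.array([512, 1024, 1024])
print(prepend_num_samples(carryover, current, final_step_spans_two_batches=True))
# [1024 1024 1024 1024 1024]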
@@ -217,11 +206,11 @@ def _prepend_num_samples(carryover_num_samples: np.ndarray,
 
 def _prepend_center_frequencies(carryover_center_frequencies: np.ndarray,
                                 center_frequencies: np.ndarray,
-                                final_step_spans_two_chunks: bool)-> np.ndarray:
-    """Prepend the center frequencies from the final sweep of the previous chunk."""
-    # in the case that the sweep has bled across chunks,
+                                final_step_spans_two_batches: bool)-> np.ndarray:
+    """Prepend the center frequencies from the final sweep of the previous batch."""
+    # in the case that the sweep has bled across batches,
     # do not permit identical neighbours in the center frequency array
-    if final_step_spans_two_chunks:
+    if final_step_spans_two_batches:
         # truncate the final frequency to prepend (as it already exists in the array we are appending to in this case)
         carryover_center_frequencies = carryover_center_frequencies[:-1]
     return np.concatenate((carryover_center_frequencies, center_frequencies))
@@ -229,16 +218,16 @@ def _prepend_center_frequencies(carryover_center_frequencies: np.ndarray,
 
 def _prepend_iq_data(carryover_iq_data: np.ndarray,
                      iq_data: np.ndarray) -> np.ndarray:
-    """Prepend the IQ samples from the final sweep of the previous chunk."""
+    """Prepend the IQ samples from the final sweep of the previous batch."""
     return np.concatenate((carryover_iq_data, iq_data))
 
 
-def _get_final_sweep(previous_chunk: BaseChunk
+def _get_final_sweep(previous_batch: BaseBatch
                      ) -> Tuple[np.ndarray, SweepMetadata]:
-    """Get data from the final sweep of the previous chunk."""
-    # unpack the data from the previous chunk
-    previous_iq_data = previous_chunk.read_file("bin")
-    _, previous_sweep_metadata = previous_chunk.read_file("hdr")
+    """Get data from the final sweep of the previous batch."""
+    # unpack the data from the previous batch
+    previous_iq_data = previous_batch.read_file("bin")
+    _, previous_sweep_metadata = previous_batch.read_file("hdr")
     # find the step index from the last sweep
     # [0] since the return of np.where is a 1 element Tuple,
     # containing a list of step indices corresponding to the smallest center frequencies
@@ -258,58 +247,55 @@ def _get_final_sweep(previous_chunk: BaseChunk
     return final_sweep_iq_data, SweepMetadata(final_center_frequencies, final_num_samples)
 
 
-def _reconstruct_initial_sweep(previous_chunk: BaseChunk,
+def _reconstruct_initial_sweep(previous_batch: BaseBatch,
                                iq_data: np.ndarray,
                                sweep_metadata: SweepMetadata) -> Tuple[np.ndarray, SweepMetadata, int]:
-    """Reconstruct the initial sweep of the current chunk, using data from the previous chunk."""
+    """Reconstruct the initial sweep of the current batch, using data from the previous batch."""
 
-    # carryover the final sweep of the previous chunk, and prepend that data to the current chunk data
-    carryover_iq_data, carryover_sweep_metadata = _get_final_sweep(previous_chunk)
+    # carryover the final sweep of the previous batch, and prepend that data to the current batch data
+    carryover_iq_data, carryover_sweep_metadata = _get_final_sweep(previous_batch)
 
-    # prepend the iq data that was carried over from the previous chunk
+    # prepend the iq data that was carried over from the previous batch
     iq_data = _prepend_iq_data(carryover_iq_data,
                                iq_data)
 
-    # prepend the sweep metadata from the previous chunk
-    final_step_spans_two_chunks = carryover_sweep_metadata.center_frequencies[-1] == sweep_metadata.center_frequencies[0]
+    # prepend the sweep metadata from the previous batch
+    final_step_spans_two_batches = carryover_sweep_metadata.center_frequencies[-1] == sweep_metadata.center_frequencies[0]
     center_frequencies = _prepend_center_frequencies(carryover_sweep_metadata.center_frequencies,
                                                      sweep_metadata.center_frequencies,
-                                                     final_step_spans_two_chunks)
+                                                     final_step_spans_two_batches)
     num_samples = _prepend_num_samples(carryover_sweep_metadata.num_samples,
                                        sweep_metadata.num_samples,
-                                       final_step_spans_two_chunks)
+                                       final_step_spans_two_batches)
 
     # keep track of how many samples we prepended (required to adjust timing later)
     num_samples_prepended = np.sum(carryover_sweep_metadata.num_samples)
     return iq_data, SweepMetadata(center_frequencies, num_samples), num_samples_prepended
 
 
-def _build_spectrogram(chunk: BaseChunk,
+def _build_spectrogram(batch: BaseBatch,
                        capture_config: CaptureConfig,
-                       previous_chunk: Optional[BaseChunk] = None) -> Spectrogram:
-    """Create a spectrogram by performing a Short Time FFT on the (swept) IQ samples for this chunk."""
-    iq_data = chunk.read_file("bin")
-    millisecond_correction, sweep_metadata = chunk.read_file("hdr")
-
-    # if a previous chunk has been specified, this indicates that the initial sweep spans
-    # between two adjacent batched files.
-    if previous_chunk:
-        # If this is the case, first reconstruct the initial sweep of the current chunk
-        # by prepending the final sweep of the previous chunk
-        iq_data, sweep_metadata, num_samples_prepended = _reconstruct_initial_sweep(previous_chunk,
+                       previous_batch: Optional[BaseBatch] = None) -> Spectrogram:
+    """Create a spectrogram by performing a Short Time FFT on the (swept) IQ samples for this batch."""
+    iq_data = batch.read_file("bin")
+    millisecond_correction, sweep_metadata = batch.read_file("hdr")
+
+    # correct the batch start datetime with the millisecond correction stored in the detached header
+    spectrogram_start_datetime = batch.start_datetime + timedelta(milliseconds=millisecond_correction)
+
+    # if a previous batch has been specified, this indicates that the initial sweep spans between two adjacent batched files.
+    if previous_batch:
+        # If this is the case, first reconstruct the initial sweep of the current batch
+        # by prepending the final sweep of the previous batch
+        iq_data, sweep_metadata, num_samples_prepended = _reconstruct_initial_sweep(previous_batch,
                                                                                     iq_data,
                                                                                     sweep_metadata)
-        # since we have prepended extra samples, we need to correct the chunk start time
-        # appropriately
-        chunk_start_time, millisecond_correction = _correct_timing(chunk.chunk_start_datetime,
-                                                                   millisecond_correction,
-                                                                   num_samples_prepended,
-                                                                   capture_config.get_parameter_value(PNames.SAMPLE_RATE))
-    # otherwise, no action is required
-    else:
-        chunk_start_time = chunk.chunk_start_time
-
-    microsecond_correction = millisecond_correction * 1e3
+
+        # since we have prepended extra samples, we need to correct the spectrogram start time appropriately
+        elapsed_time = num_samples_prepended * (1 / capture_config.get_parameter_value(PNames.SAMPLE_RATE))
+        spectrogram_start_datetime -= timedelta(seconds = float(elapsed_time))
+
+
 
     times, frequencies, dynamic_spectra = _do_stfft(iq_data,
                                                     sweep_metadata,
@@ -318,9 +304,8 @@ def _build_spectrogram(chunk: BaseChunk,
     return Spectrogram(dynamic_spectra,
                        times,
                        frequencies,
-                       chunk.tag,
-                       chunk_start_time,
-                       microsecond_correction,
+                       batch.tag,
+                       spectrogram_start_datetime,
                        spectrum_type = "amplitude")
 
 
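Taken together, the two hunks above replace the deleted _correct_timing helper with an inline datetime correction: the millisecond offset from the detached header is added to the batch start datetime, and if samples were prepended from the previous batch the start is shifted back by their duration; Spectrogram now receives that single datetime instead of a start-time string plus a microsecond correction. A small worked example of the arithmetic with illustrative values (the batch start, 3 ms correction, 2048 prepended samples, and 1 MHz sample rate are assumptions):

from datetime import datetime, timedelta

batch_start_datetime   = datetime(2025, 1, 1, 12, 0, 0)   # assumed batch start
millisecond_correction = 3          # as read from the detached "hdr" file
num_samples_prepended  = 2048       # carried over from the previous batch
sample_rate            = 1_000_000  # stand-in for PNames.SAMPLE_RATE

# apply the millisecond correction from the header ...
spectrogram_start_datetime = batch_start_datetime + timedelta(milliseconds=millisecond_correction)

# ... then shift back by the duration of the prepended samples
elapsed_time = num_samples_prepended * (1 / sample_rate)
spectrogram_start_datetime -= timedelta(seconds=float(elapsed_time))

print(spectrogram_start_datetime)   # 2025-01-01 12:00:00.000952
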
@@ -329,10 +314,10 @@ class _EventHandler(BaseEventHandler):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
 
-        # the previous chunk is stored in order to fetch the
+        # the previous batch is stored in order to fetch the
         # data from the "final sweep" which was ignored during
         # processing.
-        self._previous_chunk: BaseChunk = None
+        self._previous_batch: BaseBatch = None
 
 
     def process(self,
@@ -341,8 +326,8 @@ class _EventHandler(BaseEventHandler):
         file_name = os.path.basename(absolute_file_path)
         # discard the extension
         base_file_name, _ = os.path.splitext(file_name)
-        chunk_start_time, tag = base_file_name.split('_')
-        chunk = self._Chunk(chunk_start_time, tag)
+        batch_start_time, tag = base_file_name.split('_')
+        batch = self._Batch(batch_start_time, tag)
 
         # ensure that the file which has been created has the expected tag
         if tag != self._tag:
@@ -350,9 +335,9 @@
                                 f"but a file has been created with tag '{tag}'")
 
         _LOGGER.info("Creating spectrogram")
-        spectrogram = _build_spectrogram(chunk,
+        spectrogram = _build_spectrogram(batch,
                                          self._capture_config,
-                                         previous_chunk = self._previous_chunk)
+                                         previous_batch = self._previous_batch)
 
         spectrogram = time_average(spectrogram,
                                    resolution = self._capture_config.get_parameter_value(PNames.TIME_RESOLUTION))
@@ -362,21 +347,21 @@ class _EventHandler(BaseEventHandler):
 
         self._cache_spectrogram(spectrogram)
 
-        # if the previous chunk has not yet been set, it means we are processing the first chunk
-        # so we don't need to handle the previous chunk
-        if self._previous_chunk is None:
+        # if the previous batch has not yet been set, it means we are processing the first batch
+        # so we don't need to handle the previous batch
+        if self._previous_batch is None:
             # instead, only set it for the next time this method is called
-            self._previous_chunk = chunk
+            self._previous_batch = batch
 
-        # otherwise the previous chunk is defined (and by this point has already been processed)
+        # otherwise the previous batch is defined (and by this point has already been processed)
         else:
-            bin_chunk = self._previous_chunk.get_file('bin')
-            _LOGGER.info(f"Deleting {bin_chunk.file_path}")
-            bin_chunk.delete()
+            bin_file = self._previous_batch.get_file('bin')
+            _LOGGER.info(f"Deleting {bin_file.file_path}")
+            bin_file.delete()
 
-            hdr_chunk = self._previous_chunk.get_file('hdr')
-            _LOGGER.info(f"Deleting {hdr_chunk.file_path}")
-            hdr_chunk.delete()
+            hdr_file = self._previous_batch.get_file('hdr')
+            _LOGGER.info(f"Deleting {hdr_file.file_path}")
+            hdr_file.delete()
 
-            # and reassign the current chunk to be used as the previous chunk at the next call of this method
-            self._previous_chunk = chunk
+            # and reassign the current batch to be used as the previous batch at the next call of this method
+            self._previous_batch = batch
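
The handler above keeps a one-batch delay before deleting raw files: the first batch is only cached as _previous_batch, and from the second call onward the previous batch's bin and hdr files are removed once its final sweep has been carried into the current batch. A schematic sketch of that lifecycle with hypothetical stand-in classes (only the get_file/file_path/delete surface shown in the diff is mirrored; these are not the real spectre_core types):

from typing import Optional

class FakeFile:
    # stand-in for the file object returned by get_file in the diff
    def __init__(self, file_path: str) -> None:
        self.file_path = file_path
    def delete(self) -> None:
        print(f"Deleting {self.file_path}")

class FakeBatch:
    # hypothetical minimal batch exposing only what the handler uses
    def __init__(self, name: str) -> None:
        self._name = name
    def get_file(self, extension: str) -> FakeFile:
        return FakeFile(f"{self._name}.{extension}")

previous_batch: Optional[FakeBatch] = None
for batch in (FakeBatch("batch_0"), FakeBatch("batch_1"), FakeBatch("batch_2")):
    # ... build, average, and cache the spectrogram for `batch` here ...
    if previous_batch is None:
        # first batch: nothing to clean up yet
        previous_batch = batch
    else:
        # the previous batch's raw files are no longer needed once its
        # final sweep has been reconstructed into the current batch
        previous_batch.get_file("bin").delete()
        previous_batch.get_file("hdr").delete()
        previous_batch = batch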
@@ -9,7 +9,7 @@ from gnuradio import gr
 from gnuradio import spectre
 
 from spectre_core.capture_configs import Parameters, PNames
-from spectre_core.config import get_chunks_dir_path
+from spectre_core.config import get_batches_dir_path
 
 from spectre_core.capture_configs import Parameters
 
@@ -24,7 +24,7 @@ from gnuradio import spectre
 from gnuradio import sdrplay3
 
 from spectre_core.capture_configs import Parameters, PNames
-from spectre_core.config import get_chunks_dir_path
+from spectre_core.config import get_batches_dir_path
 from ._base import capture
 
 class _fixed_center_frequency(gr.top_block):
@@ -47,7 +47,7 @@ class _fixed_center_frequency(gr.top_block):
         ##################################################
         # Blocks
         ##################################################
-        self.spectre_batched_file_sink_0 = spectre.batched_file_sink(get_chunks_dir_path(),
+        self.spectre_batched_file_sink_0 = spectre.batched_file_sink(get_batches_dir_path(),
                                                                      tag,
                                                                      batch_size,
                                                                      sample_rate)
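
This and the remaining flowgraph hunks are mechanical: every spectre.batched_file_sink block now writes under get_batches_dir_path() rather than get_chunks_dir_path(). A rough sketch of the wiring, assuming the gr-spectre out-of-tree module is installed and keeping only the sink construction shown in the diff (the rest of the flowgraph is omitted, and the class name here is hypothetical):

from gnuradio import gr
from gnuradio import spectre                       # gr-spectre OOT module used throughout these hunks
from spectre_core.config import get_batches_dir_path

class example_top_block(gr.top_block):
    # minimal flowgraph sketch: only the sink's target directory changed in 0.0.12
    def __init__(self, tag: str, batch_size: int, sample_rate: int) -> None:
        super().__init__("example")
        # 0.0.11 passed get_chunks_dir_path(); 0.0.12 passes get_batches_dir_path()
        self.spectre_batched_file_sink_0 = spectre.batched_file_sink(get_batches_dir_path(),
                                                                     tag,
                                                                     batch_size,
                                                                     sample_rate)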
@@ -111,7 +111,7 @@ class _swept_center_frequency(gr.top_block):
                                                          sample_rate,
                                                          samples_per_step,
                                                          'freq')
-        self.spectre_batched_file_sink_0 = spectre.batched_file_sink(get_chunks_dir_path(),
+        self.spectre_batched_file_sink_0 = spectre.batched_file_sink(get_batches_dir_path(),
                                                                      tag,
                                                                      batch_size,
                                                                      sample_rate,
@@ -24,7 +24,7 @@ from gnuradio import spectre
 from gnuradio import sdrplay3
 
 from spectre_core.capture_configs import Parameters, PNames
-from spectre_core.config import get_chunks_dir_path
+from spectre_core.config import get_batches_dir_path
 from ._base import capture
 
 
@@ -48,7 +48,7 @@ class _tuner_1_fixed_center_frequency(gr.top_block):
         ##################################################
         # Blocks
         ##################################################
-        self.spectre_batched_file_sink_0 = spectre.batched_file_sink(get_chunks_dir_path(),
+        self.spectre_batched_file_sink_0 = spectre.batched_file_sink(get_batches_dir_path(),
                                                                      tag,
                                                                      batch_size,
                                                                      sample_rate)
@@ -107,7 +107,7 @@ class _tuner_2_fixed_center_frequency(gr.top_block):
         ##################################################
         # Blocks
         ##################################################
-        self.spectre_batched_file_sink_0 = spectre.batched_file_sink(get_chunks_dir_path(),
+        self.spectre_batched_file_sink_0 = spectre.batched_file_sink(get_batches_dir_path(),
                                                                      tag,
                                                                      batch_size,
                                                                      sample_rate)
@@ -175,7 +175,7 @@ class _tuner_1_swept_center_frequency(gr.top_block):
                                                          sample_rate,
                                                          samples_per_step,
                                                          'freq')
-        self.spectre_batched_file_sink_0 = spectre.batched_file_sink(get_chunks_dir_path(),
+        self.spectre_batched_file_sink_0 = spectre.batched_file_sink(get_batches_dir_path(),
                                                                      tag,
                                                                      batch_size,
                                                                      sample_rate,
@@ -25,7 +25,7 @@ from gnuradio import spectre
 from gnuradio import analog
 
 from spectre_core.capture_configs import Parameters, PNames
-from spectre_core.config import get_chunks_dir_path
+from spectre_core.config import get_batches_dir_path
 from ._base import capture
 
 
@@ -46,7 +46,7 @@ class _cosine_signal_1(gr.top_block):
         ##################################################
         # Blocks
         ##################################################
-        self.spectre_batched_file_sink_0 = spectre.batched_file_sink(get_chunks_dir_path(),
+        self.spectre_batched_file_sink_0 = spectre.batched_file_sink(get_batches_dir_path(),
                                                                      tag,
                                                                      batch_size,
                                                                      samp_rate)
@@ -100,7 +100,7 @@ class _tagged_staircase(gr.top_block):
                                                              frequency_step,
                                                              step_increment,
                                                              samp_rate)
-        self.spectre_batched_file_sink_0 = spectre.batched_file_sink(get_chunks_dir_path(),
+        self.spectre_batched_file_sink_0 = spectre.batched_file_sink(get_batches_dir_path(),
                                                                      tag,
                                                                      batch_size,
                                                                      samp_rate,
@@ -123,7 +123,7 @@ class _Receiver(BaseReceiver):
            PNames.WINDOW_HOP,
            PNames.WINDOW_SIZE,
            PNames.EVENT_HANDLER_KEY,
-           PNames.CHUNK_KEY,
+           PNames.BATCH_KEY,
            PNames.WATCH_EXTENSION,
            PNames.MIN_SAMPLES_PER_STEP,
            PNames.MAX_SAMPLES_PER_STEP,
@@ -152,7 +152,7 @@ class _Receiver(BaseReceiver):
            (PNames.WINDOW_SIZE, 512),
            (PNames.WINDOW_TYPE, "boxcar"),
            (PNames.EVENT_HANDLER_KEY, CaptureModes.SWEPT_CENTER_FREQUENCY),
-           (PNames.CHUNK_KEY, CaptureModes.SWEPT_CENTER_FREQUENCY),
+           (PNames.BATCH_KEY, CaptureModes.SWEPT_CENTER_FREQUENCY),
            (PNames.WATCH_EXTENSION, "bin")
        )
 
@@ -166,7 +166,7 @@ class _Receiver(BaseReceiver):
            PNames.FREQUENCY_RESOLUTION,
            PNames.WINDOW_TYPE,
            PNames.EVENT_HANDLER_KEY,
-           PNames.CHUNK_KEY,
+           PNames.BATCH_KEY,
            PNames.WATCH_EXTENSION
        )
 
@@ -12,12 +12,6 @@ from spectre_core.exceptions import ModeNotFoundError
 from ._spectrogram import Spectrogram
 from ._array_operations import is_close
 
-__all__ = [
-    "get_analytical_spectrogram",
-    "validate_analytically",
-    "TestResults"
-]
-
 @dataclass
 class TestResults:
     # Whether the times array matches analytically