mt-metadata 0.3.9__py2.py3-none-any.whl → 0.4.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mt-metadata might be problematic.

Files changed (95)
  1. mt_metadata/__init__.py +1 -1
  2. mt_metadata/base/helpers.py +84 -9
  3. mt_metadata/base/metadata.py +137 -65
  4. mt_metadata/features/__init__.py +14 -0
  5. mt_metadata/features/coherence.py +303 -0
  6. mt_metadata/features/cross_powers.py +29 -0
  7. mt_metadata/features/fc_coherence.py +81 -0
  8. mt_metadata/features/feature.py +72 -0
  9. mt_metadata/features/feature_decimation_channel.py +26 -0
  10. mt_metadata/features/feature_fc.py +24 -0
  11. mt_metadata/{transfer_functions/processing/aurora/decimation.py → features/feature_fc_run.py} +9 -4
  12. mt_metadata/features/feature_ts.py +24 -0
  13. mt_metadata/{transfer_functions/processing/aurora/window.py → features/feature_ts_run.py} +11 -18
  14. mt_metadata/features/standards/__init__.py +6 -0
  15. mt_metadata/features/standards/base_feature.json +46 -0
  16. mt_metadata/features/standards/coherence.json +57 -0
  17. mt_metadata/features/standards/fc_coherence.json +57 -0
  18. mt_metadata/features/standards/feature_decimation_channel.json +68 -0
  19. mt_metadata/features/standards/feature_fc_run.json +35 -0
  20. mt_metadata/features/standards/feature_ts_run.json +35 -0
  21. mt_metadata/features/standards/feature_weighting_window.json +46 -0
  22. mt_metadata/features/standards/weight_kernel.json +46 -0
  23. mt_metadata/features/standards/weights.json +101 -0
  24. mt_metadata/features/test_helpers/channel_weight_specs_example.json +156 -0
  25. mt_metadata/features/weights/__init__.py +0 -0
  26. mt_metadata/features/weights/base.py +44 -0
  27. mt_metadata/features/weights/channel_weight_spec.py +209 -0
  28. mt_metadata/features/weights/feature_weight_spec.py +194 -0
  29. mt_metadata/features/weights/monotonic_weight_kernel.py +275 -0
  30. mt_metadata/features/weights/standards/__init__.py +6 -0
  31. mt_metadata/features/weights/standards/activation_monotonic_weight_kernel.json +38 -0
  32. mt_metadata/features/weights/standards/base.json +36 -0
  33. mt_metadata/features/weights/standards/channel_weight_spec.json +35 -0
  34. mt_metadata/features/weights/standards/composite.json +36 -0
  35. mt_metadata/features/weights/standards/feature_weight_spec.json +13 -0
  36. mt_metadata/features/weights/standards/monotonic_weight_kernel.json +49 -0
  37. mt_metadata/features/weights/standards/taper_monotonic_weight_kernel.json +16 -0
  38. mt_metadata/features/weights/taper_weight_kernel.py +60 -0
  39. mt_metadata/helper_functions.py +69 -0
  40. mt_metadata/timeseries/filters/channel_response.py +77 -37
  41. mt_metadata/timeseries/filters/coefficient_filter.py +6 -5
  42. mt_metadata/timeseries/filters/filter_base.py +11 -15
  43. mt_metadata/timeseries/filters/fir_filter.py +8 -1
  44. mt_metadata/timeseries/filters/frequency_response_table_filter.py +26 -11
  45. mt_metadata/timeseries/filters/helper_functions.py +0 -2
  46. mt_metadata/timeseries/filters/obspy_stages.py +4 -1
  47. mt_metadata/timeseries/filters/pole_zero_filter.py +9 -5
  48. mt_metadata/timeseries/filters/time_delay_filter.py +8 -1
  49. mt_metadata/timeseries/location.py +20 -5
  50. mt_metadata/timeseries/person.py +14 -7
  51. mt_metadata/timeseries/standards/person.json +1 -1
  52. mt_metadata/timeseries/standards/run.json +2 -2
  53. mt_metadata/timeseries/station.py +4 -2
  54. mt_metadata/timeseries/stationxml/__init__.py +5 -0
  55. mt_metadata/timeseries/stationxml/xml_channel_mt_channel.py +25 -27
  56. mt_metadata/timeseries/stationxml/xml_inventory_mt_experiment.py +16 -47
  57. mt_metadata/timeseries/stationxml/xml_station_mt_station.py +25 -24
  58. mt_metadata/transfer_functions/__init__.py +3 -0
  59. mt_metadata/transfer_functions/core.py +8 -11
  60. mt_metadata/transfer_functions/io/emtfxml/metadata/location.py +5 -0
  61. mt_metadata/transfer_functions/io/emtfxml/metadata/provenance.py +14 -3
  62. mt_metadata/transfer_functions/io/tools.py +2 -0
  63. mt_metadata/transfer_functions/io/zonge/metadata/header.py +1 -1
  64. mt_metadata/transfer_functions/io/zonge/metadata/standards/header.json +1 -1
  65. mt_metadata/transfer_functions/io/zonge/metadata/standards/job.json +2 -2
  66. mt_metadata/transfer_functions/io/zonge/zonge.py +19 -23
  67. mt_metadata/transfer_functions/processing/__init__.py +2 -1
  68. mt_metadata/transfer_functions/processing/aurora/__init__.py +2 -4
  69. mt_metadata/transfer_functions/processing/aurora/band.py +46 -125
  70. mt_metadata/transfer_functions/processing/aurora/channel_nomenclature.py +27 -20
  71. mt_metadata/transfer_functions/processing/aurora/decimation_level.py +324 -152
  72. mt_metadata/transfer_functions/processing/aurora/frequency_bands.py +230 -0
  73. mt_metadata/transfer_functions/processing/aurora/processing.py +3 -3
  74. mt_metadata/transfer_functions/processing/aurora/run.py +32 -7
  75. mt_metadata/transfer_functions/processing/aurora/standards/decimation_level.json +7 -73
  76. mt_metadata/transfer_functions/processing/aurora/stations.py +33 -4
  77. mt_metadata/transfer_functions/processing/fourier_coefficients/decimation.py +176 -178
  78. mt_metadata/transfer_functions/processing/fourier_coefficients/fc.py +11 -9
  79. mt_metadata/transfer_functions/processing/fourier_coefficients/standards/decimation.json +1 -111
  80. mt_metadata/transfer_functions/processing/short_time_fourier_transform.py +64 -0
  81. mt_metadata/transfer_functions/processing/standards/__init__.py +6 -0
  82. mt_metadata/transfer_functions/processing/standards/short_time_fourier_transform.json +94 -0
  83. mt_metadata/transfer_functions/processing/{aurora/standards/decimation.json → standards/time_series_decimation.json} +17 -6
  84. mt_metadata/transfer_functions/processing/{aurora/standards → standards}/window.json +13 -2
  85. mt_metadata/transfer_functions/processing/time_series_decimation.py +50 -0
  86. mt_metadata/transfer_functions/processing/window.py +118 -0
  87. mt_metadata/transfer_functions/tf/station.py +17 -1
  88. mt_metadata/utils/mttime.py +22 -3
  89. mt_metadata/utils/validators.py +4 -2
  90. {mt_metadata-0.3.9.dist-info → mt_metadata-0.4.0.dist-info}/METADATA +39 -15
  91. {mt_metadata-0.3.9.dist-info → mt_metadata-0.4.0.dist-info}/RECORD +95 -55
  92. {mt_metadata-0.3.9.dist-info → mt_metadata-0.4.0.dist-info}/WHEEL +1 -1
  93. {mt_metadata-0.3.9.dist-info → mt_metadata-0.4.0.dist-info}/AUTHORS.rst +0 -0
  94. {mt_metadata-0.3.9.dist-info → mt_metadata-0.4.0.dist-info}/LICENSE +0 -0
  95. {mt_metadata-0.3.9.dist-info → mt_metadata-0.4.0.dist-info}/top_level.txt +0 -0
mt_metadata/transfer_functions/processing/fourier_coefficients/decimation.py

@@ -1,45 +1,84 @@
 # -*- coding: utf-8 -*-
 """
-Created on Fri Feb 25 15:20:59 2022
+This module contains the Decimation Metadata class. This class interacts with a decimation JSON.
+It contains the metadata to specify a transformation from time series to a Spectrogram, including
+cascadng decimation info.

-@author: jpeacock
+There are two main use cases for this class. On the one hand, this can be used to specify a
+set of processing parameters to create an FCDecimation, which can then be stored in an MTH5 archive.
+On the other hand, this metadata gets stored along with Spectrograms in an MTH5 archive and can
+be used to access the parameters associated with the spectrograms creation.
+
+TODO: Consider renaming this class to FCDecmiation, to contrast with other Decimation objects,
+or FCDecimationLevel to make it
+Also see notes in mt_metadata issue 235.
+
+Created on Fri Feb 25 15:20:59 2022
+
+@author: jpeacock
 """
 # =============================================================================
 # Imports
 # =============================================================================
 from collections import OrderedDict
+from loguru import logger
+from typing import List, Optional
+
+from .standards import SCHEMA_FN_PATHS
 from mt_metadata.base.helpers import write_lines
 from mt_metadata.base import get_schema, Base
 from mt_metadata.timeseries import TimePeriod
-from mt_metadata.transfer_functions.processing.aurora import Window
-from mt_metadata.transfer_functions.processing.aurora.decimation_level import get_fft_harmonics
+from mt_metadata.transfer_functions.processing.short_time_fourier_transform import ShortTimeFourierTransform
+from mt_metadata.transfer_functions.processing.time_series_decimation import TimeSeriesDecimation
+# from mt_metadata.transfer_functions.processing.aurora.decimation_level import DecimationLevel as AuroraDecimationLevel
 from mt_metadata.transfer_functions.processing.fourier_coefficients import (
-    Channel,
+    Channel as FCChannel
 )
-from .standards import SCHEMA_FN_PATHS
 from mt_metadata.utils.list_dict import ListDict

+import numpy as np
 # =============================================================================
 attr_dict = get_schema("decimation", SCHEMA_FN_PATHS)
 attr_dict.add_dict(TimePeriod()._attr_dict, "time_period")
-attr_dict.add_dict(Window()._attr_dict, "window")
+attr_dict.add_dict(ShortTimeFourierTransform()._attr_dict, "short_time_fourier_transform")
+attr_dict.add_dict(TimeSeriesDecimation()._attr_dict, "time_series_decimation")

 # =============================================================================
 class Decimation(Base):
+    """
+    TODO: the name of this class could be changed to something more appropriate.
+    TODO: consider adding an attr decimation to access TimeSeriesDecimation more briefly.
+    """
     __doc__ = write_lines(attr_dict)

     def __init__(self, **kwargs):
-        self.window = Window()
+        """
+        Constructor.
+
+        :param kwargs: TODO: add doc here
+        """
         self.time_period = TimePeriod()
         self.channels = ListDict()
+        self.time_series_decimation = TimeSeriesDecimation()
+        self.short_time_fourier_transform = ShortTimeFourierTransform()
+
         super().__init__(attr_dict=attr_dict, **kwargs)
-        # if self.decimation_level == 0:
-        # self.anti_alias_filter = None

-    def __len__(self):
+        if self.short_time_fourier_transform.per_window_detrend_type:
+            msg = f"per_window_detrend_type was set to {self.short_time_fourier_transform.per_window_detrend_type}"
+            msg += "however, this is not supported -- setting to empty string"
+            logger.debug(msg)
+            self.short_time_fourier_transform.per_window_detrend_type = ""
+
+    def __len__(self) -> int:
         return len(self.channels)

     def __add__(self, other):
+        """
+
+        :param other:
+        :return:
+        """
         if isinstance(other, Decimation):
             self.channels.extend(other.channels)

@@ -49,6 +88,21 @@ class Decimation(Base):
             self.logger.error(msg)
             raise TypeError(msg)

+    #----- Begin (Possibly Temporary) methods for integrating TimeSeriesDecimation, STFT Classes -----#
+
+    @property
+    def decimation(self) -> TimeSeriesDecimation:
+        """
+        Passthrough method to access self.time_series_decimation
+        """
+        return self.time_series_decimation
+
+    @property
+    def stft(self):
+        return self.short_time_fourier_transform
+
+    #----- End (Possibly Temporary) methods for integrating TimeSeriesDecimation, STFT Classes -----#
+
     def update(self, other, match=[]):
         """
         Update attribute values from another like element, skipping None
@@ -93,8 +147,8 @@ class Decimation(Base):
             self.add_channel(ch)

     @property
-    def channels_estimated(self):
-        """channels for fcs were estimated"""
+    def channels_estimated(self) -> list:
+        """channels for which fcs were estimated"""
         return [
             ch.component
             for ch in self.channels.values()
@@ -102,7 +156,7 @@ class Decimation(Base):
         ]

     @channels_estimated.setter
-    def channels_estimated(self, value):
+    def channels_estimated(self, value) -> None:
         """set channels esimated"""

         if value is None:
@@ -113,17 +167,17 @@ class Decimation(Base):

         for entry in value:
             if isinstance(entry, str):
-                self.add_channel(Channel(component=entry))
+                self.add_channel(FCChannel(component=entry))
             elif entry is None:
                 continue
-            elif isinstance(entry, Channel):
+            elif isinstance(entry, FCChannel):
                 self.add_channel(entry)
             else:
                 msg = f"entry must be a string or type FCChannel not {type(entry)}"
                 self.logger.error(msg)
                 raise ValueError(msg)

-    def has_channel(self, component):
+    def has_channel(self, component: str) -> bool:
         """
         Check to see if the channel already exists

@@ -144,15 +198,14 @@ class Decimation(Base):
         """
         if self.has_channel(component):
             return self.channels_estimated.index(component)
-        return None

-    def get_channel(self, component):
+    def get_channel(self, component: str) -> FCChannel:
         """
         Get a channel

         :param component: channel component to look for
         :type component: string
-        :return: channel object based on channel type
+        :return: FCChannel object based on channel type
         :rtype: :class:`mt_metadata.timeseries.Channel`

         """
@@ -160,7 +213,7 @@ class Decimation(Base):
         if self.has_channel(component):
             return self.channels[component]

-    def add_channel(self, channel_obj):
+    def add_channel(self, channel_obj: FCChannel) -> None:
         """
         Add a channel to the list, check if one exists if it does overwrite it

@@ -168,8 +221,8 @@ class Decimation(Base):
         :type channel_obj: :class:`mt_metadata.transfer_functions.processing.fourier_coefficients.Channel`

         """
-        if not isinstance(channel_obj, (Channel)):
-            msg = f"Input must be metadata.Channel not {type(channel_obj)}"
+        if not isinstance(channel_obj, (FCChannel)):
+            msg = f"Input must be metadata FCChannel not {type(channel_obj)}"
             self.logger.error(msg)
             raise ValueError(msg)

@@ -227,7 +280,7 @@ class Decimation(Base):

         for ii, channel in enumerate(value_list):
             try:
-                ch = Channel()
+                ch = FCChannel()
                 if hasattr(channel, "to_dict"):
                     channel = channel.to_dict()
                 ch.from_dict(channel)
@@ -268,26 +321,21 @@ class Decimation(Base):
         if self.time_period.end < max(end):
             self.time_period.end = max(end)

-    # Workarounds for pass-through usage of same decimation as aurora
-    @property
-    def factor(self):
-        return self.decimation_factor

-    @property
-    def sample_rate(self):
-        return self.sample_rate_decimation
+    def is_valid_for_time_series_length(self, n_samples_ts: int) -> bool:
+        """
+        Given a time series of len n_samples_ts, checks if there are sufficient samples to STFT.

-    def is_valid_for_time_series_length(self, n_samples_ts):
-        """Given a time series of len n_samples_ts, are there sufficient samples to STFT"""
+        """
         required_num_samples = (
-            self.window.num_samples
-            + (self.min_num_stft_windows - 1) * self.window.num_samples_advance
+            self.stft.window.num_samples
+            + (self.stft.min_num_stft_windows - 1) * self.stft.window.num_samples_advance
         )
         if n_samples_ts < required_num_samples:
             msg = (
                 f"{n_samples_ts} not enough samples for minimum of "
-                f"{self.min_num_stft_windows} stft windows of length "
-                f"{self.window.num_samples} and overlap {self.window.overlap}"
+                f"{self.stft.min_num_stft_windows} stft windows of length "
+                f"{self.stft.window.num_samples} and overlap {self.stft.window.overlap}"
             )
             self.logger.warning(msg)
             return False
@@ -295,158 +343,108 @@ class Decimation(Base):
         return True

     @property
-    def fft_frequencies(self):
-        return get_fft_harmonics(self.window.num_samples, self.sample_rate)
-
-
-    def has_fcs_for_aurora_processing(self, decimation_level, remote):
-        """
-
-        Parameters
-        ----------
-        decimation_level: mt_metadata.transfer_functions.processing.aurora.decimation_level.DecimationLevel
-        remote: bool
-
-        Iterates over parameters:
-        "channels_estimated", "anti_alias_filter", "sample_rate, "method", "prewhitening_type", "recoloring",
-        "pre_fft_detrend_type", "min_num_stft_windows", "window", "harmonic_indices",
-        Returns
-        -------
-
-        """
-        # "channels_estimated"
-        if remote:
-            required_channels = decimation_level.reference_channels
+    def fft_frequencies(self) -> np.ndarray:
+        """ Returns the one-sided fft frequencies (without Nyquist)"""
+        return self.stft.window.fft_harmonics(self.decimation.sample_rate)
+
+
+def fc_decimations_creator(
+    initial_sample_rate: float,
+    decimation_factors: Optional[list] = None,
+    max_levels: Optional[int] = 6,
+    time_period: Optional[TimePeriod] = None,
+) -> List[Decimation]:
+    """
+
+    Creates mt_metadata FCDecimation objects that parameterize Fourier coefficient decimation levels.
+
+    Note 1: This does not yet work through the assignment of which bands to keep. Refer to
+    mt_metadata.transfer_functions.processing.Processing.assign_bands() to see how this was done in the past
+
+    Parameters
+    ----------
+    initial_sample_rate: float
+        Sample rate of the "level0" data -- usually the sample rate during field acquisition.
+    decimation_factors: Optional[list]
+        The decimation factors that will be applied at each FC decimation level
+    max_levels: Optional[int]
+        The maximum number of decimation levels to allow
+    time_period: Optional[TimePeriod]
+        Provides the start and end times
+
+    Returns
+    -------
+    fc_decimations: list
+        Each element of the list is an object of type
+        mt_metadata.transfer_functions.processing.fourier_coefficients.Decimation,
+        (a.k.a. FCDecimation).
+
+    The order of the list corresponds the order of the cascading decimation
+    - No decimation levels are omitted.
+    - This could be changed in future by using a dict instead of a list,
+    - e.g. decimation_factors = dict(zip(np.arange(max_levels), decimation_factors))
+
+    """
+    if not decimation_factors:
+        # msg = "No decimation factors given, set default values to EMTF default values [1, 4, 4, 4, ..., 4]")
+        # logger.info(msg)
+        default_decimation_factor = 4
+        decimation_factors = max_levels * [default_decimation_factor]
+        decimation_factors[0] = 1
+
+    # See Note 1
+    fc_decimations = []
+    for i_dec_level, decimation_factor in enumerate(decimation_factors):
+        fc_dec = Decimation()
+        fc_dec.time_series_decimation.level = i_dec_level
+        fc_dec.id = f"{i_dec_level}"
+        fc_dec.time_series_decimation.factor = decimation_factor
+        if i_dec_level == 0:
+            current_sample_rate = 1.0 * initial_sample_rate
         else:
-            required_channels = decimation_level.local_channels
-
-        try:
-            assert set(required_channels).issubset(self.channels_estimated)
-        except AssertionError:
-            msg = (
-                f"required_channels for processing {required_channels} not available"
-                f"-- fc channels estimated are {self.channels_estimated}"
-            )
-            self.logger.info(msg)
-            return False
+            current_sample_rate /= decimation_factor
+        fc_dec.time_series_decimation.sample_rate = current_sample_rate

-        # anti_alias_filter
-        try:
-            assert self.anti_alias_filter == decimation_level.anti_alias_filter
-        except AssertionError:
-            cond1 = decimation_level.anti_alias_filter == "default"
-            cond2 = self.anti_alias_filter is None
-            if cond1 & cond2:
-                pass
+        if time_period:
+            if isinstance(time_period, TimePeriod):
+                fc_dec.time_period = time_period
             else:
                 msg = (
-                    "Antialias Filters Not Compatible -- need to add handling for "
-                    f"{msg} FCdec {self.anti_alias_filter} and "
-                    f"{msg} processing config:{decimation_level.anti_alias_filter}"
+                    f"Not sure how to assign time_period with type {type(time_period)}"
                 )
+                logger.info(msg)
                 raise NotImplementedError(msg)

-        # sample_rate
-        try:
-            assert (
-                self.sample_rate_decimation
-                == decimation_level.decimation.sample_rate
-            )
-        except AssertionError:
-            msg = (
-                f"Sample rates do not agree: fc {self.sample_rate_decimation} differs from "
-                f"processing config {decimation_level.decimation.sample_rate}"
-            )
-            self.logger.info(msg)
-            return False
-
-        # method (fft, wavelet, etc.)
-        try:
-            assert self.method == decimation_level.method
-        except AssertionError:
-            msg = (
-                "Transform methods do not agree "
-                f"{self.method} != {decimation_level.method}"
-            )
-            self.logger.info(msg)
-            return False
+        fc_decimations.append(fc_dec)

-        # prewhitening_type
-        try:
-            assert self.prewhitening_type == decimation_level.prewhitening_type
-        except AssertionError:
-            msg = (
-                "prewhitening_type does not agree "
-                f"{self.prewhitening_type} != {decimation_level.prewhitening_type}"
-            )
-            self.logger.info(msg)
-            return False
+    return fc_decimations

-        # recoloring
-        try:
-            assert self.recoloring == decimation_level.recoloring
-        except AssertionError:
-            msg = (
-                "recoloring does not agree "
-                f"{self.recoloring} != {decimation_level.recoloring}"
-            )
-            self.logger.info(msg)
-            return False
+def get_degenerate_fc_decimation(sample_rate: float) -> list:
+    """
+    WIP

-        # pre_fft_detrend_type
-        try:
-            assert (
-                self.pre_fft_detrend_type
-                == decimation_level.pre_fft_detrend_type
-            )
-        except AssertionError:
-            msg = (
-                "pre_fft_detrend_type does not agree "
-                f"{self.pre_fft_detrend_type} != {decimation_level.pre_fft_detrend_type}"
-            )
-            self.logger.info(msg)
-            return False
+    Makes a default fc_decimation list.
+    This "degenerate" config will only operate on the first decimation level.
+    This is useful for testing. It could also be used in future on an MTH5 stored
+    time series in decimation levels already as separate runs.

-        # min_num_stft_windows
-        try:
-            assert (
-                self.min_num_stft_windows
-                == decimation_level.min_num_stft_windows
-            )
-        except AssertionError:
-            msg = (
-                "min_num_stft_windows do not agree "
-                f"{self.min_num_stft_windows} != {decimation_level.min_num_stft_windows}"
-            )
-            self.logger.info(msg)
-            return False
+    Parameters
+    ----------
+    sample_rate: float
+        The sample rate associated with the time-series to convert to spectrogram

-        # window
-        try:
-            assert self.window == decimation_level.window
-        except AssertionError:
-            msg = "window does not agree: "
-            msg = f"{msg} FC Group: {self.window} "
-            msg = f"{msg} Processing Config {decimation_level.window}"
-            self.logger.info(msg)
-            return False
+    Returns
+    -------
+    output: list
+        List has only one element which is of type FCDecimation, aka.

-        if -1 in self.harmonic_indices:
-            # if harmonic_indices is -1, it means keep all so we can skip this check.
-            pass
-        else:
-            harmonic_indices_requested = decimation_level.harmonic_indices
-            fcdec_group_set = set(self.harmonic_indices)
-            processing_set = set(harmonic_indices_requested)
-            if processing_set.issubset(fcdec_group_set):
-                pass
-            else:
-                msg = (
-                    f"Processing FC indices {processing_set} is not contained "
-                    f"in FC indices {fcdec_group_set}"
-                )
-                self.logger.info(msg)
-                return False
+    """
+    output = fc_decimations_creator(
+        sample_rate,
+        decimation_factors=[
+            1,
+        ],
+        max_levels=1,
+    )
+    return output

-        # No checks were failed the FCDecimation supports the processing config
-        return True
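
Note: for orientation, the following is a minimal usage sketch of the reworked FCDecimation class and the new module-level fc_decimations_creator function. It is inferred only from the signatures and attribute names visible in the hunks above, not from the package documentation, and the import path assumes these hunks belong to fourier_coefficients/decimation.py as listed in the files-changed table.

# Sketch based only on this diff; anything not shown in the hunks above is an assumption.
from mt_metadata.transfer_functions.processing.fourier_coefficients.decimation import (
    Decimation,
    fc_decimations_creator,
)

# Build a cascade of FC decimation levels starting from a 1 Hz field sample rate.
fc_decimations = fc_decimations_creator(
    initial_sample_rate=1.0,
    decimation_factors=[1, 4, 4],
)

for fc_dec in fc_decimations:
    # Decimation parameters now live on the nested time_series_decimation object,
    # reachable through the pass-through `decimation` property ...
    print(fc_dec.decimation.level, fc_dec.decimation.factor, fc_dec.decimation.sample_rate)
    # ... and STFT/window parameters on short_time_fourier_transform, alias `stft`.
    print(fc_dec.stft.min_num_stft_windows, fc_dec.stft.window.num_samples)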
mt_metadata/transfer_functions/processing/fourier_coefficients/fc.py

@@ -32,6 +32,8 @@ class FC(Base):

         self.time_period = TimePeriod()
         self.levels = ListDict()
+        self._decimation_levels = []
+        self._channels_estimated = []

         super().__init__(attr_dict=attr_dict, **kwargs)

@@ -97,7 +99,7 @@ class FC(Base):
         """list of decimation levels"""
         dl_list = []
         for dl in self.levels:
-            dl_list.append(dl.decimation_level)
+            dl_list.append(dl.decimation.level)
         dl_list = sorted(set([cc for cc in dl_list if cc is not None]))
         if self._decimation_levels == []:
             return dl_list
@@ -201,7 +203,7 @@ class FC(Base):
         if self.has_decimation_level(level):
             return self.levels[str(level)]

-    def add_decimation_level(self, decimation_level_obj):
+    def add_decimation_level(self, fc_decimation):
         """
         Add a decimation_level to the list, check if one exists if it does overwrite it

@@ -209,21 +211,21 @@ class FC(Base):
         :type decimation_level_obj: :class:`mt_metadata.transfer_functions.processing.fourier_coefficients.decimation_level`

         """
-        if not isinstance(decimation_level_obj, (Decimation)):
-            msg = f"Input must be metadata.decimation_level not {type(decimation_level_obj)}"
+        if not isinstance(fc_decimation, (Decimation)):
+            msg = f"Input must be metadata.decimation_level not {type(fc_decimation)}"
             self.logger.error(msg)
             raise ValueError(msg)

-        if self.has_decimation_level(decimation_level_obj.decimation_level):
-            self.levels[decimation_level_obj.decimation_level].update(
-                decimation_level_obj
+        if self.has_decimation_level(fc_decimation.decimation.level):
+            self.levels[fc_decimation.decimation.level].update(
+                fc_decimation
             )
             self.logger.debug(
-                f"ch {decimation_level_obj.level} already exists, updating metadata"
+                f"ch {fc_decimation.decimation.level} already exists, updating metadata"
             )

         else:
-            self.levels.append(decimation_level_obj)
+            self.levels.append(fc_decimation)

         self.update_time_period()

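Note: a similarly hedged sketch for the FC container change above. The only API facts used are the ones visible in these hunks (add_decimation_level now takes an FCDecimation and keys existing levels off fc_decimation.decimation.level); the import paths are assumed from the files-changed table.

# Sketch based only on the fc.py hunks above; import paths are assumptions.
from mt_metadata.transfer_functions.processing.fourier_coefficients.fc import FC
from mt_metadata.transfer_functions.processing.fourier_coefficients.decimation import Decimation

fc = FC()
fc_dec = Decimation()
fc_dec.time_series_decimation.level = 0  # replaces the old flat `decimation_level` attribute

# add_decimation_level() now checks for / updates an existing level via fc_decimation.decimation.level
fc.add_decimation_level(fc_dec)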
mt_metadata/transfer_functions/processing/fourier_coefficients/standards/decimation.json

@@ -1,15 +1,4 @@
 {
-    "decimation_level": {
-        "type": "integer",
-        "required": true,
-        "style": "number",
-        "units": null,
-        "description": "Decimation level, must be a non-negative integer starting at 0",
-        "options": [],
-        "alias": [],
-        "example": "1",
-        "default": null
-    },
     "id": {
         "type": "string",
         "required": true,
@@ -21,7 +10,7 @@
         "example": "1",
         "default": null
     },
-    "channels_estimated": {
+    "channels_estimated": {
         "type": "string",
         "required": true,
         "style": "name list",
@@ -31,104 +20,5 @@
         "alias": [],
         "example": "[ex, hy]",
         "default": null
-    },
-    "sample_rate_decimation": {
-        "type": "float",
-        "required": true,
-        "style": "number",
-        "units": "samples per second",
-        "description": "Sample rate of the decimation level.",
-        "options": [],
-        "alias": [],
-        "example": 60,
-        "default": 1
-    },
-    "decimation_factor": {
-        "type": "integer",
-        "required": true,
-        "style": "number",
-        "units": null,
-        "description": "Decimation factor between initial sample rate and decimation sample rate.",
-        "options": [],
-        "alias": [],
-        "example": 4,
-        "default": 1
-    },
-    "min_num_stft_windows": {
-        "type": "integer",
-        "required": true,
-        "style": "number",
-        "units": null,
-        "description": "How many FFT windows must be available for the time series to valid for STFT.",
-        "options": [],
-        "alias": [],
-        "example": 4,
-        "default": 2
-    },
-    "method": {
-        "type": "string",
-        "required": true,
-        "style": "controlled vocabulary",
-        "units": null,
-        "description": "Fourier transform method",
-        "options": ["fft", "wavelet", "other"],
-        "alias": [],
-        "example": "fft",
-        "default": "fft"
-    },
-    "anti_alias_filter": {
-        "type": "string",
-        "required": true,
-        "style": "free form",
-        "units": null,
-        "description": "Type of anti alias filter for decimation.",
-        "options": [],
-        "alias": [],
-        "example": "default",
-        "default": "default"
-    },
-    "pre_fft_detrend_type": {
-        "type": "string",
-        "required": true,
-        "style": "free form",
-        "units": null,
-        "description": "Type of detrend method before FFT.",
-        "options": [],
-        "alias": [],
-        "example": "linear",
-        "default": "linear"
-    },
-    "prewhitening_type": {
-        "type": "string",
-        "required": true,
-        "style": "controlled vocabulary",
-        "units": null,
-        "description": "Prewhitening method to be applied",
-        "options": ["first difference", "other"],
-        "alias": [],
-        "example": "first difference",
-        "default": "first difference"
-    },
-    "recoloring": {
-        "type": "bool",
-        "required": true,
-        "style": "free form",
-        "units": null,
-        "description": "Whether the data are recolored [True] or not [False].",
-        "options": [],
-        "alias": [],
-        "example": true,
-        "default": true
-    },
-    "harmonic_indices": {
-        "type": "integer",
-        "required": true,
-        "style": "number list",
-        "units": null,
-        "description": "List of harmonics indices kept, if all use -1",
-        "options": [],
-        "alias": [],
-        "example": [0, 4, 8],
-        "default": [-1]
     }
 }
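
Note: the keys removed from decimation.json above have not simply disappeared; per the decimation.py hunks earlier in this diff, several of them are now reached through the nested time_series_decimation and short_time_fourier_transform objects. The sketch below only lists attributes whose new location is visible in this diff; the remaining removed keys (e.g. recoloring, harmonic_indices) are not shown being remapped here, so they are omitted. The import path is again an assumption from the files-changed table.

# Old flat attribute (0.3.9)      ->  new nested attribute (0.4.0), as used in the hunks above
# fc_dec.decimation_level         ->  fc_dec.time_series_decimation.level
# fc_dec.decimation_factor        ->  fc_dec.time_series_decimation.factor
# fc_dec.sample_rate_decimation   ->  fc_dec.time_series_decimation.sample_rate
# fc_dec.min_num_stft_windows     ->  fc_dec.short_time_fourier_transform.min_num_stft_windows
# fc_dec.window                   ->  fc_dec.short_time_fourier_transform.window
from mt_metadata.transfer_functions.processing.fourier_coefficients.decimation import Decimation

fc_dec = Decimation()
fc_dec.time_series_decimation.sample_rate = 0.25
assert fc_dec.decimation.sample_rate == 0.25  # `decimation` / `stft` are pass-through properties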