mt-metadata 0.3.9__py2.py3-none-any.whl → 0.4.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mt-metadata might be problematic.

Files changed (95)
  1. mt_metadata/__init__.py +1 -1
  2. mt_metadata/base/helpers.py +84 -9
  3. mt_metadata/base/metadata.py +137 -65
  4. mt_metadata/features/__init__.py +14 -0
  5. mt_metadata/features/coherence.py +303 -0
  6. mt_metadata/features/cross_powers.py +29 -0
  7. mt_metadata/features/fc_coherence.py +81 -0
  8. mt_metadata/features/feature.py +72 -0
  9. mt_metadata/features/feature_decimation_channel.py +26 -0
  10. mt_metadata/features/feature_fc.py +24 -0
  11. mt_metadata/{transfer_functions/processing/aurora/decimation.py → features/feature_fc_run.py} +9 -4
  12. mt_metadata/features/feature_ts.py +24 -0
  13. mt_metadata/{transfer_functions/processing/aurora/window.py → features/feature_ts_run.py} +11 -18
  14. mt_metadata/features/standards/__init__.py +6 -0
  15. mt_metadata/features/standards/base_feature.json +46 -0
  16. mt_metadata/features/standards/coherence.json +57 -0
  17. mt_metadata/features/standards/fc_coherence.json +57 -0
  18. mt_metadata/features/standards/feature_decimation_channel.json +68 -0
  19. mt_metadata/features/standards/feature_fc_run.json +35 -0
  20. mt_metadata/features/standards/feature_ts_run.json +35 -0
  21. mt_metadata/features/standards/feature_weighting_window.json +46 -0
  22. mt_metadata/features/standards/weight_kernel.json +46 -0
  23. mt_metadata/features/standards/weights.json +101 -0
  24. mt_metadata/features/test_helpers/channel_weight_specs_example.json +156 -0
  25. mt_metadata/features/weights/__init__.py +0 -0
  26. mt_metadata/features/weights/base.py +44 -0
  27. mt_metadata/features/weights/channel_weight_spec.py +209 -0
  28. mt_metadata/features/weights/feature_weight_spec.py +194 -0
  29. mt_metadata/features/weights/monotonic_weight_kernel.py +275 -0
  30. mt_metadata/features/weights/standards/__init__.py +6 -0
  31. mt_metadata/features/weights/standards/activation_monotonic_weight_kernel.json +38 -0
  32. mt_metadata/features/weights/standards/base.json +36 -0
  33. mt_metadata/features/weights/standards/channel_weight_spec.json +35 -0
  34. mt_metadata/features/weights/standards/composite.json +36 -0
  35. mt_metadata/features/weights/standards/feature_weight_spec.json +13 -0
  36. mt_metadata/features/weights/standards/monotonic_weight_kernel.json +49 -0
  37. mt_metadata/features/weights/standards/taper_monotonic_weight_kernel.json +16 -0
  38. mt_metadata/features/weights/taper_weight_kernel.py +60 -0
  39. mt_metadata/helper_functions.py +69 -0
  40. mt_metadata/timeseries/filters/channel_response.py +77 -37
  41. mt_metadata/timeseries/filters/coefficient_filter.py +6 -5
  42. mt_metadata/timeseries/filters/filter_base.py +11 -15
  43. mt_metadata/timeseries/filters/fir_filter.py +8 -1
  44. mt_metadata/timeseries/filters/frequency_response_table_filter.py +26 -11
  45. mt_metadata/timeseries/filters/helper_functions.py +0 -2
  46. mt_metadata/timeseries/filters/obspy_stages.py +4 -1
  47. mt_metadata/timeseries/filters/pole_zero_filter.py +9 -5
  48. mt_metadata/timeseries/filters/time_delay_filter.py +8 -1
  49. mt_metadata/timeseries/location.py +20 -5
  50. mt_metadata/timeseries/person.py +14 -7
  51. mt_metadata/timeseries/standards/person.json +1 -1
  52. mt_metadata/timeseries/standards/run.json +2 -2
  53. mt_metadata/timeseries/station.py +4 -2
  54. mt_metadata/timeseries/stationxml/__init__.py +5 -0
  55. mt_metadata/timeseries/stationxml/xml_channel_mt_channel.py +25 -27
  56. mt_metadata/timeseries/stationxml/xml_inventory_mt_experiment.py +16 -47
  57. mt_metadata/timeseries/stationxml/xml_station_mt_station.py +25 -24
  58. mt_metadata/transfer_functions/__init__.py +3 -0
  59. mt_metadata/transfer_functions/core.py +8 -11
  60. mt_metadata/transfer_functions/io/emtfxml/metadata/location.py +5 -0
  61. mt_metadata/transfer_functions/io/emtfxml/metadata/provenance.py +14 -3
  62. mt_metadata/transfer_functions/io/tools.py +2 -0
  63. mt_metadata/transfer_functions/io/zonge/metadata/header.py +1 -1
  64. mt_metadata/transfer_functions/io/zonge/metadata/standards/header.json +1 -1
  65. mt_metadata/transfer_functions/io/zonge/metadata/standards/job.json +2 -2
  66. mt_metadata/transfer_functions/io/zonge/zonge.py +19 -23
  67. mt_metadata/transfer_functions/processing/__init__.py +2 -1
  68. mt_metadata/transfer_functions/processing/aurora/__init__.py +2 -4
  69. mt_metadata/transfer_functions/processing/aurora/band.py +46 -125
  70. mt_metadata/transfer_functions/processing/aurora/channel_nomenclature.py +27 -20
  71. mt_metadata/transfer_functions/processing/aurora/decimation_level.py +324 -152
  72. mt_metadata/transfer_functions/processing/aurora/frequency_bands.py +230 -0
  73. mt_metadata/transfer_functions/processing/aurora/processing.py +3 -3
  74. mt_metadata/transfer_functions/processing/aurora/run.py +32 -7
  75. mt_metadata/transfer_functions/processing/aurora/standards/decimation_level.json +7 -73
  76. mt_metadata/transfer_functions/processing/aurora/stations.py +33 -4
  77. mt_metadata/transfer_functions/processing/fourier_coefficients/decimation.py +176 -178
  78. mt_metadata/transfer_functions/processing/fourier_coefficients/fc.py +11 -9
  79. mt_metadata/transfer_functions/processing/fourier_coefficients/standards/decimation.json +1 -111
  80. mt_metadata/transfer_functions/processing/short_time_fourier_transform.py +64 -0
  81. mt_metadata/transfer_functions/processing/standards/__init__.py +6 -0
  82. mt_metadata/transfer_functions/processing/standards/short_time_fourier_transform.json +94 -0
  83. mt_metadata/transfer_functions/processing/{aurora/standards/decimation.json → standards/time_series_decimation.json} +17 -6
  84. mt_metadata/transfer_functions/processing/{aurora/standards → standards}/window.json +13 -2
  85. mt_metadata/transfer_functions/processing/time_series_decimation.py +50 -0
  86. mt_metadata/transfer_functions/processing/window.py +118 -0
  87. mt_metadata/transfer_functions/tf/station.py +17 -1
  88. mt_metadata/utils/mttime.py +22 -3
  89. mt_metadata/utils/validators.py +4 -2
  90. {mt_metadata-0.3.9.dist-info → mt_metadata-0.4.0.dist-info}/METADATA +39 -15
  91. {mt_metadata-0.3.9.dist-info → mt_metadata-0.4.0.dist-info}/RECORD +95 -55
  92. {mt_metadata-0.3.9.dist-info → mt_metadata-0.4.0.dist-info}/WHEEL +1 -1
  93. {mt_metadata-0.3.9.dist-info → mt_metadata-0.4.0.dist-info}/AUTHORS.rst +0 -0
  94. {mt_metadata-0.3.9.dist-info → mt_metadata-0.4.0.dist-info}/LICENSE +0 -0
  95. {mt_metadata-0.3.9.dist-info → mt_metadata-0.4.0.dist-info}/top_level.txt +0 -0

mt_metadata/transfer_functions/processing/short_time_fourier_transform.py
@@ -0,0 +1,64 @@
+ """
+ This module contains the ShortTimeFourierTransform (STFT) metadata class.
+
+ Development Notes:
+ This is part of a refactoring of the FCDecimation and aurora DecimationLevel.
+
+ Both of those classes are essentially used to represent spectrograms,
+ and in the Aurora DecimationLevel case, information about processing is also included.
+
+ This class pulls out the metadata that are associated with the application of the STFT:
+
+ "harmonic_indices"
+ "method"
+ "min_num_stft_windows"
+ "per_window_detrend_type"
+ "pre_fft_detrend_type"
+ "prewhitening_type"
+ "recoloring"
+
+ Created on Sat Dec 28 18:39:00 2024
+
+ @author: kkappler
+ """
+
+ # =============================================================================
+ # Imports
+ # =============================================================================
+ from mt_metadata.base.helpers import write_lines
+ from mt_metadata.base import get_schema, Base
+ from mt_metadata.transfer_functions.processing.window import Window
+ from mt_metadata.transfer_functions.processing.standards import SCHEMA_FN_PATHS
+
+ # =============================================================================
+ attr_dict = get_schema("short_time_fourier_transform", SCHEMA_FN_PATHS)
+ attr_dict.add_dict(Window()._attr_dict, "window")
+
+ # =============================================================================
+
+
+ class ShortTimeFourierTransform(Base):
+     """
+     The ShortTimeFourierTransform (STFT) class contains information about how to apply
+     the STFT to the time series.
+     """
+
+     __doc__ = write_lines(attr_dict)
+
+     def __init__(self, **kwargs):
+         """
+         Constructor.
+
+         :param kwargs: metadata attribute values passed through to Base
+         """
+         self.window = Window()
+         super().__init__(attr_dict=attr_dict, **kwargs)
+
+
+ def main():
+     stft = ShortTimeFourierTransform()
+
+
+ if __name__ == "__main__":
+     main()
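
A minimal usage sketch of the new class (the import path follows the files-changed list above; attribute names come from short_time_fourier_transform.json below; to_dict() is assumed to be one of the Base serialization helpers):

    from mt_metadata.transfer_functions.processing.short_time_fourier_transform import (
        ShortTimeFourierTransform,
    )

    stft = ShortTimeFourierTransform()
    stft.method = "fft"                          # controlled vocabulary: fft, wavelet, other
    stft.prewhitening_type = "first difference"
    stft.recoloring = True
    stft.window.type = "hamming"                 # nested Window attributes added via add_dict(..., "window")
    stft.window.num_samples = 256
    print(stft.to_dict())                        # assumed Base serialization helper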

mt_metadata/transfer_functions/processing/standards/__init__.py
@@ -0,0 +1,6 @@
+ # package file
+ from pathlib import Path
+
+ SCHEMA_PATH = Path(__file__).parent
+
+ SCHEMA_FN_PATHS = list(SCHEMA_PATH.glob("*.json"))

mt_metadata/transfer_functions/processing/standards/short_time_fourier_transform.json
@@ -0,0 +1,94 @@
+ {
+     "harmonic_indices": {
+         "type": "integer",
+         "required": true,
+         "style": "number list",
+         "units": null,
+         "description": "List of harmonic indices kept; if all, use -1",
+         "options": [],
+         "alias": [],
+         "example": [0, 4, 8],
+         "default": [-1]
+     },
+     "method": {
+         "type": "string",
+         "required": true,
+         "style": "controlled vocabulary",
+         "units": null,
+         "description": "Fourier transform method",
+         "options": [
+             "fft",
+             "wavelet",
+             "other"
+         ],
+         "alias": [],
+         "example": "fft",
+         "default": "fft"
+     },
+     "min_num_stft_windows": {
+         "type": "integer",
+         "required": true,
+         "style": "number",
+         "units": null,
+         "description": "How many FFT windows must be available for the time series to be valid for STFT.",
+         "options": [],
+         "alias": [],
+         "example": 4,
+         "default": 2
+     },
+     "per_window_detrend_type": {
+         "type": "string",
+         "required": true,
+         "style": "controlled vocabulary",
+         "units": null,
+         "description": "Additional detrending applied per window. Not available for the standard scipy spectrogram -- placeholder for ARMA prewhitening.",
+         "options": [
+             "linear",
+             "constant",
+             ""
+         ],
+         "alias": [],
+         "example": "linear",
+         "default": ""
+     },
+     "pre_fft_detrend_type": {
+         "type": "string",
+         "required": true,
+         "style": "controlled vocabulary",
+         "units": null,
+         "description": "Pre-FFT detrend method to be applied",
+         "options": [
+             "linear",
+             "other",
+             ""
+         ],
+         "alias": [],
+         "example": "linear",
+         "default": "linear"
+     },
+     "prewhitening_type": {
+         "type": "string",
+         "required": true,
+         "style": "controlled vocabulary",
+         "units": null,
+         "description": "Prewhitening method to be applied",
+         "options": [
+             "first difference",
+             "other"
+         ],
+         "alias": [],
+         "example": "first difference",
+         "default": "first difference"
+     },
+     "recoloring": {
+         "type": "bool",
+         "required": true,
+         "style": "free form",
+         "units": null,
+         "description": "Whether the data are recolored [True] or not [False].",
+         "options": [],
+         "alias": [],
+         "example": true,
+         "default": true
+     }
+ }
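
The defaults in this schema become the initial attribute values on the class; a small sketch of what a fresh instance is expected to report (assuming the ShortTimeFourierTransform class from the first hunk above):

    from mt_metadata.transfer_functions.processing.short_time_fourier_transform import (
        ShortTimeFourierTransform,
    )

    stft = ShortTimeFourierTransform()
    print(stft.method)                # "fft"
    print(stft.prewhitening_type)     # "first difference"
    print(stft.harmonic_indices)      # [-1]  (keep all harmonics)
    print(stft.min_num_stft_windows)  # 2
    print(stft.recoloring)            # True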

mt_metadata/transfer_functions/processing/{aurora/standards/decimation.json → standards/time_series_decimation.json}
@@ -4,7 +4,7 @@
  "required": true,
  "style": "number",
  "units": null,
- "description": "Decimation level in sequential order",
+ "description": "Decimation level, must be a non-negative integer starting at 0",
  "options": [],
  "alias": [],
  "example": "0",
@@ -15,10 +15,10 @@
  "required": true,
  "style": "number",
  "units": null,
- "description": "Decimation factor",
+ "description": "Decimation factor between parent sample rate and decimated time series sample rate.",
  "options": [],
  "alias": [],
- "example": "1",
+ "example": "4.0",
  "default": 1.0
  },
  "method": {
@@ -31,16 +31,27 @@
  "alias": [],
  "example": "default",
  "default": "default"
- },
+ },
  "sample_rate": {
  "type": "float",
  "required": true,
  "style": "number",
  "units": "samples per second",
- "description": "Sample rate of the data after decimation.",
+ "description": "Sample rate of the decimation level data (after decimation).",
  "options": [],
  "alias": [],
  "example": "256",
  "default": 1
+ },
+ "anti_alias_filter": {
+ "type": "string",
+ "required": true,
+ "style": "free form",
+ "units": null,
+ "description": "Type of anti alias filter for decimation.",
+ "options": [],
+ "alias": [],
+ "example": "default",
+ "default": "default"
  }
- }
+ }

mt_metadata/transfer_functions/processing/{aurora/standards → standards}/window.json
@@ -33,7 +33,7 @@
  "blackman",
  "hamming",
  "hann",
- "bartlett",
+ "bartlett",
  "flattop",
  "parzen",
  "bohman",
@@ -74,5 +74,16 @@
  "alias": [],
  "example": "2020-02-01T09:23:45.453670+00:00",
  "default": "1980-01-01T00:00:00+00:00"
+ },
+ "normalized": {
+ "type": "bool",
+ "required": true,
+ "units": null,
+ "style": "free form",
+ "description": "True if the window shall be normalized so the sum of the coefficients is 1",
+ "options": [],
+ "alias": ["normalised"],
+ "example": false,
+ "default": true
  }
- }
+ }

mt_metadata/transfer_functions/processing/time_series_decimation.py
@@ -0,0 +1,50 @@
+ # -*- coding: utf-8 -*-
+ """
+ This module contains the metadata TimeSeriesDecimation class.
+
+ Development Notes:
+ This is part of a refactoring that seeks to separate the FCDecimation and aurora DecimationLevel
+ from the time series decimation.
+
+ The previous version of this class was in processing/aurora/decimation.py and had attrs
+ ["level", "factor", "method", "sample_rate", "anti_alias_filter"].
+
+ TODO: Consider adding a parent_sample_rate attribute to this class.
+
+ Created on Thu Dec 26 12:00:00 2024
+
+ @author: kkappler
+ """
+
+ # =============================================================================
+ # Imports
+ # =============================================================================
+ from mt_metadata.base.helpers import write_lines
+ from mt_metadata.base import get_schema, Base
+ from mt_metadata.transfer_functions.processing.standards import SCHEMA_FN_PATHS
+
+ # =============================================================================
+ attr_dict = get_schema("time_series_decimation", SCHEMA_FN_PATHS)
+ # =============================================================================
+
+
+ class TimeSeriesDecimation(Base):
+     """
+     The decimation class contains information about how to decimate a time series as well
+     as attributes to describe its place in the mth5 hierarchy.
+
+     Key pieces of information:
+     1. The decimation level, an integer that gives the sequential order in a decimation scheme.
+     2. The decimation factor. This is normally an integer, but the schema does allow floating point values.
+     """
+
+     __doc__ = write_lines(attr_dict)
+
+     def __init__(self, **kwargs):
+         super().__init__(attr_dict=attr_dict, **kwargs)
+
+     # TODO: add this logic to __init__ and a test
+     # if self.level == 0:
+     #     self.anti_alias_filter = None
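
A short usage sketch of TimeSeriesDecimation (attribute names and defaults come from time_series_decimation.json above; the sample-rate relation follows the factor description in that schema):

    from mt_metadata.transfer_functions.processing.time_series_decimation import (
        TimeSeriesDecimation,
    )

    tsd = TimeSeriesDecimation()
    tsd.level = 1              # sequential decimation level, starting at 0
    tsd.factor = 4.0           # ratio of parent sample rate to decimated sample rate
    tsd.sample_rate = 256.0    # samples per second after decimation
    tsd.anti_alias_filter = "default"

    # per the schema, the factor ties the parent and decimated rates together
    parent_sample_rate = tsd.factor * tsd.sample_rate   # 1024.0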

mt_metadata/transfer_functions/processing/window.py
@@ -0,0 +1,118 @@
+ # -*- coding: utf-8 -*-
+ """
+ Created on Thu Feb 17 14:15:20 2022
+
+ Updated 2025-01-02: kkappler, adding methods to generate taper values. In the future this class
+ can replace ApodizationWindow in aurora.
+
+ @author: jpeacock
+ """
+ # =============================================================================
+ # Imports
+ # =============================================================================
+ from mt_metadata.base.helpers import write_lines
+ from mt_metadata.base import get_schema, Base
+ from .standards import SCHEMA_FN_PATHS
+
+ import numpy as np
+ import scipy.signal as ssig
+
+ # =============================================================================
+ attr_dict = get_schema("window", SCHEMA_FN_PATHS)
+ # =============================================================================
+
+
+ class Window(Base):
+     __doc__ = write_lines(attr_dict)
+
+     def __init__(self, **kwargs):
+         super().__init__(attr_dict=attr_dict, **kwargs)
+         self.additional_args = kwargs.get("additional_args", {})
+         self._taper = None
+
+     @property
+     def additional_args(self) -> dict:
+         return self._additional_args
+
+     @additional_args.setter
+     def additional_args(self, args):
+         if not isinstance(args, dict):
+             raise TypeError("additional_args must be a dictionary")
+         self._additional_args = args
+
+     @property
+     def num_samples_advance(self):
+         return self.num_samples - self.overlap
+
+     def fft_harmonics(self, sample_rate: float) -> np.ndarray:
+         """
+         Returns the frequencies for an FFT of a window of this length.
+
+         :param sample_rate: sample rate of the windowed time series in Hz
+         :return: one-sided FFT frequencies
+         """
+         return get_fft_harmonics(
+             samples_per_window=self.num_samples,
+             sample_rate=sample_rate
+         )
+
+     def taper(self) -> np.ndarray:
+         """
+         Gets the window coefficients via a wrapper call to scipy.signal.
+
+         Note: see scipy.signal.get_window for a description of what is expected in args[1:].
+         http://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.get_window.html
+
+         Returns
+         -------
+         taper: np.ndarray
+             The window coefficients, normalized to sum to 1 if self.normalized is True.
+         """
+         if self._taper is None:
+             # Repackaging the args so that scipy.signal.get_window() accepts all cases
+             window_args = [v for k, v in self.additional_args.items()]
+             window_args.insert(0, self.type)
+             window_args = tuple(window_args)
+
+             taper = ssig.get_window(window_args, self.num_samples)
+
+             if self.normalized:
+                 taper /= np.sum(taper)
+
+             self._taper = taper
+
+         return self._taper
+
+
+ def get_fft_harmonics(
+     samples_per_window: int,
+     sample_rate: float
+ ) -> np.ndarray:
+     """
+     Works for odd and even number of points.
+
+     Development notes:
+     - Could be modified with arguments to support one_sided, two_sided, ignore_dc,
+       ignore_nyquist, etc. Consider taking FrequencyBands as an argument.
+     - This function was in decimation_level, but there were circular import issues.
+       The function needs only a window length and sample rate, so putting it here for now.
+     - TODO: switch to using np.fft.rfftfreq
+
+     Parameters
+     ----------
+     samples_per_window: int
+         Number of samples in a window that will be Fourier transformed.
+     sample_rate: float
+         Inverse of time step between samples; samples per second in Hz.
+
+     Returns
+     -------
+     harmonic_frequencies: numpy array
+         The frequencies at which the fft will be computed.
+         These are one-sided (positive frequencies only).
+         Does _not_ return Nyquist.
+         Does return the DC component.
+     """
+     delta_t = 1.0 / sample_rate
+     harmonic_frequencies = np.fft.fftfreq(samples_per_window, d=delta_t)
+     n_fft_harmonics = int(samples_per_window / 2)  # no bin at Nyquist
+     harmonic_frequencies = harmonic_frequencies[0:n_fft_harmonics]
+     return harmonic_frequencies
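
A brief sketch of the new taper and harmonic-frequency helpers (assuming the num_samples, overlap, type, and normalized attributes defined in window.json):

    import numpy as np
    from mt_metadata.transfer_functions.processing.window import Window

    win = Window()
    win.type = "hamming"
    win.num_samples = 128
    win.overlap = 32
    win.normalized = True

    coeffs = win.taper()                                 # scipy.signal.get_window under the hood
    print(coeffs.size, np.isclose(coeffs.sum(), 1.0))    # 128 True  (normalized window)

    freqs = win.fft_harmonics(sample_rate=1.0)           # one-sided, includes DC, excludes Nyquist
    print(freqs.size)                                    # 64
    print(win.num_samples_advance)                       # 96 = num_samples - overlap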

mt_metadata/transfer_functions/tf/station.py
@@ -12,6 +12,7 @@ Created on Wed Dec 23 21:30:36 2020
  # Imports
  # =============================================================================
  import numpy as np
+ import copy
  from collections import OrderedDict
  from mt_metadata.base.helpers import write_lines
  from mt_metadata.base import get_schema, Base
@@ -30,6 +31,7 @@ from . import (
      Run,
      TransferFunction,
  )
+
  from mt_metadata.utils.list_dict import ListDict

  # =============================================================================
@@ -56,13 +58,17 @@ attr_dict.add_dict(
  )

  attr_dict.add_dict(get_schema("time_period", TS_SCHEMA_FN_PATHS), "time_period")
- attr_dict.add_dict(TransferFunction()._attr_dict, "transfer_function")
+ attr_dict.add_dict(
+     copy.deepcopy(TransferFunction()._attr_dict), "transfer_function"
+ )
  attr_dict.add_dict(get_schema("copyright", TS_SCHEMA_FN_PATHS), None)
  attr_dict["release_license"]["required"] = False
  attr_dict.add_dict(
      get_schema("citation", TS_SCHEMA_FN_PATHS), None, keys=["doi"]
  )
  attr_dict["doi"]["required"] = False
+
+
  # =============================================================================
  class Station(Base):
      __doc__ = write_lines(attr_dict)
@@ -78,7 +84,17 @@ class Station(Base):
          self.time_period = TimePeriod()
          self.transfer_function = TransferFunction()
          self.runs = ListDict()
+
          super().__init__(attr_dict=attr_dict, **kwargs)
+         # For now this is a hack. Somewhere processing parameters are being
+         # set globally. This resets them to the default of an empty list.
+         self.transfer_function.processing_parameters = []
+         try:
+             self.transfer_function.processing_parameters = kwargs[
+                 "transfer_function.processing_parameters"
+             ]
+         except KeyError:
+             pass

      def __add__(self, other):
          if isinstance(other, Station):
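
The switch to copy.deepcopy above guards against shared mutable state in the class-level attribute dictionary; a generic illustration of the pitfall (plain Python, not the mt_metadata API):

    import copy

    defaults = {"processing_parameters": []}

    shallow = dict(defaults)                      # copies the outer dict only
    shallow["processing_parameters"].append("key=value")
    print(defaults["processing_parameters"])      # ['key=value'] -- leaked into the shared defaults

    isolated = copy.deepcopy(defaults)            # deep copy also duplicates the nested list
    isolated["processing_parameters"].append("other")
    print(defaults["processing_parameters"])      # still ['key=value']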

mt_metadata/utils/mttime.py
@@ -7,16 +7,32 @@ Created on Wed May 13 19:10:46 2020
  # =============================================================================
  # IMPORTS
  # =============================================================================
+ from copy import deepcopy
  import datetime
  from dateutil.parser import parse as dtparser
- from copy import deepcopy
  import numpy as np
  import pandas as pd
  from pandas._libs.tslibs import OutOfBoundsDatetime

+ from typing import Optional, Union  # Self is importable in python 3.11+

  from loguru import logger

+ DATETIME_HINT = Union[
+     float,
+     int,
+     np.datetime64,
+     pd.Timestamp,
+     str,
+     datetime.datetime,
+ ]
+
+ try:
+     from obspy.core.utcdatetime import UTCDateTime  # for type hinting
+     DATETIME_HINT = Union[DATETIME_HINT, UTCDateTime]
+ except ImportError:
+     pass
+
  # =============================================================================
  # Get leap seconds
  # =============================================================================
@@ -387,7 +403,10 @@ class MTime:

          return t_min_max, pd_timestamp

-     def parse(self, dt_str):
+     def parse(
+         self,
+         dt_str: Optional[DATETIME_HINT] = None
+     ) -> None:  # TODO: add Self as a typehint in python 3.11+
          """
          Parse a date-time string using dateutil.parser

@@ -441,7 +460,7 @@

          else:
              try:
-                 stamp = t_min_max, stamp = self._check_timestamp(
+                 t_min_max, stamp = self._check_timestamp(
                      pd.Timestamp(dt_str)
                  )
              except (ValueError, TypeError, OutOfBoundsDatetime, OverflowError):
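
The widened type hint above advertises the inputs that parse() is meant to accept; a hedged sketch exercising a few of them (assuming the MTime class from mt_metadata.utils.mttime):

    import datetime
    import numpy as np
    import pandas as pd
    from mt_metadata.utils.mttime import MTime

    t = MTime()
    t.parse("2020-02-01T09:23:45.453670+00:00")        # ISO 8601 string
    t.parse(pd.Timestamp("2020-02-01"))                # pandas Timestamp
    t.parse(np.datetime64("2020-02-01T00:00:00"))      # numpy datetime64
    t.parse(datetime.datetime(2020, 2, 1, 9, 23, 45))  # datetime.datetime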

mt_metadata/utils/validators.py
@@ -64,9 +64,11 @@ def validate_header(header, attribute=False):
      else:
          required_keys = [key for key in REQUIRED_KEYS if key != "attribute"]
          if sorted(header) != sorted(required_keys):
+             missing_keys = [x for x in required_keys if x not in header]
              msg = (
-                 f"Keys is not correct, must include {required_keys}"
-                 + f". Currently has {header}"
+                 f"Keys is not correct, must include {required_keys}\n"
+                 + f". Currently has {header}\n"
+                 + f"Need to add keys: {missing_keys}"
              )
              raise MTValidatorError(msg)
      return header

{mt_metadata-0.3.9.dist-info → mt_metadata-0.4.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
- Name: mt-metadata
- Version: 0.3.9
+ Metadata-Version: 2.2
+ Name: mt_metadata
+ Version: 0.4.0
  Summary: Metadata for magnetotelluric data
  Home-page: https://github.com/kujaku11/mt_metadata
  Author: Jared Peacock
@@ -22,13 +22,29 @@ Description-Content-Type: text/markdown
  License-File: LICENSE
  License-File: AUTHORS.rst
  Requires-Dist: numpy
+ Requires-Dist: scipy
  Requires-Dist: pandas
- Requires-Dist: obspy
  Requires-Dist: matplotlib
  Requires-Dist: xarray
  Requires-Dist: loguru
-
- # mt_metadata version 0.3.9
+ Provides-Extra: obspy
+ Requires-Dist: obspy; extra == "obspy"
+ Provides-Extra: test
+ Requires-Dist: pytest>=3; extra == "test"
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: description
+ Dynamic: description-content-type
+ Dynamic: home-page
+ Dynamic: keywords
+ Dynamic: license
+ Dynamic: provides-extra
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary
+
+ # mt_metadata version 0.4.0
  Standard MT metadata

  [![PyPi version](https://img.shields.io/pypi/v/mt_metadata.svg)](https://pypi.python.org/pypi/mt-metadata)
@@ -43,11 +59,11 @@ Requires-Dist: loguru

  MT Metadata is a project led by [IRIS-PASSCAL MT Software working group](https://www.iris.edu/hq/about_iris/governance/mt_soft>) and USGS to develop tools that standardize magnetotelluric metadata, well, at least create tools for standards that are generally accepted. This include the two main types of magnetotelluric data

- - **Time Series**
+ - **Time Series**
  - Structured as:
  - Experiment -> Survey -> Station -> Run -> Channel
  - Supports translation to/from **StationXML**
-
+
  - **Transfer Functions**
  - Supports (will support) to/from:
  - **EDI** (most common format)
@@ -58,11 +74,12 @@ MT Metadata is a project led by [IRIS-PASSCAL MT Software working group](https:/

  Most people will be using the transfer functions, but a lot of that metadata comes from the time series metadata. This module supports both and has tried to make them more or less seamless to reduce complication.

- * **Version**: 0.3.9
+ * **Version**: 0.4.0
  * **Free software**: MIT license
  * **Documentation**: https://mt-metadata.readthedocs.io.
  * **Examples**: Click the `Binder` badge above and Jupyter Notebook examples are in **mt_metadata/examples/notebooks** and **docs/source/notebooks**
  * **Suggested Citation**: Peacock, J. R., Kappler, K., Ronan, T., Heagy, L., Kelbert, A., Frassetto, A. (2022) MTH5: An archive and exchangeable data format for magnetotelluric time series data, *Computers & Geoscience*, **162**, doi:10.1016/j.cageo.2022.105102
+ * **IPDS**: IP-138156



@@ -71,20 +88,27 @@ Most people will be using the transfer functions, but a lot of that metadata com

  `git clone https://github.com/kujaku11/mt_metadata.git`

- `python setup.py install`
+ `pip install .`

- You can add the flag `-e` if you want to change the code.
+ You can add the flag `-e` if you want to install the source repository in an editable state.

  ## PIP
  `pip install mt_metadata`

+ > You can install with optional packages by appending `[option_name]` to the package name during the
+ > `pip` install command. E.g:
+ >
+ > `pip install mt_metadata[obspy]`
+ >
+ > or `pip install .[obspy]` if building from source.
+
  ## Conda

  `conda install mt_metadata`

  # Standards

- Each metadata keyword has an associated standard that goes with it. These are stored internally in JSON file. The JSON files are read in when the package is loaded to initialize the standards. Each keyword is described by:
+ Each metadata keyword has an associated standard that goes with it. These are stored internally in JSON file. The JSON files are read in when the package is loaded to initialize the standards. Each keyword is described by:

  - **type** - How the value should be represented based on very basic types

@@ -100,7 +124,7 @@
  - *Controlled Vocabulary* only certain values are allowed according to **options**
  - *Date* a date and/or time string in ISO format
  - *Number* a float or integer
- - *Boolean* the value can only be True or False
+ - *Boolean* the value can only be True or False

  - **units** - Units of the value
  - **description** - Full description of what the metadata key is meant to convey.
@@ -117,7 +141,7 @@ Each metadata object is based on a Base class that has methods:
  - to_from_dict
  - attribute_information

- And each object has a doc string that describes the standard:
+ And each object has a doc string that describes the standard:


  | **Metadata Key** | **Description** | **Example** |
@@ -230,7 +254,7 @@
  Credits
  -------

- This project is in cooperation with the Incorporated Research Institutes of Seismology, the U.S. Geological Survey, and other collaborators. Facilities of the IRIS Consortium are supported by the National Science Foundation’s Seismological Facilities for the Advancement of Geoscience (SAGE) Award under Cooperative Support Agreement EAR-1851048. USGS is partially funded through the Community for Data Integration and IMAGe through the Minerals Resources Program.
+ This project is in cooperation with the Incorporated Research Institutes of Seismology, the U.S. Geological Survey, and other collaborators. Facilities of the IRIS Consortium are supported by the National Science Foundation’s Seismological Facilities for the Advancement of Geoscience (SAGE) Award under Cooperative Support Agreement EAR-1851048. USGS is partially funded through the Community for Data Integration and IMAGe through the Minerals Resources Program.


  History