mt-metadata 0.3.5__py2.py3-none-any.whl → 0.3.7__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mt-metadata might be problematic; see the registry's advisory page for more details.

Files changed (34)
  1. mt_metadata/__init__.py +1 -1
  2. mt_metadata/base/helpers.py +9 -2
  3. mt_metadata/timeseries/filters/filtered.py +133 -75
  4. mt_metadata/timeseries/filters/frequency_response_table_filter.py +10 -7
  5. mt_metadata/timeseries/station.py +31 -0
  6. mt_metadata/timeseries/stationxml/xml_channel_mt_channel.py +53 -1
  7. mt_metadata/timeseries/stationxml/xml_inventory_mt_experiment.py +1 -0
  8. mt_metadata/transfer_functions/__init__.py +38 -0
  9. mt_metadata/transfer_functions/core.py +96 -71
  10. mt_metadata/transfer_functions/io/edi/edi.py +29 -19
  11. mt_metadata/transfer_functions/io/edi/metadata/define_measurement.py +1 -0
  12. mt_metadata/transfer_functions/io/edi/metadata/emeasurement.py +4 -2
  13. mt_metadata/transfer_functions/io/edi/metadata/header.py +3 -1
  14. mt_metadata/transfer_functions/io/edi/metadata/information.py +13 -6
  15. mt_metadata/transfer_functions/io/emtfxml/emtfxml.py +12 -6
  16. mt_metadata/transfer_functions/io/emtfxml/metadata/data.py +1 -1
  17. mt_metadata/transfer_functions/io/emtfxml/metadata/estimate.py +1 -1
  18. mt_metadata/transfer_functions/io/emtfxml/metadata/period_range.py +6 -1
  19. mt_metadata/transfer_functions/io/emtfxml/metadata/provenance.py +6 -2
  20. mt_metadata/transfer_functions/io/emtfxml/metadata/standards/copyright.json +2 -1
  21. mt_metadata/transfer_functions/processing/aurora/__init__.py +0 -1
  22. mt_metadata/transfer_functions/processing/aurora/band.py +7 -11
  23. mt_metadata/transfer_functions/processing/aurora/channel_nomenclature.py +6 -44
  24. mt_metadata/transfer_functions/processing/aurora/standards/regression.json +46 -1
  25. mt_metadata/transfer_functions/processing/aurora/station.py +17 -11
  26. mt_metadata/transfer_functions/processing/aurora/stations.py +4 -4
  27. mt_metadata/utils/mttime.py +1 -1
  28. mt_metadata/utils/validators.py +11 -2
  29. {mt_metadata-0.3.5.dist-info → mt_metadata-0.3.7.dist-info}/METADATA +52 -3
  30. {mt_metadata-0.3.5.dist-info → mt_metadata-0.3.7.dist-info}/RECORD +34 -34
  31. {mt_metadata-0.3.5.dist-info → mt_metadata-0.3.7.dist-info}/AUTHORS.rst +0 -0
  32. {mt_metadata-0.3.5.dist-info → mt_metadata-0.3.7.dist-info}/LICENSE +0 -0
  33. {mt_metadata-0.3.5.dist-info → mt_metadata-0.3.7.dist-info}/WHEEL +0 -0
  34. {mt_metadata-0.3.5.dist-info → mt_metadata-0.3.7.dist-info}/top_level.txt +0 -0
@@ -838,10 +838,12 @@ class TF:
838
838
  ].data.tolist()
839
839
  if self.ex in outputs or self.ey in outputs or self.hz in outputs:
840
840
  if np.all(
841
- self._transfer_function.transfer_function.sel(
842
- input=self._ch_input_dict["tf"],
843
- output=self._ch_output_dict["tf"],
844
- ).data
841
+ self._transfer_function.transfer_function.loc[
842
+ dict(
843
+ input=self._ch_input_dict["tf"],
844
+ output=self._ch_output_dict["tf"],
845
+ )
846
+ ].data
845
847
  == 0
846
848
  ):
847
849
  return False
@@ -857,9 +859,9 @@ class TF:
857
859
 
858
860
  """
859
861
  if self.has_transfer_function():
860
- ds = self.dataset.transfer_function.sel(
861
- input=self.hx_hy, output=self.ex_ey_hz
862
- )
862
+ ds = self.dataset.transfer_function.loc[
863
+ dict(input=self.hx_hy, output=self.ex_ey_hz)
864
+ ]
863
865
  for key, mkey in self._dataset_attr_dict.items():
864
866
  obj, attr = mkey.split(".", 1)
865
867
  value = getattr(self, obj).get_attr_from_name(attr)
@@ -889,9 +891,9 @@ class TF:
889
891
 
890
892
  """
891
893
  if self.has_transfer_function():
892
- ds = self.dataset.transfer_function_error.sel(
893
- input=self.hx_hy, output=self.ex_ey_hz
894
- )
894
+ ds = self.dataset.transfer_function_error.loc[
895
+ dict(input=self.hx_hy, output=self.ex_ey_hz)
896
+ ]
895
897
  for key, mkey in self._dataset_attr_dict.items():
896
898
  obj, attr = mkey.split(".", 1)
897
899
  value = getattr(self, obj).get_attr_from_name(attr)
@@ -921,9 +923,9 @@ class TF:
921
923
 
922
924
  """
923
925
  if self.has_transfer_function():
924
- ds = self.dataset.transfer_function_model_error.sel(
925
- input=self.hx_hy, output=self.ex_ey_hz
926
- )
926
+ ds = self.dataset.transfer_function_model_error.loc[
927
+ dict(input=self.hx_hy, output=self.ex_ey_hz)
928
+ ]
927
929
  for key, mkey in self._dataset_attr_dict.items():
928
930
  obj, attr = mkey.split(".", 1)
929
931
  value = getattr(self, obj).get_attr_from_name(attr)
@@ -958,10 +960,12 @@ class TF:
958
960
  ].data.tolist()
959
961
  if self.ex in outputs or self.ey in outputs:
960
962
  if np.all(
961
- self._transfer_function.transfer_function.sel(
962
- input=self._ch_input_dict["impedance"],
963
- output=self._ch_output_dict["impedance"],
964
- ).data
963
+ self._transfer_function.transfer_function.loc[
964
+ dict(
965
+ input=self._ch_input_dict["impedance"],
966
+ output=self._ch_output_dict["impedance"],
967
+ )
968
+ ].data
965
969
  == 0
966
970
  ):
967
971
  return False
@@ -977,10 +981,12 @@ class TF:
977
981
 
978
982
  """
979
983
  if self.has_impedance():
980
- z = self.dataset.transfer_function.sel(
981
- input=self._ch_input_dict["impedance"],
982
- output=self._ch_output_dict["impedance"],
983
- )
984
+ z = self.dataset.transfer_function.loc[
985
+ dict(
986
+ input=self._ch_input_dict["impedance"],
987
+ output=self._ch_output_dict["impedance"],
988
+ )
989
+ ]
984
990
  z.name = "impedance"
985
991
  for key, mkey in self._dataset_attr_dict.items():
986
992
  obj, attr = mkey.split(".", 1)
@@ -1011,10 +1017,12 @@ class TF:
1011
1017
 
1012
1018
  """
1013
1019
  if self.has_impedance():
1014
- z_err = self.dataset.transfer_function_error.sel(
1015
- input=self._ch_input_dict["impedance"],
1016
- output=self._ch_output_dict["impedance"],
1017
- )
1020
+ z_err = self.dataset.transfer_function_error.loc[
1021
+ dict(
1022
+ input=self._ch_input_dict["impedance"],
1023
+ output=self._ch_output_dict["impedance"],
1024
+ )
1025
+ ]
1018
1026
  z_err.name = "impedance_error"
1019
1027
 
1020
1028
  for key, mkey in self._dataset_attr_dict.items():
@@ -1046,10 +1054,12 @@ class TF:
1046
1054
 
1047
1055
  """
1048
1056
  if self.has_impedance():
1049
- z_err = self.dataset.transfer_function_model_error.sel(
1050
- input=self._ch_input_dict["impedance"],
1051
- output=self._ch_output_dict["impedance"],
1052
- )
1057
+ z_err = self.dataset.transfer_function_model_error.loc[
1058
+ dict(
1059
+ input=self._ch_input_dict["impedance"],
1060
+ output=self._ch_output_dict["impedance"],
1061
+ )
1062
+ ]
1053
1063
  z_err.name = "impedance_model_error"
1054
1064
 
1055
1065
  for key, mkey in self._dataset_attr_dict.items():
@@ -1087,10 +1097,12 @@ class TF:
1087
1097
  if self.hz in outputs:
1088
1098
  if np.all(
1089
1099
  np.nan_to_num(
1090
- self._transfer_function.transfer_function.sel(
1091
- input=self._ch_input_dict["tipper"],
1092
- output=self._ch_output_dict["tipper"],
1093
- ).data
1100
+ self._transfer_function.transfer_function.loc[
1101
+ dict(
1102
+ input=self._ch_input_dict["tipper"],
1103
+ output=self._ch_output_dict["tipper"],
1104
+ )
1105
+ ].data
1094
1106
  )
1095
1107
  == 0
1096
1108
  ):
@@ -1107,10 +1119,12 @@ class TF:
1107
1119
 
1108
1120
  """
1109
1121
  if self.has_tipper():
1110
- t = self.dataset.transfer_function.sel(
1111
- input=self._ch_input_dict["tipper"],
1112
- output=self._ch_output_dict["tipper"],
1113
- )
1122
+ t = self.dataset.transfer_function.loc[
1123
+ dict(
1124
+ input=self._ch_input_dict["tipper"],
1125
+ output=self._ch_output_dict["tipper"],
1126
+ )
1127
+ ]
1114
1128
  t.name = "tipper"
1115
1129
 
1116
1130
  for key, mkey in self._dataset_attr_dict.items():
@@ -1141,10 +1155,12 @@ class TF:
1141
1155
 
1142
1156
  """
1143
1157
  if self.has_tipper():
1144
- t = self.dataset.transfer_function_error.sel(
1145
- input=self._ch_input_dict["tipper"],
1146
- output=self._ch_output_dict["tipper"],
1147
- )
1158
+ t = self.dataset.transfer_function_error.loc[
1159
+ dict(
1160
+ input=self._ch_input_dict["tipper"],
1161
+ output=self._ch_output_dict["tipper"],
1162
+ )
1163
+ ]
1148
1164
  t.name = "tipper_error"
1149
1165
  for key, mkey in self._dataset_attr_dict.items():
1150
1166
  obj, attr = mkey.split(".", 1)
@@ -1174,10 +1190,12 @@ class TF:
1174
1190
 
1175
1191
  """
1176
1192
  if self.has_tipper():
1177
- t = self.dataset.transfer_function_model_error.sel(
1178
- input=self._ch_input_dict["tipper"],
1179
- output=self._ch_output_dict["tipper"],
1180
- )
1193
+ t = self.dataset.transfer_function_model_error.loc[
1194
+ dict(
1195
+ input=self._ch_input_dict["tipper"],
1196
+ output=self._ch_output_dict["tipper"],
1197
+ )
1198
+ ]
1181
1199
  t.name = "tipper_model_error"
1182
1200
  for key, mkey in self._dataset_attr_dict.items():
1183
1201
  obj, attr = mkey.split(".", 1)
@@ -1209,10 +1227,12 @@ class TF:
1209
1227
  """
1210
1228
 
1211
1229
  if np.all(
1212
- self._transfer_function.inverse_signal_power.sel(
1213
- input=self._ch_input_dict["isp"],
1214
- output=self._ch_output_dict["isp"],
1215
- ).data
1230
+ self._transfer_function.inverse_signal_power.loc[
1231
+ dict(
1232
+ input=self._ch_input_dict["isp"],
1233
+ output=self._ch_output_dict["isp"],
1234
+ )
1235
+ ].data
1216
1236
  == 0
1217
1237
  ):
1218
1238
  return False
@@ -1221,10 +1241,12 @@ class TF:
1221
1241
  @property
1222
1242
  def inverse_signal_power(self):
1223
1243
  if self.has_inverse_signal_power():
1224
- ds = self.dataset.inverse_signal_power.sel(
1225
- input=self._ch_input_dict["isp"],
1226
- output=self._ch_output_dict["isp"],
1227
- )
1244
+ ds = self.dataset.inverse_signal_power.loc[
1245
+ dict(
1246
+ input=self._ch_input_dict["isp"],
1247
+ output=self._ch_output_dict["isp"],
1248
+ )
1249
+ ]
1228
1250
  for key, mkey in self._dataset_attr_dict.items():
1229
1251
  obj, attr = mkey.split(".", 1)
1230
1252
  value = getattr(self, obj).get_attr_from_name(attr)
@@ -1260,10 +1282,12 @@ class TF:
1260
1282
  """
1261
1283
 
1262
1284
  if np.all(
1263
- self._transfer_function.residual_covariance.sel(
1264
- input=self._ch_input_dict["res"],
1265
- output=self._ch_output_dict["res"],
1266
- ).data
1285
+ self._transfer_function.residual_covariance.loc[
1286
+ dict(
1287
+ input=self._ch_input_dict["res"],
1288
+ output=self._ch_output_dict["res"],
1289
+ )
1290
+ ].data
1267
1291
  == 0
1268
1292
  ):
1269
1293
  return False
@@ -1272,10 +1296,12 @@ class TF:
1272
1296
  @property
1273
1297
  def residual_covariance(self):
1274
1298
  if self.has_residual_covariance():
1275
- ds = self.dataset.residual_covariance.sel(
1276
- input=self._ch_input_dict["res"],
1277
- output=self._ch_output_dict["res"],
1278
- )
1299
+ ds = self.dataset.residual_covariance.loc[
1300
+ dict(
1301
+ input=self._ch_input_dict["res"],
1302
+ output=self._ch_output_dict["res"],
1303
+ )
1304
+ ]
1279
1305
  for key, mkey in self._dataset_attr_dict.items():
1280
1306
  obj, attr = mkey.split(".", 1)
1281
1307
  value = getattr(self, obj).get_attr_from_name(attr)
@@ -2222,19 +2248,19 @@ class TF:
2222
2248
  setattr(self, tf_key, getattr(zmm_obj, j_key))
2223
2249
  self._transfer_function["transfer_function"].loc[
2224
2250
  dict(input=zmm_obj.input_channels, output=zmm_obj.output_channels)
2225
- ] = zmm_obj.dataset.transfer_function.sel(
2226
- input=zmm_obj.input_channels, output=zmm_obj.output_channels
2227
- )
2251
+ ] = zmm_obj.dataset.transfer_function.loc[
2252
+ dict(input=zmm_obj.input_channels, output=zmm_obj.output_channels)
2253
+ ]
2228
2254
  self._transfer_function["inverse_signal_power"].loc[
2229
2255
  dict(input=zmm_obj.input_channels, output=zmm_obj.input_channels)
2230
- ] = zmm_obj.dataset.inverse_signal_power.sel(
2231
- input=zmm_obj.input_channels, output=zmm_obj.input_channels
2232
- )
2256
+ ] = zmm_obj.dataset.inverse_signal_power.loc[
2257
+ dict(input=zmm_obj.input_channels, output=zmm_obj.input_channels)
2258
+ ]
2233
2259
  self._transfer_function["residual_covariance"].loc[
2234
2260
  dict(input=zmm_obj.output_channels, output=zmm_obj.output_channels)
2235
- ] = zmm_obj.dataset.residual_covariance.sel(
2236
- input=zmm_obj.output_channels, output=zmm_obj.output_channels
2237
- )
2261
+ ] = zmm_obj.dataset.residual_covariance.loc[
2262
+ dict(input=zmm_obj.output_channels, output=zmm_obj.output_channels)
2263
+ ]
2238
2264
 
2239
2265
  self._compute_error_from_covariance()
2240
2266
  self._rotation_angle = -1 * zmm_obj.declination
@@ -2362,4 +2388,3 @@ class TF:
2362
2388
 
2363
2389
  class TFError(Exception):
2364
2390
  pass
2365
- pass
@@ -300,20 +300,20 @@ class EDI(object):
300
300
 
301
301
  self._read_data()
302
302
 
303
- if self.Header.lat is None:
303
+ if self.Header.lat in [None, 0.0]:
304
304
  self.Header.lat = self.Measurement.reflat
305
305
  self.logger.debug(
306
- "Got latitude from reflat for {0}".format(self.Header.dataid)
306
+ f"Got latitude from reflat for {self.Header.dataid}"
307
307
  )
308
- if self.Header.lon is None:
308
+ if self.Header.lon in [None, 0.0]:
309
309
  self.Header.lon = self.Measurement.reflon
310
310
  self.logger.debug(
311
- "Got longitude from reflon for {0}".format(self.Header.dataid)
311
+ f"Got longitude from reflon for {self.Header.dataid}"
312
312
  )
313
- if self.Header.elev is None:
313
+ if self.Header.elev in [None, 0.0]:
314
314
  self.Header.elev = self.Measurement.refelev
315
315
  self.logger.debug(
316
- "Got elevation from refelev for {0}".format(self.Header.dataid)
316
+ f"Got elevation from refelev for {self.Header.dataid}"
317
317
  )
318
318
 
319
319
  if self.elev in [0, None] and get_elevation:
@@ -419,8 +419,7 @@ class EDI(object):
419
419
  )
420
420
  elif key.startswith("t"):
421
421
  obj[:, ii, jj] = (
422
- data_dict[f"{key}r.exp"]
423
- + data_dict[f"{key}i.exp"] * 1j
422
+ data_dict[f"{key}r.exp"] + data_dict[f"{key}i.exp"] * 1j
424
423
  )
425
424
  try:
426
425
  error_key = [
@@ -756,10 +755,8 @@ class EDI(object):
756
755
  extra_lines.append(
757
756
  f"\toriginal_program.date={self.Header.progdate}\n"
758
757
  )
759
- if self.Header.fileby != "1980-01-01":
760
- extra_lines.append(
761
- f"\toriginal_file.date={self.Header.filedate}\n"
762
- )
758
+ if self.Header.filedate != "1980-01-01":
759
+ extra_lines.append(f"\toriginal_file.date={self.Header.filedate}\n")
763
760
  header_lines = self.Header.write_header(
764
761
  longitude_format=longitude_format, latlon_format=latlon_format
765
762
  )
@@ -907,15 +904,11 @@ class EDI(object):
907
904
  ]
908
905
  elif data_key.lower() == "freq":
909
906
  block_lines = [
910
- ">{0} // {1:.0f}\n".format(
911
- data_key.upper(), data_comp_arr.size
912
- )
907
+ ">{0} // {1:.0f}\n".format(data_key.upper(), data_comp_arr.size)
913
908
  ]
914
909
  elif data_key.lower() in ["zrot", "trot"]:
915
910
  block_lines = [
916
- ">{0} // {1:.0f}\n".format(
917
- data_key.upper(), data_comp_arr.size
918
- )
911
+ ">{0} // {1:.0f}\n".format(data_key.upper(), data_comp_arr.size)
919
912
  ]
920
913
  else:
921
914
  raise ValueError("Cannot write block for {0}".format(data_key))
@@ -1039,6 +1032,13 @@ class EDI(object):
1039
1032
  if survey.summary != None:
1040
1033
  self.Info.info_list.append(f"survey.summary = {survey.summary}")
1041
1034
 
1035
+ for key in survey.to_dict(single=True).keys():
1036
+ if "northwest" in key or "southeast" in key or "time_period" in key:
1037
+ continue
1038
+ value = survey.get_attr_from_name(key)
1039
+ if value != None:
1040
+ self.Info.info_list.append(f"survey.{key} = {value}")
1041
+
1042
1042
  @property
1043
1043
  def station_metadata(self):
1044
1044
  sm = metadata.Station()
@@ -1192,6 +1192,8 @@ class EDI(object):
1192
1192
  self.Header.datum = sm.location.datum
1193
1193
  self.Header.units = sm.transfer_function.units
1194
1194
  self.Header.enddate = sm.time_period.end
1195
+ if sm.geographic_name is not None:
1196
+ self.Header.loc = sm.geographic_name
1195
1197
 
1196
1198
  ### write notes
1197
1199
  # write comments, which would be anything in the info section from an edi
@@ -1203,7 +1205,7 @@ class EDI(object):
1203
1205
  if k in ["processing_parameters"]:
1204
1206
  for item in v:
1205
1207
  self.Info.info_list.append(
1206
- f"transfer_function.{item.replace('=', ' = ')}"
1208
+ f"transfer_function.processing_parameters.{item.replace('=', ' = ')}"
1207
1209
  )
1208
1210
  else:
1209
1211
  self.Info.info_list.append(f"transfer_function.{k} = {v}")
@@ -1379,3 +1381,11 @@ class EDI(object):
1379
1381
  @property
1380
1382
  def rrhy_metadata(self):
1381
1383
  return self._get_magnetic_metadata("rrhy")
1384
+
1385
+ @property
1386
+ def rrhx_metadata(self):
1387
+ return self._get_magnetic_metadata("rrhx")
1388
+
1389
+ @property
1390
+ def rrhy_metadata(self):
1391
+ return self._get_magnetic_metadata("rrhy")
@@ -441,6 +441,7 @@ class DefineMeasurement(Base):
441
441
  "chtype": channel.component,
442
442
  "id": channel.channel_id,
443
443
  "acqchan": channel.channel_number,
444
+ "dip": channel.measurement_tilt,
444
445
  }
445
446
  )
446
447
  setattr(self, f"meas_{channel.component.lower()}", meas)
@@ -17,6 +17,7 @@ from .standards import SCHEMA_FN_PATHS
17
17
  # =============================================================================
18
18
  attr_dict = get_schema("emeasurement", SCHEMA_FN_PATHS)
19
19
 
20
+
20
21
  # ==============================================================================
21
22
  # magnetic measurements
22
23
  # ==============================================================================
@@ -40,8 +41,9 @@ class EMeasurement(Base):
40
41
 
41
42
  super().__init__(attr_dict=attr_dict, **kwargs)
42
43
 
43
- if self.x != 0 or self.y != 0 or self.x2 != 0 or self.y2 != 0:
44
- self.azm = self.azimuth
44
+ if self.azm == 0:
45
+ if self.x != 0 or self.x2 != 0 or self.y != 0 or self.y2 != 0:
46
+ self.azm = self.azimuth
45
47
 
46
48
  def __str__(self):
47
49
  return "\n".join(
@@ -213,7 +213,7 @@ class Header(Location):
213
213
  self,
214
214
  longitude_format="LON",
215
215
  latlon_format="dms",
216
- required=True,
216
+ required=False,
217
217
  ):
218
218
  """
219
219
  Write header information to a list of lines.
@@ -243,6 +243,8 @@ class Header(Location):
243
243
  for key, value in self.to_dict(single=True, required=required).items():
244
244
  if key in ["x", "x2", "y", "y2", "z", "z2"]:
245
245
  continue
246
+ if value in [None, "None"]:
247
+ continue
246
248
  if key in ["latitude"]:
247
249
  key = "lat"
248
250
  elif key in ["longitude"]:
@@ -10,6 +10,7 @@ Created on Sat Dec 4 14:13:37 2021
10
10
  from mt_metadata.base import Base
11
11
  from mt_metadata.base.helpers import validate_name
12
12
 
13
+
13
14
  # ==============================================================================
14
15
  # Info object
15
16
  # ==============================================================================
@@ -444,11 +445,17 @@ class Information(Base):
444
445
  new_dict[new_key] = value.split()[0]
445
446
  elif key.lower().endswith("sen"):
446
447
  comp = key.lower().split()[0]
447
- new_dict[
448
- f"{comp}.sensor.manufacturer"
449
- ] = "Phoenix Geophysics"
448
+ new_dict[f"{comp}.sensor.manufacturer"] = (
449
+ "Phoenix Geophysics"
450
+ )
450
451
  new_dict[f"{comp}.sensor.type"] = "Induction Coil"
451
452
  new_dict[new_key] = value
453
+ elif new_key in [
454
+ "survey.time_period.start_date",
455
+ "survey.time_period.end_date",
456
+ ]:
457
+ if value.count("-") == 1:
458
+ new_dict[new_key] = value.split("-")[0]
452
459
  else:
453
460
  new_dict[new_key] = value
454
461
 
@@ -461,8 +468,8 @@ class Information(Base):
461
468
  new_dict[key] = value
462
469
 
463
470
  if processing_parameters != []:
464
- new_dict[
465
- "transfer_function.processing_parameters"
466
- ] = processing_parameters
471
+ new_dict["transfer_function.processing_parameters"] = (
472
+ processing_parameters
473
+ )
467
474
 
468
475
  self.info_dict = new_dict
@@ -358,14 +358,20 @@ class EMTFXML(emtf_xml.EMTF):
358
358
  if hasattr(value, "to_xml") and callable(getattr(value, "to_xml")):
359
359
  if key == "processing_info":
360
360
  if skip_field_notes:
361
- value.remote_info._order.remove("field_notes")
361
+ try:
362
+ value.remote_info._order.remove("field_notes")
363
+ except ValueError:
364
+ self.logger.debug("No field notes to skip.")
362
365
  if value.remote_info.site.id in [
363
366
  None,
364
367
  "",
365
368
  "None",
366
369
  "none",
367
370
  ]:
368
- value.remote_info._order.remove("site")
371
+ try:
372
+ value.remote_info._order.remove("site")
373
+ except ValueError:
374
+ self.logger.debug("No remote field notes to skip.")
369
375
  element = value.to_xml()
370
376
  if isinstance(element, list):
371
377
  for item in element:
@@ -1211,7 +1217,7 @@ class EMTFXML(emtf_xml.EMTF):
1211
1217
  self.logger.warning(
1212
1218
  f"Cannot set processing info attribute {param}"
1213
1219
  )
1214
- self.logger.exception(error)
1220
+ # self.logger.exception(error)
1215
1221
  elif "magnetometer" in key:
1216
1222
  index = int(key.split("_")[1].split(".")[0])
1217
1223
  key = key.split(".", 1)[1:]
@@ -1227,7 +1233,7 @@ class EMTFXML(emtf_xml.EMTF):
1227
1233
  self.logger.warning(
1228
1234
  f"Cannot set processing info attribute {param}"
1229
1235
  )
1230
- self.logger.exception(error)
1236
+ # self.logger.exception(error)
1231
1237
  else:
1232
1238
  try:
1233
1239
  run.set_attr_from_name(key, value)
@@ -1235,7 +1241,7 @@ class EMTFXML(emtf_xml.EMTF):
1235
1241
  self.logger.warning(
1236
1242
  f"Cannot set processing info attribute {param}"
1237
1243
  )
1238
- self.logger.exception(error)
1244
+ # self.logger.exception(error)
1239
1245
  else:
1240
1246
  try:
1241
1247
  self.processing_info.set_attr_from_name(key, value)
@@ -1243,7 +1249,7 @@ class EMTFXML(emtf_xml.EMTF):
1243
1249
  self.logger.warning(
1244
1250
  f"Cannot set processing info attribute {param}"
1245
1251
  )
1246
- self.logger.exception(error)
1252
+ # self.logger.exception(error)
1247
1253
 
1248
1254
  self.site.run_list = sm.transfer_function.runs_processed
1249
1255
 
@@ -432,7 +432,7 @@ class TransferFunction(Base):
432
432
  pass
433
433
 
434
434
  comp_element = et.SubElement(
435
- period_element, key.replace("_", ".").capitalize(), attr_dict
435
+ period_element, key.replace("_", ".").upper(), attr_dict
436
436
  )
437
437
  idx_dict = self.write_dict[key]
438
438
  shape = arr.shape
@@ -56,7 +56,7 @@ class Estimate(Base):
56
56
 
57
57
  root = et.Element(
58
58
  self.__class__.__name__.capitalize(),
59
- {"name": self.name, "type": self.type},
59
+ {"name": self.name.upper(), "type": self.type},
60
60
  )
61
61
 
62
62
  et.SubElement(root, "Description").text = self.description
@@ -20,6 +20,8 @@ from mt_metadata.transfer_functions.io.emtfxml.metadata import helpers
20
20
 
21
21
  # =============================================================================
22
22
  attr_dict = get_schema("period_range", SCHEMA_FN_PATHS)
23
+
24
+
23
25
  # =============================================================================
24
26
  class PeriodRange(Base):
25
27
  __doc__ = write_lines(attr_dict)
@@ -45,7 +47,10 @@ class PeriodRange(Base):
45
47
 
46
48
  root = et.Element(
47
49
  self.__class__.__name__,
48
- {"min": f"{self.min:.9f}", "max": f"{self.max:.9f}"},
50
+ {
51
+ "min": f"{self.min:<16.5E}".strip(),
52
+ "max": f"{self.max:<16.5E}".strip(),
53
+ },
49
54
  )
50
55
  if string:
51
56
  return element_to_string(root)
@@ -11,7 +11,11 @@ Created on Wed Dec 23 21:30:36 2020
11
11
  # =============================================================================
12
12
  # Imports
13
13
  # =============================================================================
14
- from mt_metadata.base.helpers import write_lines, dict_to_xml, element_to_string
14
+ from mt_metadata.base.helpers import (
15
+ write_lines,
16
+ dict_to_xml,
17
+ element_to_string,
18
+ )
15
19
  from mt_metadata.base import get_schema, Base
16
20
  from .standards import SCHEMA_FN_PATHS
17
21
  from . import Person
@@ -40,7 +44,7 @@ class Provenance(Base):
40
44
 
41
45
  @property
42
46
  def create_time(self):
43
- return self._creation_dt.iso_str
47
+ return self._creation_dt.iso_str.split(".")[0]
44
48
 
45
49
  @create_time.setter
46
50
  def create_time(self, dt_str):
@@ -21,7 +21,8 @@
21
21
  "Restricted release",
22
22
  "Paper Citation Required",
23
23
  "Academic Use Only",
24
- "Conditions Apply"
24
+ "Conditions Apply",
25
+ "Data Citation Required"
25
26
  ],
26
27
  "alias": [],
27
28
  "example": "Unrestricted release",
@@ -20,7 +20,6 @@ __all__ = [
20
20
  "Decimation",
21
21
  "DecimationLevel",
22
22
  "Estimator",
23
- "FrequencyBand",
24
23
  "Processing",
25
24
  "Regression",
26
25
  "Run",