anss-formats 0.0.4__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,24 @@
1
+ Metadata-Version: 2.4
2
+ Name: anss-formats
3
+ Version: 0.1.1
4
+ Summary: Python implementation of the library used to communicate seismic event detection information between systems
5
+ License: CC0-1.0
6
+ Keywords: anss,earthquakes,formats,detection
7
+ Author: John Patton
8
+ Author-email: jpatton@usgs.gov
9
+ Requires-Python: >3.9.1,<3.12
10
+ Classifier: License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication
11
+ Classifier: Programming Language :: Python :: 3
12
+ Classifier: Programming Language :: Python :: 3.10
13
+ Classifier: Programming Language :: Python :: 3.11
14
+ Provides-Extra: pycurl
15
+ Requires-Dist: certifi (>=2024.07.04,<2025.0.0)
16
+ Requires-Dist: cryptography (>=42.0.5)
17
+ Requires-Dist: docutils (!=0.21.post1)
18
+ Requires-Dist: dparse (>=0.6.2,<0.7.0)
19
+ Requires-Dist: pydantic (>=2.6.0,<3.0.0)
20
+ Requires-Dist: requests (>=2.32.2,<3.0.0)
21
+ Requires-Dist: twine (>=5.1.1,<6.0.0)
22
+ Requires-Dist: urllib3 (>=2.6.0,<3.0.0)
23
+ Project-URL: Homepage, https://gitlab.com/anss-netops/anss-data-formats
24
+ Project-URL: Repository, https://gitlab.com/anss-netops/anss-data-formats
@@ -0,0 +1,17 @@
1
+ anssformats/__init__.py,sha256=1HsV_uIEBuKIhoa8jBHOD4bX6eMOfQ-57KvbHYTie8g,686
2
+ anssformats/amplitude.py,sha256=DCtkc6siT8dj0C3ynBM01rfaSpKOeHTc-51CDmspELg,647
3
+ anssformats/analytics.py,sha256=ROUq_SNq69tUKDFCYjqgElO3185_-XAsIZguN4ItA-s,3125
4
+ anssformats/association.py,sha256=UjDmr213kSzCCIWHMDce06Xjzut3-l1IYKexxJR_C_I,1173
5
+ anssformats/channel.py,sha256=z4sQGpJwSySSuzKCca_UovS6_DbPaP_j4BAZpX-Cqwc,1169
6
+ anssformats/detection.py,sha256=-UlR7tv5szgXsg5kY2NLxu3fAn_5ROUogM7H7W4EgQY,2201
7
+ anssformats/eventType.py,sha256=6DHjovVVGug5AU1WRKX3RxQbrnwSPlg2KNtH2sLX1Q0,1216
8
+ anssformats/filter.py,sha256=uOAUteSWeBvDcB12_9E_9sqe0wb6iR_lHZdjLv5agNM,675
9
+ anssformats/formatbasemodel.py,sha256=mUKwY0jiHrcr8jdZLCQhzSx3qBg16FEUytRgB5wiuIY,2044
10
+ anssformats/geojson.py,sha256=xjm_9ZxX9P8lzQKDBDO6L-ai1y7PQofVMJ5FwEIAVjU,1244
11
+ anssformats/hypocenter.py,sha256=QkXfQkRzjHbefF4G3TaOTwD5PYcvFkX5dvxjeVS3yfs,1552
12
+ anssformats/magnitude.py,sha256=ncMmwMoj_Lv0kSzjjtQVZ5_41mSkCYFxgExxiaeSqao,993
13
+ anssformats/pick.py,sha256=UpLcqxPx2BZScpMIAlk9E-QOWVPJsT9jVP-DtkZtij0,2427
14
+ anssformats/source.py,sha256=G78YUwY2fimi8Uy0d1mKjaIIsEDUue50wdHpACQsSMU,398
15
+ anss_formats-0.1.1.dist-info/METADATA,sha256=fJ5BfeSJKSn3M2vau9lBLdVcGFbdb4S7zmA5llY6uZU,1038
16
+ anss_formats-0.1.1.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
17
+ anss_formats-0.1.1.dist-info/RECORD,,
anssformats/__init__.py CHANGED
@@ -0,0 +1,26 @@
1
+ from anssformats.pick import Pick
2
+ from anssformats.source import Source
3
+ from anssformats.amplitude import Amplitude
4
+ from anssformats.analytics import Analytics, Prediction
5
+ from anssformats.association import Association
6
+ from anssformats.detection import Detection
7
+ from anssformats.hypocenter import Hypocenter
8
+ from anssformats.magnitude import Magnitude
9
+ from anssformats.channel import Channel
10
+ from anssformats.filter import Filter
11
+ from anssformats.eventType import EventType
12
+
13
+ __all__ = [
14
+ "Pick",
15
+ "Source",
16
+ "Amplitude",
17
+ "Analytics",
18
+ "Prediction",
19
+ "Association",
20
+ "Detection",
21
+ "Hypocenter",
22
+ "Magnitude",
23
+ "Channel",
24
+ "Filter",
25
+ "EventType",
26
+ ]
@@ -0,0 +1,82 @@
1
+ from typing import Optional, Union, List
2
+
3
+ from pydantic import Field
4
+
5
+ from anssformats.formatbasemodel import FormatBaseModel
6
+ from anssformats.source import Source as SourceFormat
7
+
8
+
9
+ class Prediction(FormatBaseModel):
10
+ """A generic prediction from an analytical model (AI/ML, statistical, rule-based, etc.)
11
+
12
+ This class provides a flexible structure for representing any type of prediction
13
+ with associated metrics and model provenance.
14
+
15
+ Attributes
16
+ ----------
17
+ label: string identifying what is being predicted (e.g., "phase", "magnitude",
18
+ "eventType", "custom_attribute")
19
+
20
+ value: the predicted value - can be string, number, or structured object
21
+
22
+ probability: optional float [0.0-1.0] containing the probability of this prediction
23
+
24
+ metrics: optional dict containing additional prediction metrics such as confidence,
25
+ uncertainty quantification (std, ranges, credible intervals), entropy, etc.
26
+ Structure is flexible to support any model-specific metrics.
27
+
28
+ modelID: optional string identifying which model made this prediction
29
+
30
+ modelVersion: optional string containing the version of the model
31
+
32
+ source: optional Source object containing the source/author of the model
33
+ """
34
+
35
+ label: str = Field(
36
+ ...,
37
+ description="What is being predicted (e.g., phase, magnitude, eventType, distance)",
38
+ )
39
+ value: Union[str, float, int, list, dict] = Field(
40
+ ..., description="The predicted value"
41
+ )
42
+ probability: Optional[float] = Field(
43
+ None, ge=0.0, le=1.0, description="Probability of this prediction [0.0-1.0]"
44
+ )
45
+ metrics: Optional[dict] = Field(
46
+ None,
47
+ description="Additional prediction metrics (confidence, uncertainty, entropy, etc.)",
48
+ )
49
+ modelID: Optional[str] = Field(
50
+ None, description="Identifier for the model that made this prediction"
51
+ )
52
+ modelVersion: Optional[str] = Field(None, description="Version of the model")
53
+ source: Optional[SourceFormat] = Field(
54
+ None, description="Source/author of the model"
55
+ )
56
+
57
+
58
+ class Analytics(FormatBaseModel):
59
+ """A conversion class used to create, parse, and validate analytical information
60
+ from models such as pickers, analytical models, AI/ML, etc.
61
+
62
+ This class provides an extensible structure supporting multiple models, arbitrary
63
+ prediction labels, and custom extensions.
64
+
65
+ Attributes
66
+ ----------
67
+
68
+ predictions: optional list of Prediction objects containing predictions from one
69
+ or more models. Each prediction can represent any type of analytical output
70
+ with associated confidence metrics and model provenance.
71
+
72
+ extensions: optional dict containing custom key-value pairs for experimental code and debugging.
73
+
74
+ """
75
+
76
+ predictions: Optional[List[Prediction]] = Field(
77
+ None, description="Array of predictions from one or more analytical models"
78
+ )
79
+
80
+ extensions: Optional[dict] = Field(
81
+ None, description="Custom key-value pairs for experimental or debugging data"
82
+ )
anssformats/channel.py CHANGED
@@ -2,47 +2,11 @@ from typing import Optional, List, Literal
2
2
 
3
3
  from pydantic import Field, field_validator, ValidationInfo
4
4
  from anssformats.formatbasemodel import FormatBaseModel
5
-
6
-
7
- class ChannelGeometry(FormatBaseModel):
8
- """A class holding the geojson geometry for the channel
9
-
10
- type: string containing the type of this geometry
11
-
12
- coordinates: List of floats containing the longitude in degrees, latitude in degrees, and elevation in meters, in that order
13
- """
14
-
15
- type: str = "Point"
16
- coordinates: List[float]
17
-
18
- # check that coordinates are valid
19
- @field_validator("coordinates")
20
- @classmethod
21
- def validate_coordinates(
22
- cls, value: List[float], info: ValidationInfo
23
- ) -> List[float]:
24
- if value is None:
25
- raise ValueError("Missing coordinates")
26
-
27
- if len(value) != 3:
28
- raise ValueError("Incomplete coordinates")
29
-
30
- # longitude
31
- if value[0] < -180.0 or value[0] > 180.0:
32
- raise ValueError("Longitude coordinate out of valid range")
33
-
34
- # latitude
35
- if value[1] < -90.0 or value[1] > 90.0:
36
- raise ValueError("Latitude coordinate out of valid range")
37
-
38
- # don't bother validating elevation
39
- # value[2]
40
-
41
- return value
5
+ from anssformats.geojson import PointGeometry
42
6
 
43
7
 
44
8
  class ChannelProperties(FormatBaseModel):
45
- """A class holding the channe specific custom properties for a geojson point feature
9
+ """A class holding the channel specific custom properties for a geojson point feature
46
10
 
47
11
  Station: string containing the station code
48
12
 
@@ -65,12 +29,12 @@ class Channel(FormatBaseModel):
65
29
 
66
30
  type: string containing the type of this geojson
67
31
 
68
- geometry: ChannelGeometry object containing the geojson geometry for this feature
32
+ geometry: PointGeometry object containing the geojson geometry for this feature
69
33
 
70
34
  properties: ChannelProperties object containing the channel properties
71
35
  """
72
36
 
73
37
  type: str = "Feature"
74
38
 
75
- geometry: ChannelGeometry
39
+ geometry: PointGeometry
76
40
  properties: ChannelProperties
@@ -0,0 +1,69 @@
1
+ from typing import List, Literal, Optional
2
+
3
+ from pydantic import Field
4
+
5
+
6
+ from anssformats.formatbasemodel import CustomDT, FormatBaseModel
7
+ from anssformats.eventType import EventType
8
+ from anssformats.hypocenter import Hypocenter, HypocenterProperties
9
+ from anssformats.source import Source
10
+ from anssformats.pick import Pick
11
+ from anssformats.magnitude import Magnitude
12
+ from anssformats.analytics import Analytics
13
+
14
+
15
+ class Detection(FormatBaseModel):
16
+ """A conversion class used to create, parse, and validate detection data.
17
+
18
+ type: string identifying this message as a detection
19
+
20
+ id: string containing a unique identifier for this detection
21
+
22
+ source: Source object containing the source of the detection
23
+
24
+ hypocenter: Hypocenter object containing the hypocenter of the detection
25
+
26
+ detectionType: optional string containing the origin type of this detection; valid
27
+ values are "New", "Update", "Final", and "Retract"
28
+
29
+ detectionTime: optional datetime containing the time this detection was made
30
+
31
+ eventType: optional EventType object containing the event type of the detection
32
+
33
+ minimumDistance: optional float containing the distance to the closest station
34
+
35
+ rms: optional float containing the detection RMS
36
+
37
+ maximumGap: optional float containing the detection gap
38
+
39
+ detector: optional string containing the detection grid, algorithm, or other
40
+ information
41
+
42
+ pickData: optional list of Pick objects used to generate
43
+ this detection
44
+
45
+ analyticsInfo: optional analytics object containing model output attached to this detection
46
+ """
47
+
48
+ type: Literal["Detection"]
49
+ id: str
50
+ source: Source
51
+
52
+ hypocenter: Hypocenter
53
+
54
+ detectionType: Optional[Literal["New", "Update", "Final", "Retract"]] = None
55
+ detectionTime: Optional[CustomDT] = None
56
+
57
+ eventType: Optional[EventType] = None
58
+
59
+ minimumDistance: Optional[float] = Field(None, ge=0.0)
60
+ rms: Optional[float] = None
61
+ maximumGap: Optional[float] = Field(None, ge=0.0, le=360.0)
62
+
63
+ detector: Optional[str] = None
64
+
65
+ pickData: Optional[List[Pick]] = None
66
+
67
+ magnitudeData: Optional[List[Magnitude]] = None
68
+
69
+ analyticsInfo: Optional[Analytics] = None
@@ -1,7 +1,7 @@
1
1
  from datetime import datetime
2
2
  from typing import Any
3
3
 
4
- from pydantic import BaseModel, GetCoreSchemaHandler
4
+ from pydantic import BaseModel, GetCoreSchemaHandler, field_validator
5
5
  from pydantic_core import CoreSchema, core_schema
6
6
 
7
7
 
@@ -22,10 +22,6 @@ def convert_datetime_to_iso8601_with_z_suffix(dt: datetime) -> str:
22
22
  class FormatBaseModel(BaseModel):
23
23
  """A Pydantic BaseModel used for any required formatting of keys and values"""
24
24
 
25
- class Config:
26
- # conversion for datetime to datetime string
27
- json_encoders = {datetime: convert_datetime_to_iso8601_with_z_suffix}
28
-
29
25
  def model_dump(self):
30
26
  """Override the default model_dump method to always exclude None values"""
31
27
  return super().model_dump(exclude_none=True)
@@ -41,17 +37,22 @@ class CustomDT(datetime):
41
37
  strings.
42
38
  """
43
39
 
40
+ @field_validator("*", mode="before")
44
41
  @classmethod
45
42
  def validate_no_tz(cls, v: Any, info: core_schema.ValidationInfo) -> Any:
46
43
  if isinstance(v, str):
47
44
  return datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=None)
48
- else:
49
- return v.replace(tzinfo=None)
45
+ return v.replace(tzinfo=None)
50
46
 
51
47
  @classmethod
52
48
  def __get_pydantic_core_schema__(
53
49
  cls, source_type: Any, handler: GetCoreSchemaHandler
54
50
  ) -> CoreSchema:
55
- return core_schema.with_info_plain_validator_function(
56
- function=cls.validate_no_tz
51
+ return core_schema.with_info_before_validator_function(
52
+ cls.validate_no_tz,
53
+ handler(datetime),
54
+ serialization=core_schema.plain_serializer_function_ser_schema(
55
+ convert_datetime_to_iso8601_with_z_suffix,
56
+ when_used="json-unless-none",
57
+ ),
57
58
  )
anssformats/geojson.py ADDED
@@ -0,0 +1,41 @@
1
+ from typing import Optional, List, Literal
2
+
3
+ from pydantic import Field, field_validator, ValidationInfo
4
+ from anssformats.formatbasemodel import FormatBaseModel
5
+
6
+
7
+ class PointGeometry(FormatBaseModel):
8
+ """A class holding a geojson point geometry
9
+
10
+ type: string containing the type of this geometry
11
+
12
+ coordinates: List of floats containing the longitude in degrees, latitude in degrees, and elevation in meters or depth in kilometers, in that order
13
+ """
14
+
15
+ type: str = "Point"
16
+ coordinates: List[float]
17
+
18
+ # check that coordinates are valid
19
+ @field_validator("coordinates")
20
+ @classmethod
21
+ def validate_coordinates(
22
+ cls, value: List[float], info: ValidationInfo
23
+ ) -> List[float]:
24
+ if value is None:
25
+ raise ValueError("Missing coordinates")
26
+
27
+ if len(value) != 3:
28
+ raise ValueError("Incomplete coordinates")
29
+
30
+ # longitude
31
+ if value[0] < -180.0 or value[0] > 180.0:
32
+ raise ValueError("Longitude coordinate out of valid range")
33
+
34
+ # latitude
35
+ if value[1] < -90.0 or value[1] > 90.0:
36
+ raise ValueError("Latitude coordinate out of valid range")
37
+
38
+ # don't validate elevation/depth
39
+ # value[2]
40
+
41
+ return value
@@ -0,0 +1,47 @@
1
+ from typing import Optional, List, Literal
2
+
3
+ from pydantic import Field, field_validator, ValidationInfo
4
+ from anssformats.formatbasemodel import CustomDT, FormatBaseModel
5
+ from anssformats.geojson import PointGeometry
6
+
7
+
8
+ class HypocenterProperties(FormatBaseModel):
9
+ """A class holding the hypocenter specific custom properties for a geojson point feature
10
+
11
+ originTime: required datetime containing the origin time of the hypocenter
12
+
13
+ latitudeError: optional float containing the error of the latitude of this
14
+ hypocenter in kilometers
15
+
16
+ longitudeError: optional float containing the error of the longitude of this
17
+ hypocenter in kilometers
18
+
19
+ depthError: optional float containing the error of the depth of this hypocenter in
20
+ kilometers
21
+
22
+ timeError: optional float containing the error of the origin time of this hypocenter
23
+ in seconds
24
+ """
25
+
26
+ originTime: CustomDT
27
+ latitudeError: Optional[float] = None
28
+ longitudeError: Optional[float] = None
29
+ depthError: Optional[float] = None
30
+ timeError: Optional[float] = None
31
+
32
+
33
+ class Hypocenter(FormatBaseModel):
34
+ """A conversion class used to create, parse, and validate geojson Hypocenter data as part of
35
+ detection data.
36
+
37
+ type: string containing the type of this geojson
38
+
39
+ geometry: PointGeometry object containing the geojson geometry for this feature
40
+
41
+ properties: HypocenterProperties object containing the hypocenter properties
42
+ """
43
+
44
+ type: str = "Feature"
45
+
46
+ geometry: PointGeometry
47
+ properties: HypocenterProperties
@@ -0,0 +1,35 @@
1
+ from typing import Optional
2
+
3
+ from pydantic import Field
4
+
5
+ from anssformats.formatbasemodel import FormatBaseModel
6
+ from anssformats.source import Source
7
+
8
+
9
+ class Magnitude(FormatBaseModel):
10
+ """A conversion class used to create, parse, and validate Magnitude data as part
11
+ of detection data.
12
+
13
+ Attributes
14
+ ----------
15
+
16
+ value: float containing the magnitude value
17
+
18
+ type: string containing the magnitude type
19
+
20
+ error: optional float containing the associated magnitude error (if any)
21
+
22
+ probability: optional float containing the associated magnitude probability (if any)
23
+
24
+ id: optional string containing a unique identifier for this magnitude
25
+
26
+ source: optional Source object containing the source of the magnitude
27
+ """
28
+
29
+ value: float = Field(None, ge=-2.0, le=10.0)
30
+ type: str
31
+
32
+ error: Optional[float] = Field(None, ge=0.0)
33
+ probability: Optional[float] = Field(None, ge=0.0, le=100.0)
34
+ id: Optional[str] = None
35
+ source: Optional[Source] = None
anssformats/pick.py CHANGED
@@ -4,13 +4,11 @@ from pydantic import Field
4
4
 
5
5
  from anssformats.amplitude import Amplitude
6
6
  from anssformats.association import Association
7
- from anssformats.beam import Beam
8
- from anssformats.machineLearning import MachineLearning
7
+ from anssformats.analytics import Analytics
9
8
  from anssformats.filter import Filter
10
9
  from anssformats.formatbasemodel import CustomDT, FormatBaseModel
11
10
  from anssformats.channel import Channel, ChannelProperties
12
11
  from anssformats.source import Source
13
- from anssformats.quality import Quality
14
12
 
15
13
 
16
14
  class Pick(FormatBaseModel):
@@ -40,20 +38,12 @@ class Pick(FormatBaseModel):
40
38
  filterInfo: optional list of Filter objects containing the filter frequencies when the
41
39
  pick was made
42
40
 
43
- amplitude: optional Amplitude object containing the amplitude associated with the
44
- pick
45
-
46
- beamInfo: optional Beam object containing the waveform beam information associated with
47
- the pick
41
+ amplitude: optional Amplitude object containing the amplitude associated with the pick
48
42
 
49
43
  associationInfo: optional Association object containing the association information
50
44
  if this pick is used as data in a Detection
51
45
 
52
- machineLearningInfo: optional machineLearning object containing the machineLearning
53
- information of this pick
54
-
55
- qualityInfo: optional quality object containing the quality
56
- information of this pick
46
+ analyticsInfo: optional analytics object containing model output attached to this pick
57
47
  """
58
48
 
59
49
  type: Literal["Pick"]
@@ -72,8 +62,6 @@ class Pick(FormatBaseModel):
72
62
 
73
63
  filterInfo: Optional[List[Filter]] = None
74
64
  amplitudeInfo: Optional[Amplitude] = None
75
- beamInfo: Optional[Beam] = None
76
65
 
77
66
  associationInfo: Optional[Association] = None
78
- machineLearningInfo: Optional[MachineLearning] = None
79
- qualityInfo: Optional[List[Quality]] = None
67
+ analyticsInfo: Optional[Analytics] = None
@@ -1,561 +0,0 @@
1
- Metadata-Version: 2.4
2
- Name: anss-formats
3
- Version: 0.0.4
4
- Summary: Python implementation of the library used to communicate seismic event detection information between systems
5
- License: CC0-1.0
6
- Keywords: anss,earthquakes,formats,detection
7
- Author: John Patton
8
- Author-email: jpatton@usgs.gov
9
- Requires-Python: >3.9.1,<3.12
10
- Classifier: License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication
11
- Classifier: Programming Language :: Python :: 3
12
- Classifier: Programming Language :: Python :: 3.10
13
- Classifier: Programming Language :: Python :: 3.11
14
- Provides-Extra: pycurl
15
- Requires-Dist: certifi (>=2024.07.04,<2025.0.0)
16
- Requires-Dist: cryptography (>=44.0.1,<45.0.0)
17
- Requires-Dist: docutils (!=0.21.post1)
18
- Requires-Dist: dparse (>=0.6.2,<0.7.0)
19
- Requires-Dist: pydantic (>=2.6.0,<3.0.0)
20
- Requires-Dist: requests (>=2.32.2,<3.0.0)
21
- Requires-Dist: twine (>=5.1.1,<6.0.0)
22
- Project-URL: Homepage, https://gitlab.com/anss-netops/anss-data-formats
23
- Project-URL: Repository, https://gitlab.com/anss-netops/anss-data-formats
24
- Description-Content-Type: text/markdown
25
-
26
- # ANSS Data Formats
27
- The US Geological Survey (USGS) Advanced National Seismic System (ANSS) defines a number of data exchange formats to communicate seismic event detection information between processing systems. These formats are defined using objects as defined in the [JSON standard](http://www.json.org).
28
-
29
- The purpose of this project is to:
30
-
31
- 1. Define formats to hold data representing the estimates of various types of
32
- seismic event detections.
33
- 2. Store the format definitions in a source controlled manner.
34
- 3. Host libraries used to generate, parse, and validate the formats
35
-
36
- ## Defined formats:
37
-
38
- * [Pick](format-docs/Pick.md) Format - A format for unassociated picks from a waveform arrival time picking algorithm.
39
-
40
- ## Supporting format objects:
41
-
42
- * [Amplitude](format-docs/Amplitude.md) Object - An object that contains information about an amplitude as part of a pick.
43
- * [Beam](format-docs/Beam.md) Object - An object that contains information about a waveform beam as part of a pick.
44
- * [Associated](format-docs/Associated.md) Object - An object that contains associated information if a pick is included in a detection.
45
- * [Filter](format-docs/Filter.md) Object - An object that contains filter information as part of a pick.
46
- * [Site](format-docs/Site.md) Object - An object that defines the station used to create a pick.
47
- * [Source](format-docs/Source.md) Object - An object that defines the creator/source of a pick.
48
- * [Quality](format-docs/Quality.md) Object - An object that defines the data quality of a pick.
49
- * [MachineLearning](format-docs/MachineLearning.md) Object - An object that defines the machine learning information for a pick.
50
- * [EventType](format-docs/EventType.md) Object - An object that defines the event type for MachineLearning info.
51
-
52
- # Amplitude Object Specification
53
-
54
- ## Description
55
-
56
- The Amplitude object is an object designed to encode the amplitude information
57
- that may or may not be part of the [Pick](Pick.md) Format. Amplitude uses the
58
- [JSON standard](http://www.json.org).
59
-
60
- ## Usage
61
-
62
- The Amplitude object is intended for use as part of the [Pick](Pick.md) Format
63
- in seismic data messaging between seismic applications and organizations.
64
-
65
- ## Output
66
-
67
- ```json
68
- {
69
- "amplitude" : Number,
70
- "period" : Number,
71
- "snr" : Number
72
- }
73
- ```
74
-
75
- ## Glossary
76
-
77
- **Optional Values:**
78
-
79
- The following are values that **may or may not** be provided as part of an
80
- amplitude.
81
-
82
- * amplitude - A decimal number containing the amplitude.
83
- * period - A decimal number containing the amplitude period.
84
- * snr - A decimal number containing the signal to noise ratio, capped at 1E9.
85
-
86
- # Association Object Specification
87
-
88
- ## Description
89
-
90
- The Association object is an object designed to encode information provided when
91
- a [Pick](Pick.md). Association uses the [JSON standard](http://www.json.org).
92
-
93
- ## Usage
94
-
95
- Association is intended for use as part of the [Pick](Pick.md) Format in seismic data messaging between seismic applications and organizations.
96
-
97
- ## Output
98
-
99
- ```json
100
- {
101
- "phase" : String,
102
- "distance" : Number,
103
- "azimuth" : Number,
104
- "residual" : Number,
105
- "sigma" : Number
106
- }
107
- ```
108
-
109
- ## Glossary
110
-
111
- **Optional Values:**
112
-
113
- The following are values that **may or may not** be provided as part of
114
- association.
115
-
116
- * phase - A string that identifies the seismic phase for this data if Association.
117
- * distance - A decimal number containing the distance in degrees between the detection's and data's locations if Association.
118
- * azimuth - A decimal number containing the azimuth in degrees between the detection's and data's locations if Association.
119
- * residual - A decimal number containing residual in seconds of the data if Association.
120
- * sigma - A decimal number reflecting the number of standard deviations of the data from the calculated value if Association.
121
-
122
- # Beam Object Specification
123
-
124
- ## Description
125
-
126
- The Beam object is an object designed to encode waveform beam information
127
- that may or may not be part of the [Pick](Pick.md) Format. Beam uses the
128
- [JSON standard](http://www.json.org).
129
-
130
- ## Usage
131
-
132
- The Beam object is intended for use as part of the [Pick](Pick.md) Format
133
- in seismic data messaging between seismic applications and organizations.
134
-
135
- ## Output
136
-
137
- ```json
138
- {
139
- "backAzimuth" : Number,
140
- "backAzimuthError" : Number,
141
- "slowness" : Number,
142
- "slownessError" : Number,
143
- "powerRatio" : Number,
144
- "powerRatioError" : Number
145
- }
146
- ```
147
-
148
- ## Glossary
149
-
150
- **Required Values:**
151
-
152
- These are the values **required** to define a beam.
153
-
154
- * backAzimuth - A decimal number containing the back azimuth.
155
- * slowness - A decimal number containing the horizontal slowness.
156
-
157
- **Optional Values:**
158
-
159
- The following are supplementary values that **may or may not** be provided by
160
- various algorithms.
161
-
162
- * backAzimuthError - A decimal number containing the back azimuth error.
163
- * slownessError - A decimal number containing the horizontal slowness error.
164
- * powerRatio - A decimal number containing the power ratio.
165
- * powerRatioError - A decimal number containing the power ratio error.
166
-
167
- # Site Object Specification
168
-
169
- ## Description
170
-
171
- The Site object is an object designed to define the seismic station used to
172
- produce a [Pick](Pick.md) message. Site uses the [JSON](https://www.json.org) and [GeoJSON](https://geojson.org/) standards.
173
-
174
- ## Usage
175
-
176
- Site is intended for use as part of the [Pick](Pick.md) Format in seismic data
177
- messaging between seismic applications and organizations.
178
-
179
- ## Output
180
-
181
- ```json
182
- {
183
- "type": "Feature",
184
- "geometry": {
185
- "type": "Point",
186
- "coordinates": [125.6, 10.1]
187
- },
188
- "properties": {
189
- "Station" : String,
190
- "Channel" : String,
191
- "Network" : String,
192
- "Location" : String
193
- }
194
- }
195
- ```
196
-
197
- ## Glossary
198
-
199
- **Required Values:**
200
-
201
- These are the properties **required** to define a Site.
202
-
203
- * type - A string indicating the geojson feature type
204
- * geometry - A geojson point containing the station coordinates in the form [Latitude, Longitude, Elevation (in meters)]
205
- * station - A string the station code.
206
- * network - A string containing network code.
207
-
208
- **Optional Values:**
209
-
210
- The following are supplementary properties that **may or may not** be provided as
211
- part of a Site.
212
-
213
- * channel - A string containing the channel code.
214
- * location - A string containing the location code.
215
-
216
- # EventType Object Specification
217
-
218
- ## Description
219
-
220
- The EventType object is an object designed to define the originating seismic
221
- organization that produced a [MachineLearning](MachineLearning.md) object.
222
- Site uses the [JSON standard](http://www.json.org).
223
-
224
- ## Usage
225
-
226
- EventType is intended for use as part of the [PicMachineLearningk](MachineLearning.md) Oject in seismic data
227
- messaging between seismic applications and organizations.
228
-
229
- ## Output
230
-
231
- ```json
232
- {
233
- "type" : String,
234
- "certainty" : String
235
- }
236
- ```
237
-
238
- ## Glossary
239
-
240
- **Required Values:**
241
-
242
- These are the values **required** to define a EventType
243
-
244
- * type - A string containing the event type, allowed type strings are: "Earthquake", "MineCollapse", "NuclearExplosion", "QuarryBlast", "InducedOrTriggered", "RockBurst", "FluidInjection", "IceQuake", and "VolcanicEruption"
245
-
246
- **Optional Values:**
247
-
248
- The following are values that **may or may not** be provided as part of EventType.
249
-
250
- * certainty - A string containing the certainty of the event type; allowed strings are "Suspected" and "Confirmed"
251
-
252
- # Filter Object Specification
253
-
254
- ## Description
255
-
256
- The Filter object is an object designed to encode a single set of filter
257
- frequencies that may or may not be part of the filter list in the [Pick](Pick.md)
258
- Format. Filter uses the [JSON standard](http://www.json.org) .
259
-
260
- ## Usage
261
-
262
- The Filter object is intended for use as part of the [Pick](Pick.md) Format
263
- in seismic data messaging between seismic applications and organizations.
264
-
265
- ## Output
266
-
267
- ```json
268
- {
269
- "type" : String,
270
- "highPass" : Number,
271
- "lowPass" : Number,
272
- "units" : String
273
- }
274
- ```
275
-
276
- ## Glossary
277
-
278
- **Optional Values:**
279
-
280
- The following are values that **may or may not** be provided as part of a filter.
281
-
282
- * type - A string containing the type of filter
283
- * highPass - A decimal number containing the high pass frequency in Hz.
284
- * lowPass - A decimal number containing the low pass frequency in Hz.
285
- * units - A string containing the filter frequency units.
286
-
287
- Note: The Type of filter is assumed to be "BandPass", and the Units are assumed
288
- to be "Hertz"
289
-
290
- # MachineLearning Object Specification
291
-
292
- ## Description
293
-
294
- The MachineLearning object is an object designed to encode value added
295
- information available for a [Pick](Pick.md) from advanced algorithms such as
296
- machine learning. MachineLearning uses the [JSON standard](http://www.json.org).
297
-
298
- ## Usage
299
-
300
- MachineLearning is intended for use as part of the [Pick](Pick.md) Format in
301
- seismic data messaging between seismic
302
- applications and organizations.
303
-
304
- ## Output
305
-
306
- ```json
307
- {
308
- "phase" : String,
309
- "phaseProbability" : Number,
310
- "distance" : Number,
311
- "distanceProbability" : Number,
312
- "distanceRangeHalfWidth" : Number,
313
- "distanceRangeSigma" : Number,
314
- "backAzimuth" : Number,
315
- "backAzimuthProbability" : Number,
316
- "magnitude" : Number,
317
- "magnitudeType" : String,
318
- "magnitudeProbability" : Number,
319
- "depth" : Number,
320
- "depthProbability" : Number,
321
- "eventType" :
322
- {
323
- "type" : String,
324
- "certainty" : String
325
- },
326
- "eventTypeProbability" : Number,
327
- "repickShift" : Number,
328
- "repickSTD" : Number,
329
- "repickCredibleIntervalLower" : Number,
330
- "repickCredibleIntervalUpper" : Number,
331
- "source" :
332
- {
333
- "agencyID" : String,
334
- "author" : String
335
- }
336
- }
337
- ```
338
-
339
- ## Glossary
340
-
341
- **Optional Values:**
342
-
343
- The following are values that **may or may not** be provided as part of MachineLearning.
344
-
345
- * phase - A string that identifies the seismic phase for this data
346
- * phaseProbability - A decimal number containing the probability of the phase identification
347
- * distance - A decimal number containing a distance estimation in degrees
348
- * distanceProbability - A decimal number containing the probability of the distance estimation
349
- * distanceRangeHalfWidth - A decimal number containing the half-width of a distance range centered at Distance (e.g. Distance is 15 deg +/- 10 deg)
350
- * distanceRangeSigma - A decimal number containing the standard deviation for a probability PDF curve for Distance (e.g. Distance is 15 deg +/- 3 * DistanceRangeSigma where DistanceProbability is modified by the PDF probability, lowering as it gets further from Distance ). DistanceRangeSigma is mutually exclusive of DistanceRangeHalfWidth, and if both are provided DistanceRangeSigma should be used.
351
- * backAzimuth - A decimal number containing a backazimuth estimation in degrees
352
- * backAzimuthProbability - A decimal number containing the probability of the backazimuth estimation
353
- * magnitude - A decimal number containing the magnitude estimation
354
- * magnitudeType - A string that identifies the magnitude type
355
- * magnitudeProbability - A decimal number containing the probability of the magnitude estimation
356
- * depth - A decimal number containing a depth estimation in kilometers
357
- * depthProbability - A decimal number containing the probability of the depth estimation
358
- * eventType - An object containing the event type, see [EventType](EventType.md).
359
- * eventTypeProbability - A decimal number containing the probability of the event type estimation
360
- * repickShift - A decimal number containing the repick shift in seconds (to regenerate the initial Pick.Time, subtract this value from the current Pick.Time)
361
- * repickSTD - A decimal number containing the repick shift standard deviation
362
- * repickCredibleIntervalLower - A decimal number containing the repick shift credible interval lower
363
- * repickCredibleIntervalUpper - A decimal number containing the repick shift credible interval upper
364
- * source - An object containing the source of the MachineLearning, see [Source](Source.md).
365
-
366
- # Pick Format Specification
367
-
368
- ## Description
369
-
370
- Pick is a format designed to encode the basic information of an unassociated
371
- waveform arrival time pick. Pick uses the
372
- [JSON standard](http://www.json.org).
373
-
374
- ## Usage
375
- Pick is intended for use in seismic data messaging between seismic
376
- applications and organizations.
377
-
378
- ## Output
379
-
380
- ```json
381
- {
382
- "type" : "Pick",
383
- "id" : String,
384
- "channel" :
385
- {
386
- "type": "Feature",
387
- "geometry": {
388
- "type": "Point",
389
- "coordinates": [125.6, 10.1, 1589.0]
390
- },
391
- "properties": {
392
- "station" : String,
393
- "channel" : String,
394
- "network" : String,
395
- "location" : String
396
- }
397
- },
398
- "time" : ISO8601,
399
- "source" :
400
- {
401
- "agencyID" : String,
402
- "author" : String
403
- },
404
- "phase" : String,
405
- "polarity" : ("up" | "down"),
406
- "onset" : ("impulsive" | "emergent" | "questionable"),
407
- "pickerType" : ("manual" | "raypicker" | "filterpicker" | "earthworm" | "other"),
408
- "filterInfo" : [ {
409
- "type" : String,
410
- "highPass" : Number,
411
- "lowPass" : Number,
412
- "units" : String
413
- }, ...],
414
- "amplitudeInfo" :
415
- {
416
- "value" : Number,
417
- "period" : Number,
418
- "snr" : Number
419
- },
420
- "beamInfo" :
421
- {
422
- "backAzimuth" : Number,
423
- "backAzimuthError" : Number,
424
- "slowness" : Number,
425
- "slownessError" : Number,
426
- "powerRatio" : Number,
427
- "powerRatioError" : Number,
428
- },
429
- "associationInfo" :
430
- {
431
- "phase" : String,
432
- "distance" : Number,
433
- "azimuth" : Number,
434
- "residual" : Number,
435
- "sigma" : Number
436
- },
437
- "qualityInfo" : [ {
438
- "standard": String,
439
- "value": Number
440
- }, ...],
441
- "machineLearningInfo" :
442
- {
443
- "phase" : String,
444
- "phaseProbability" : Number,
445
- "distance" : Number,
446
- "distanceProbability" : Number,
447
- "distanceRangeHalfWidth" : Number,
448
- "distanceRangeSigma" : Number,
449
- "backAzimuth" : Number,
450
- "backAzimuthProbability" : Number,
451
- "magnitude" : Number,
452
- "magnitudeType" : String,
453
- "magnitudeProbability" : Number,
454
- "depth" : Number,
455
- "depthProbability" : Number,
456
- "eventType" : {
457
- "type" : String,
458
- "certainty" : String
459
- },
460
- "eventTypeProbability" : Number,
461
- "repickShift" : Number,
462
- "repickSTD" : Number,
463
- "repickCredibleIntervalLower" : Number,
464
- "repickCredibleIntervalUpper" : Number,
465
- "source" : {
466
- "agencyID" : String,
467
- "author" : String
468
- }
469
- }
470
- }
471
- ```
472
-
473
- ## Glossary
474
-
475
- **Required Values:**
476
-
477
- These are the values **required** to define a pick.
478
-
479
- * type - A string that identifies this message as a pick.
480
- * id - A string containing an unique identifier for this pick.
481
- * channel - A GeoJSON object containing the channel the pick was made at, see [Channel](Channel.md).
482
- * source - An object containing the source of the pick, see [Source](Source.md).
483
- * time - A string containing the UTC arrival time of the phase that was picked, in the ISO8601 format `YYYY-MM-DDTHH:MM:SS.SSSZ`.
484
-
485
- **Optional Values:**
486
-
487
- The following are supplementary values that **may or may not** be provided by
488
- various picking algorithms.
489
-
490
- * phase - A string that identifies the seismic phase that was picked.
491
- * polarity - A string containing the phase polarity; "up" or "down".
492
- * onset - A string containing the phase onset; "impulsive", "emergent", or "questionable" .
493
- * pickerType - A string describing the type of picker; "manual", "raypicker", "filterpicker", "earthworm", or "other".
494
- * filter - An array of objects containing the filter frequencies when the pick was made, see [Filter](Filter.md).
495
- * amplitude - An object containing the amplitude associated with the pick, see [Amplitude](Amplitude.md).
496
- * beam - An object containing the waveform beam information associated with the pick, see [Beam](Beam.md).
497
- * associationInfo - An object containing the association information if this pick is used as data in a Detection, see [Associated](Associated.md).
498
- * machineLearningInfo - An object containing the machine learning information of this pick, see [MachineLearning](MachineLearning.md).
499
- * qualityInfo - An array of objects containing the containing the quality metric information for this pick, see [Quality](Quality.md).
500
-
501
- # Quality Object Specification
502
-
503
- ## Description
504
-
505
- The Quality object is an object designed to hold data quality for a [Pick](Pick.md) message.
506
- Site uses the [JSON standard](http://www.json.org).
507
-
508
- ## Usage
509
-
510
- Quality is intended for use as part of the [Pick](Pick.md) Format in seismic data
511
- messaging between seismic applications and organizations.
512
-
513
- ## Output
514
-
515
- ```json
516
- {
517
- "standard": String,
518
- "value": Number
519
- }
520
- ```
521
-
522
- ## Glossary
523
-
524
- **Required Values:**
525
-
526
- These are the values **required** to define a Quality
527
-
528
- * standard - A string containing the name of the quality standard.
529
- * value - A string containing numarical value of the quality standard.
530
-
531
- # Source Object Specification
532
-
533
- ## Description
534
-
535
- The Source object is an object designed to define the originating seismic
536
- organization that produced a [Pick](Pick.md) message.
537
- Site uses the [JSON standard](http://www.json.org).
538
-
539
- ## Usage
540
-
541
- Source is intended for use as part of the [Pick](Pick.md) Format in seismic data
542
- messaging between seismic applications and organizations.
543
-
544
- ## Output
545
-
546
- ```json
547
- {
548
- "agencyID" : String,
549
- "author" : String
550
- }
551
- ```
552
-
553
- ## Glossary
554
-
555
- **Required Values:**
556
-
557
- These are the values **required** to define a Source
558
-
559
- * agencyID - A string containing the originating agency FDSN ID.
560
- * author - A string containing the source author.
561
-
@@ -1,15 +0,0 @@
1
- anssformats/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- anssformats/amplitude.py,sha256=DCtkc6siT8dj0C3ynBM01rfaSpKOeHTc-51CDmspELg,647
3
- anssformats/association.py,sha256=UjDmr213kSzCCIWHMDce06Xjzut3-l1IYKexxJR_C_I,1173
4
- anssformats/beam.py,sha256=lKmdXi_9Y9kglXPrsDG8WT2_41ssX92PrLxxjws7wT4,1004
5
- anssformats/channel.py,sha256=AQ6wrBCtMhXwN3gdxWQKkiutqnVB9H2FmbMrOY6CTC8,2240
6
- anssformats/eventType.py,sha256=6DHjovVVGug5AU1WRKX3RxQbrnwSPlg2KNtH2sLX1Q0,1216
7
- anssformats/filter.py,sha256=uOAUteSWeBvDcB12_9E_9sqe0wb6iR_lHZdjLv5agNM,675
8
- anssformats/formatbasemodel.py,sha256=naZFyuL6YOIMdpYpxD91l7MbbG6eafz3-itzLilSoG8,1933
9
- anssformats/machineLearning.py,sha256=6xwmv_F2VuxZl-ng7qUKlV7Mrlw6aQ-LKg6A6x3vdfo,3261
10
- anssformats/pick.py,sha256=Spaw7Paw_UCkVxNSJxJSirmiXM3e2KEY8-nIwVgk2Vg,2867
11
- anssformats/quality.py,sha256=PK5sPAoJTUgCrmQkiWTerEB2klWbR1ECAU-qIABpcjA,389
12
- anssformats/source.py,sha256=G78YUwY2fimi8Uy0d1mKjaIIsEDUue50wdHpACQsSMU,398
13
- anss_formats-0.0.4.dist-info/METADATA,sha256=8RSZ270Phj4OLm0zK_6NMkrokce0uEUHkoadqJY9a9w,19170
14
- anss_formats-0.0.4.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
15
- anss_formats-0.0.4.dist-info/RECORD,,
anssformats/beam.py DELETED
@@ -1,34 +0,0 @@
1
- from typing import Optional
2
-
3
- from pydantic import Field
4
-
5
- from anssformats.formatbasemodel import FormatBaseModel
6
-
7
-
8
- class Beam(FormatBaseModel):
9
- """A conversion class used to create, parse, and validate beam detection data.
10
-
11
- Attributes
12
- ----------
13
-
14
- backAzimuth: float containing the back azimuth in degrees
15
-
16
- backAzimuthError: optional float containing the back azimuth error
17
-
18
- slowness: float containing the horizontal slowness
19
-
20
- slownessError: optional float containing the horizontal slowness error
21
-
22
- powerRatio: optional float containing the power ratio
23
-
24
- powerRatioError: optional float containing the power ratio error
25
- """
26
-
27
- backAzimuth: float = Field(ge=0.0)
28
- backAzimuthError: Optional[float] = Field(None, ge=0.0)
29
-
30
- slowness: float = Field(ge=0.0)
31
- slownessError: Optional[float] = Field(None, ge=0.0)
32
-
33
- powerRatio: Optional[float] = Field(None, ge=0.0)
34
- powerRatioError: Optional[float] = Field(None, ge=0.0)
@@ -1,89 +0,0 @@
1
- from typing import Optional
2
-
3
- from pydantic import Field
4
-
5
- from anssformats.eventType import EventType as EventTypeFormat
6
- from anssformats.formatbasemodel import FormatBaseModel
7
- from anssformats.source import Source as SourceFormat
8
-
9
-
10
- class MachineLearning(FormatBaseModel):
11
- """A conversion class used to create, parse, and validate value added MachineLearning
12
- data from advanced algorithms such as machine learning as part of detection formats
13
- data.
14
-
15
- Attributes
16
- ----------
17
-
18
- phase: optional string containing MachineLearning phase name
19
-
20
- phaseProbability: optional float containing the probability of the MachineLearning
21
- phase name
22
-
23
- distance: optional float containing the MachineLearning distance in degrees
24
-
25
- distanceProbability: optional float containing the probability of the MachineLearning
26
- distance
27
-
28
- backAzimuth: optional float containing the MachineLearning back azimuth in degrees
29
-
30
- backAzimuthProbability: optional float containing the probability of the
31
- MachineLearning back azimuth
32
-
33
- magnitude: optional float containing the MachineLearning magnitude
34
-
35
- magnitudeType: optional string containing the MachineLearning magnitude type
36
-
37
- magnitudeProbability: optional float containing the probability of the
38
- MachineLearning magnitude
39
-
40
- depth: optional float containing the MachineLearning depth in kilometers
41
-
42
- depthProbability: optional float containing the probability of the MachineLearning
43
- depth
44
-
45
- eventType: optional EventType object containing the MachineLearning event type
46
-
47
- eventTypeProbability: optional float containing the probability of the
48
- MachineLearning event type
49
-
50
- repickShift: optional float containing the repick shift value in seconds
51
-
52
- repickSTD: optional float containing the repick shift standard deviation
53
-
54
- repickCredibleIntervalLower: optional float containing the repick shift lower credible interval
55
-
56
- repickCredibleIntervalUpper: optional float containing the repick shift upper credible interval
57
-
58
- source: optional Source object containing the source of the MachineLearning
59
- information
60
- """
61
-
62
- phase: Optional[str] = Field(None, pattern=r"^[A-Za-z]+$")
63
- phaseProbability: Optional[float] = None
64
-
65
- distance: Optional[float] = Field(None, ge=0.0)
66
- distanceProbability: Optional[float] = None
67
-
68
- backAzimuth: Optional[float] = Field(None, ge=0.0)
69
- backAzimuthProbability: Optional[float] = None
70
-
71
- magnitude: Optional[float] = Field(None, ge=-2.0, le=10.0)
72
- magnitudeType: Optional[str] = None
73
- magnitudeProbability: Optional[float] = None
74
-
75
- depth: Optional[float] = Field(None, ge=-100.0, le=1500.0)
76
- depthProbability: Optional[float] = None
77
-
78
- eventType: Optional[EventTypeFormat] = None
79
- eventTypeProbability: Optional[float] = None
80
-
81
- distanceRangeHalfWidth: Optional[float] = None
82
- distanceRangeSigma: Optional[float] = None
83
-
84
- repickShift: Optional[float] = None
85
- repickSTD: Optional[float] = None
86
- repickCredibleIntervalLower: Optional[float] = None
87
- repickCredibleIntervalUpper: Optional[float] = None
88
-
89
- source: Optional[SourceFormat] = None
anssformats/quality.py DELETED
@@ -1,17 +0,0 @@
1
- from anssformats.formatbasemodel import FormatBaseModel
2
-
3
-
4
- class Quality(FormatBaseModel):
5
- """A conversion class used to create, parse, and validate Quality data as part of
6
- detection data.
7
-
8
- Attributes
9
- ----------
10
-
11
- standard: string containing the name of the quality standard used
12
-
13
- value: float containing the Quality value
14
- """
15
-
16
- standard: str
17
- value: float