anss-formats 0.0.4__tar.gz → 0.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,24 @@
1
+ Metadata-Version: 2.4
2
+ Name: anss-formats
3
+ Version: 0.1.1
4
+ Summary: Python implementation of the library used to communicate seismic event detection information between systems
5
+ License: CC0-1.0
6
+ Keywords: anss,earthquakes,formats,detection
7
+ Author: John Patton
8
+ Author-email: jpatton@usgs.gov
9
+ Requires-Python: >3.9.1,<3.12
10
+ Classifier: License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication
11
+ Classifier: Programming Language :: Python :: 3
12
+ Classifier: Programming Language :: Python :: 3.10
13
+ Classifier: Programming Language :: Python :: 3.11
14
+ Provides-Extra: pycurl
15
+ Requires-Dist: certifi (>=2024.07.04,<2025.0.0)
16
+ Requires-Dist: cryptography (>=42.0.5)
17
+ Requires-Dist: docutils (!=0.21.post1)
18
+ Requires-Dist: dparse (>=0.6.2,<0.7.0)
19
+ Requires-Dist: pydantic (>=2.6.0,<3.0.0)
20
+ Requires-Dist: requests (>=2.32.2,<3.0.0)
21
+ Requires-Dist: twine (>=5.1.1,<6.0.0)
22
+ Requires-Dist: urllib3 (>=2.6.0,<3.0.0)
23
+ Project-URL: Homepage, https://gitlab.com/anss-netops/anss-data-formats
24
+ Project-URL: Repository, https://gitlab.com/anss-netops/anss-data-formats
@@ -0,0 +1,26 @@
1
+ from anssformats.pick import Pick
2
+ from anssformats.source import Source
3
+ from anssformats.amplitude import Amplitude
4
+ from anssformats.analytics import Analytics, Prediction
5
+ from anssformats.association import Association
6
+ from anssformats.detection import Detection
7
+ from anssformats.hypocenter import Hypocenter
8
+ from anssformats.magnitude import Magnitude
9
+ from anssformats.channel import Channel
10
+ from anssformats.filter import Filter
11
+ from anssformats.eventType import EventType
12
+
13
+ __all__ = [
14
+ "Pick",
15
+ "Source",
16
+ "Amplitude",
17
+ "Analytics",
18
+ "Prediction",
19
+ "Association",
20
+ "Detection",
21
+ "Hypocenter",
22
+ "Magnitude",
23
+ "Channel",
24
+ "Filter",
25
+ "EventType",
26
+ ]
@@ -0,0 +1,82 @@
1
+ from typing import Optional, Union, List
2
+
3
+ from pydantic import Field
4
+
5
+ from anssformats.formatbasemodel import FormatBaseModel
6
+ from anssformats.source import Source as SourceFormat
7
+
8
+
9
+ class Prediction(FormatBaseModel):
10
+ """A generic prediction from an analytical model (AI/ML, statistical, rule-based, etc.)
11
+
12
+ This class provides a flexible structure for representing any type of prediction
13
+ with associated metrics and model provenance.
14
+
15
+ Attributes
16
+ ----------
17
+ label: string identifying what is being predicted (e.g., "phase", "magnitude",
18
+ "eventType", "custom_attribute")
19
+
20
+ value: the predicted value - can be string, number, or structured object
21
+
22
+ probability: optional float [0.0-1.0] containing the probability of this prediction
23
+
24
+ metrics: optional dict containing additional prediction metrics such as confidence,
25
+ uncertainty quantification (std, ranges, credible intervals), entropy, etc.
26
+ Structure is flexible to support any model-specific metrics.
27
+
28
+ modelID: optional string identifying which model made this prediction
29
+
30
+ modelVersion: optional string containing the version of the model
31
+
32
+ source: optional Source object containing the source/author of the model
33
+ """
34
+
35
+ label: str = Field(
36
+ ...,
37
+ description="What is being predicted (e.g., phase, magnitude, eventType, distance)",
38
+ )
39
+ value: Union[str, float, int, list, dict] = Field(
40
+ ..., description="The predicted value"
41
+ )
42
+ probability: Optional[float] = Field(
43
+ None, ge=0.0, le=1.0, description="Probability of this prediction [0.0-1.0]"
44
+ )
45
+ metrics: Optional[dict] = Field(
46
+ None,
47
+ description="Additional prediction metrics (confidence, uncertainty, entropy, etc.)",
48
+ )
49
+ modelID: Optional[str] = Field(
50
+ None, description="Identifier for the model that made this prediction"
51
+ )
52
+ modelVersion: Optional[str] = Field(None, description="Version of the model")
53
+ source: Optional[SourceFormat] = Field(
54
+ None, description="Source/author of the model"
55
+ )
56
+
57
+
58
+ class Analytics(FormatBaseModel):
59
+ """A conversion class used to create, parse, and validate analytical information
60
+ from models such as pickers, analytical models, AI/ML, etc.
61
+
62
+ This class provides an extensible structure supporting multiple models, arbitrary
63
+ prediction labels, and custom extensions.
64
+
65
+ Attributes
66
+ ----------
67
+
68
+ predictions: optional list of Prediction objects containing predictions from one
69
+ or more models. Each prediction can represent any type of analytical output
70
+ with associated confidence metrics and model provenance.
71
+
72
+ extensions: optional dict containing custom key-value pairs for experimental code and debugging.
73
+
74
+ """
75
+
76
+ predictions: Optional[List[Prediction]] = Field(
77
+ None, description="Array of predictions from one or more analytical models"
78
+ )
79
+
80
+ extensions: Optional[dict] = Field(
81
+ None, description="Custom key-value pairs for experimental or debugging data"
82
+ )
@@ -0,0 +1,40 @@
1
+ from typing import Optional, List, Literal
2
+
3
+ from pydantic import Field, field_validator, ValidationInfo
4
+ from anssformats.formatbasemodel import FormatBaseModel
5
+ from anssformats.geojson import PointGeometry
6
+
7
+
8
+ class ChannelProperties(FormatBaseModel):
9
+ """A class holding the channel specific custom properties for a geojson point feature
10
+
11
+ Station: string containing the station code
12
+
13
+ Channel: optional string containing the channel code
14
+
15
+ Network: string containing the network code
16
+
17
+ Location: optional string containing the location code
18
+ """
19
+
20
+ station: str
21
+ channel: Optional[str] = None
22
+ network: str
23
+ location: Optional[str] = None
24
+
25
+
26
+ class Channel(FormatBaseModel):
27
+ """A conversion class used to create, parse, and validate geojson Channel data as part of
28
+ detection data.
29
+
30
+ type: string containing the type of this geojson
31
+
32
+ geometry: PointGeometry object containing the geojson geometry for this feature
33
+
34
+ properties: ChannelProperties object containing the channel properties
35
+ """
36
+
37
+ type: str = "Feature"
38
+
39
+ geometry: PointGeometry
40
+ properties: ChannelProperties
@@ -0,0 +1,69 @@
1
+ from typing import List, Literal, Optional
2
+
3
+ from pydantic import Field
4
+
5
+
6
+ from anssformats.formatbasemodel import CustomDT, FormatBaseModel
7
+ from anssformats.eventType import EventType
8
+ from anssformats.hypocenter import Hypocenter, HypocenterProperties
9
+ from anssformats.source import Source
10
+ from anssformats.pick import Pick
11
+ from anssformats.magnitude import Magnitude
12
+ from anssformats.analytics import Analytics
13
+
14
+
15
+ class Detection(FormatBaseModel):
16
+ """A conversion class used to create, parse, and validate detection data.
17
+
18
+ type: string identifying this message as a detection
19
+
20
+ id: string containing a unique identifier for this detection
21
+
22
+ source: Source object containing the source of the detection
23
+
24
+ hypocenter: Hypocenter object containing the hypocenter of the detection
25
+
26
+ detectionType: optional string containing the origin type of this detection; valid
27
+ values are "New", "Update", "Final", and "Retract"
28
+
29
+ detectionTime: optional datetime containing the time this detection was made
30
+
31
+ eventType: optional EventType object containing the event type of the detection
32
+
33
+ minimumDistance: optional float containing the distance to the closest station
34
+
35
+ rms: optional float containing the detection RMS
36
+
37
+ maximumGap: optional float containing the detection gap
38
+
39
+ detector: optional string containing the detection grid, algorithm, or other
40
+ information
41
+
42
+ pickData: optional list of Pick objects used to generate
43
+ this detection
44
+
45
+ analyticsInfo: optional analytics object containing model output attached to this detection
46
+ """
47
+
48
+ type: Literal["Detection"]
49
+ id: str
50
+ source: Source
51
+
52
+ hypocenter: Hypocenter
53
+
54
+ detectionType: Optional[Literal["New", "Update", "Final", "Retract"]] = None
55
+ detectionTime: Optional[CustomDT] = None
56
+
57
+ eventType: Optional[EventType] = None
58
+
59
+ minimumDistance: Optional[float] = Field(None, ge=0.0)
60
+ rms: Optional[float] = None
61
+ maximumGap: Optional[float] = Field(None, ge=0.0, le=360.0)
62
+
63
+ detector: Optional[str] = None
64
+
65
+ pickData: Optional[List[Pick]] = None
66
+
67
+ magnitudeData: Optional[List[Magnitude]] = None
68
+
69
+ analyticsInfo: Optional[Analytics] = None
@@ -1,7 +1,7 @@
1
1
  from datetime import datetime
2
2
  from typing import Any
3
3
 
4
- from pydantic import BaseModel, GetCoreSchemaHandler
4
+ from pydantic import BaseModel, GetCoreSchemaHandler, field_validator
5
5
  from pydantic_core import CoreSchema, core_schema
6
6
 
7
7
 
@@ -22,10 +22,6 @@ def convert_datetime_to_iso8601_with_z_suffix(dt: datetime) -> str:
22
22
  class FormatBaseModel(BaseModel):
23
23
  """A Pydantic BaseModel used for any required formatting of keys and values"""
24
24
 
25
- class Config:
26
- # conversion for datetime to datetime string
27
- json_encoders = {datetime: convert_datetime_to_iso8601_with_z_suffix}
28
-
29
25
  def model_dump(self):
30
26
  """Override the default model_dump method to always exclude None values"""
31
27
  return super().model_dump(exclude_none=True)
@@ -41,17 +37,22 @@ class CustomDT(datetime):
41
37
  strings.
42
38
  """
43
39
 
40
+ @field_validator("*", mode="before")
44
41
  @classmethod
45
42
  def validate_no_tz(cls, v: Any, info: core_schema.ValidationInfo) -> Any:
46
43
  if isinstance(v, str):
47
44
  return datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=None)
48
- else:
49
- return v.replace(tzinfo=None)
45
+ return v.replace(tzinfo=None)
50
46
 
51
47
  @classmethod
52
48
  def __get_pydantic_core_schema__(
53
49
  cls, source_type: Any, handler: GetCoreSchemaHandler
54
50
  ) -> CoreSchema:
55
- return core_schema.with_info_plain_validator_function(
56
- function=cls.validate_no_tz
51
+ return core_schema.with_info_before_validator_function(
52
+ cls.validate_no_tz,
53
+ handler(datetime),
54
+ serialization=core_schema.plain_serializer_function_ser_schema(
55
+ convert_datetime_to_iso8601_with_z_suffix,
56
+ when_used="json-unless-none",
57
+ ),
57
58
  )
@@ -0,0 +1,41 @@
1
+ from typing import Optional, List, Literal
2
+
3
+ from pydantic import Field, field_validator, ValidationInfo
4
+ from anssformats.formatbasemodel import FormatBaseModel
5
+
6
+
7
+ class PointGeometry(FormatBaseModel):
8
+ """A class holding a geojson point geometry
9
+
10
+ type: string containing the type of this geometry
11
+
12
+ coordinates: List of floats containing the longitude in degrees, latitude in degrees, and elevation in meters or depth in kilometers, in that order
13
+ """
14
+
15
+ type: str = "Point"
16
+ coordinates: List[float]
17
+
18
+ # check that coordinates are valid
19
+ @field_validator("coordinates")
20
+ @classmethod
21
+ def validate_coordinates(
22
+ cls, value: List[float], info: ValidationInfo
23
+ ) -> List[float]:
24
+ if value is None:
25
+ raise ValueError("Missing coordinates")
26
+
27
+ if len(value) != 3:
28
+ raise ValueError("Incomplete coordinates")
29
+
30
+ # longitude
31
+ if value[0] < -180.0 or value[0] > 180.0:
32
+ raise ValueError("Longitude coordinate out of valid range")
33
+
34
+ # latitude
35
+ if value[1] < -90.0 or value[1] > 90.0:
36
+ raise ValueError("Latitude coordinate out of valid range")
37
+
38
+ # don't validate elevation/depth
39
+ # value[2]
40
+
41
+ return value
@@ -0,0 +1,47 @@
1
+ from typing import Optional, List, Literal
2
+
3
+ from pydantic import Field, field_validator, ValidationInfo
4
+ from anssformats.formatbasemodel import CustomDT, FormatBaseModel
5
+ from anssformats.geojson import PointGeometry
6
+
7
+
8
+ class HypocenterProperties(FormatBaseModel):
9
+ """A class holding the hypocenter specific custom properties for a geojson point feature
10
+
11
+ originTime: required datetime containing the origin time of the hypocenter
12
+
13
+ latitudeError: optional float containing the error of the latitude of this
14
+ hypocenter in kilometers
15
+
16
+ longitudeError: optional float containing the error of the longitude of this
17
+ hypocenter in kilometers
18
+
19
+ depthError: optional float containing the error of the depth of this hypocenter in
20
+ kilometers
21
+
22
+ timeError: optional float containing the error of the origin time of this hypocenter
23
+ in seconds
24
+ """
25
+
26
+ originTime: CustomDT
27
+ latitudeError: Optional[float] = None
28
+ longitudeError: Optional[float] = None
29
+ depthError: Optional[float] = None
30
+ timeError: Optional[float] = None
31
+
32
+
33
+ class Hypocenter(FormatBaseModel):
34
+ """A conversion class used to create, parse, and validate geojson Hypocenter data as part of
35
+ detection data.
36
+
37
+ type: string containing the type of this geojson
38
+
39
+ geometry: PointGeometry object containing the geojson geometry for this feature
40
+
41
+ properties: HypocenterProperties object containing the hypocenter properties
42
+ """
43
+
44
+ type: str = "Feature"
45
+
46
+ geometry: PointGeometry
47
+ properties: HypocenterProperties
@@ -0,0 +1,35 @@
1
+ from typing import Optional
2
+
3
+ from pydantic import Field
4
+
5
+ from anssformats.formatbasemodel import FormatBaseModel
6
+ from anssformats.source import Source
7
+
8
+
9
+ class Magnitude(FormatBaseModel):
10
+ """A conversion class used to create, parse, and validate Magnitude data as part
11
+ of detection data.
12
+
13
+ Attributes
14
+ ----------
15
+
16
+ value: float containing the magnitude value
17
+
18
+ type: string containing the magnitude type
19
+
20
+ error: optional float containing the associated magnitude error (if any)
21
+
22
+ probability: optional float containing the associated magnitude probability (if any)
23
+
24
+ id: optional string containing a unique identifier for this magnitude
25
+
26
+ source: optional Source object containing the source of the magnitude
27
+ """
28
+
29
+ value: float = Field(None, ge=-2.0, le=10.0)
30
+ type: str
31
+
32
+ error: Optional[float] = Field(None, ge=0.0)
33
+ probability: Optional[float] = Field(None, ge=0.0, le=100.0)
34
+ id: Optional[str] = None
35
+ source: Optional[Source] = None
@@ -4,13 +4,11 @@ from pydantic import Field
4
4
 
5
5
  from anssformats.amplitude import Amplitude
6
6
  from anssformats.association import Association
7
- from anssformats.beam import Beam
8
- from anssformats.machineLearning import MachineLearning
7
+ from anssformats.analytics import Analytics
9
8
  from anssformats.filter import Filter
10
9
  from anssformats.formatbasemodel import CustomDT, FormatBaseModel
11
10
  from anssformats.channel import Channel, ChannelProperties
12
11
  from anssformats.source import Source
13
- from anssformats.quality import Quality
14
12
 
15
13
 
16
14
  class Pick(FormatBaseModel):
@@ -40,20 +38,12 @@ class Pick(FormatBaseModel):
40
38
  filterInfo: optional list of Filter objects containing the filter frequencies when the
41
39
  pick was made
42
40
 
43
- amplitude: optional Amplitude object containing the amplitude associated with the
44
- pick
45
-
46
- beamInfo: optional Beam object containing the waveform beam information associated with
47
- the pick
41
+ amplitude: optional Amplitude object containing the amplitude associated with the pick
48
42
 
49
43
  associationInfo: optional Association object containing the association information
50
44
  if this pick is used as data in a Detection
51
45
 
52
- machineLearningInfo: optional machineLearning object containing the machineLearning
53
- information of this pick
54
-
55
- qualityInfo: optional quality object containing the quality
56
- information of this pick
46
+ analyticsInfo: optional Analytics object containing model output attached to this pick
57
47
  """
58
48
 
59
49
  type: Literal["Pick"]
@@ -72,8 +62,6 @@ class Pick(FormatBaseModel):
72
62
 
73
63
  filterInfo: Optional[List[Filter]] = None
74
64
  amplitudeInfo: Optional[Amplitude] = None
75
- beamInfo: Optional[Beam] = None
76
65
 
77
66
  associationInfo: Optional[Association] = None
78
- machineLearningInfo: Optional[MachineLearning] = None
79
- qualityInfo: Optional[List[Quality]] = None
67
+ analyticsInfo: Optional[Analytics] = None
@@ -1,39 +1,39 @@
1
- [tool.poetry]
2
- name = "anss-formats"
3
- version = "0.0.4" #prev 0.0.1, 0.0.2
4
- description = "Python implementation of the library used to communicate seismic event detection information between systems"
5
- authors = ["John Patton <jpatton@usgs.gov>"]
6
- readme = "README_purpose.md"
7
- license = "CC0-1.0"
8
- homepage ="https://gitlab.com/anss-netops/anss-data-formats"
9
- include = [
10
- "LICENSE.md"
11
- ]
12
- repository="https://gitlab.com/anss-netops/anss-data-formats"
13
- keywords = ["anss", "earthquakes", "formats", "detection"]
14
- packages = [
15
- {include = "anssformats" }
16
- ]
17
-
18
- [tool.poetry.dependencies]
19
- python = ">3.9.1,<3.12"
20
- twine = "^5.1.1"
21
- dparse = "^0.6.2"
22
- certifi = "^2024.07.04"
23
- cryptography = "^44.0.1"
24
- pydantic = "^2.6.0"
25
- docutils = "!=0.21.post1"
26
- requests = "^2.32.2"
27
-
28
- [tool.poetry.group.dev.dependencies]
29
- black = "^24.10.0"
30
- safety = "^2.2.0"
31
- pytest = "^7.3.1"
32
- pytest-cov = "^4.1.0"
33
-
34
- [tool.poetry.extras]
35
- pycurl = ["pycurl"]
36
-
37
- [build-system]
38
- requires = ["poetry-core>=1.0.0"]
39
- build-backend = "poetry.core.masonry.api"
1
+ [tool.poetry]
2
+ name = "anss-formats"
3
+ version = "0.1.1"
4
+ description = "Python implementation of the library used to communicate seismic event detection information between systems"
5
+ authors = ["John Patton <jpatton@usgs.gov>"]
6
+ license = "CC0-1.0"
7
+ homepage ="https://gitlab.com/anss-netops/anss-data-formats"
8
+ include = [
9
+ "LICENSE.md"
10
+ ]
11
+ repository="https://gitlab.com/anss-netops/anss-data-formats"
12
+ keywords = ["anss", "earthquakes", "formats", "detection"]
13
+ packages = [
14
+ {include = "anssformats" }
15
+ ]
16
+
17
+ [tool.poetry.dependencies]
18
+ python = ">3.9.1,<3.12"
19
+ twine = "^5.1.1"
20
+ dparse = "^0.6.2"
21
+ certifi = "^2024.07.04"
22
+ cryptography = ">=42.0.5"
23
+ pydantic = "^2.6.0"
24
+ docutils = "!=0.21.post1"
25
+ requests = "^2.32.2"
26
+ urllib3 = "^2.6.0"
27
+
28
+ [tool.poetry.group.dev.dependencies]
29
+ black = "^24.10.0"
30
+ safety = "^2.2.0"
31
+ pytest = "^7.3.1"
32
+ pytest-cov = "^4.1.0"
33
+
34
+ [tool.poetry.extras]
35
+ pycurl = ["pycurl"]
36
+
37
+ [build-system]
38
+ requires = ["poetry-core>=1.0.0"]
39
+ build-backend = "poetry.core.masonry.api"