tom-alertstreams 0.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tom_alertstreams-0.0.0/PKG-INFO +187 -0
- tom_alertstreams-0.0.0/README.md +153 -0
- tom_alertstreams-0.0.0/pyproject.toml +75 -0
- tom_alertstreams-0.0.0/tom_alertstreams/__init__.py +2 -0
- tom_alertstreams-0.0.0/tom_alertstreams/admin.py +3 -0
- tom_alertstreams-0.0.0/tom_alertstreams/alertstreams/__init__.py +0 -0
- tom_alertstreams-0.0.0/tom_alertstreams/alertstreams/alertstream.py +108 -0
- tom_alertstreams-0.0.0/tom_alertstreams/alertstreams/gcn.py +69 -0
- tom_alertstreams-0.0.0/tom_alertstreams/alertstreams/hopskotch.py +164 -0
- tom_alertstreams-0.0.0/tom_alertstreams/apps.py +6 -0
- tom_alertstreams-0.0.0/tom_alertstreams/management/commands/__init__.py +0 -0
- tom_alertstreams-0.0.0/tom_alertstreams/management/commands/hoptestpub.py +40 -0
- tom_alertstreams-0.0.0/tom_alertstreams/management/commands/readstreams.py +38 -0
- tom_alertstreams-0.0.0/tom_alertstreams/migrations/__init__.py +0 -0
- tom_alertstreams-0.0.0/tom_alertstreams/models.py +3 -0
- tom_alertstreams-0.0.0/tom_alertstreams/tests.py +3 -0
- tom_alertstreams-0.0.0/tom_alertstreams/views.py +3 -0
|
@@ -0,0 +1,187 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: tom-alertstreams
|
|
3
|
+
Version: 0.0.0
|
|
4
|
+
Summary: Reusable TOMToolkit app for listening to kafka streams.
|
|
5
|
+
License: GPL-3.0-only
|
|
6
|
+
Keywords: tomtoolkit,astronomy,astrophysics,cosmology,science
|
|
7
|
+
Author: TOM Toolkit Project
|
|
8
|
+
Author-email: tomtoolkit-maintainers@lco.global
|
|
9
|
+
Maintainer: Joey Chatelain
|
|
10
|
+
Maintainer-email: jchate6@gmail.com
|
|
11
|
+
Requires-Python: >=3.9.0,<3.13
|
|
12
|
+
Classifier: Environment :: Web Environment
|
|
13
|
+
Classifier: Framework :: Django
|
|
14
|
+
Classifier: Framework :: Django :: 4.1
|
|
15
|
+
Classifier: Intended Audience :: Developers
|
|
16
|
+
Classifier: Intended Audience :: Science/Research
|
|
17
|
+
Classifier: License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
|
|
18
|
+
Classifier: Operating System :: OS Independent
|
|
19
|
+
Classifier: Programming Language :: Python
|
|
20
|
+
Classifier: Programming Language :: Python :: 3
|
|
21
|
+
Classifier: Programming Language :: Python :: 3 :: Only
|
|
22
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
23
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
24
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
25
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
26
|
+
Classifier: Topic :: Scientific/Engineering :: Astronomy
|
|
27
|
+
Classifier: Topic :: Scientific/Engineering :: Physics
|
|
28
|
+
Requires-Dist: gcn-kafka (>=0.3,<1.0)
|
|
29
|
+
Requires-Dist: hop-client (>=0.10,<1.0)
|
|
30
|
+
Requires-Dist: psycopg2-binary (>=2.9,<3.0)
|
|
31
|
+
Requires-Dist: tomtoolkit (>=2.22,<3.0)
|
|
32
|
+
Description-Content-Type: text/markdown
|
|
33
|
+
|
|
34
|
+
# tom-alertstreams
|
|
35
|
+
|
|
36
|
+
`tom-alertstreams` is a reusable TOM Toolkit app for listening to kafka streams.
|
|
37
|
+
|
|
38
|
+
`tom-alertstreams` provides a management command, `readstreams`. There are no `urlpatterns`,
|
|
39
|
+
no Views, and no templates. The `readstreams` management command reads the `settings.py` `ALERT_STREAMS`
|
|
40
|
+
configuration and starts listening to each configured Kafka stream. It is not expected
|
|
41
|
+
to return, and is intended to run along side your TOM's server component. The `ALERT_STREAMS`
|
|
42
|
+
configuration (see below) tells `readstreams` what streams to access, how to access them,
|
|
43
|
+
what topics to listen to, and what to do with messages that arrive on a given topic.
|
|
44
|
+
|
|
45
|
+
## Installation
|
|
46
|
+
|
|
47
|
+
1. Install the package into your TOM environment:
|
|
48
|
+
```bash
|
|
49
|
+
pip install tom-alertstreams
|
|
50
|
+
```
|
|
51
|
+
|
|
52
|
+
2. In your project `settings.py`, add `tom_alertstreams` to your `INSTALLED_APPS` setting:
|
|
53
|
+
|
|
54
|
+
```python
|
|
55
|
+
INSTALLED_APPS = [
|
|
56
|
+
...
|
|
57
|
+
'tom_alertstreams',
|
|
58
|
+
]
|
|
59
|
+
```
|
|
60
|
+
|
|
61
|
+
At this point you can verify the installation by running `./manage.py` to list the available
|
|
62
|
+
management commands and see
|
|
63
|
+
|
|
64
|
+
```bash
|
|
65
|
+
[tom_alertstreams]
|
|
66
|
+
readstreams
|
|
67
|
+
```
|
|
68
|
+
in the output.
|
|
69
|
+
|
|
70
|
+
## Configuration
|
|
71
|
+
|
|
72
|
+
Each Kafka stream that your TOM listens to (via `readstreams`) will have a configuration dictionary
|
|
73
|
+
in your `settings.py` `ALERT_STREAMS`. `ALERT_STREAMS` is a list of configuration dictionaries, one
|
|
74
|
+
dictionary for each Kafka stream. Here's an example `ALERT_STREAMS` configuration for two Kafka streams:
|
|
75
|
+
[SCiMMA Hopskotch](https://scimma.org/hopskotch.html) and
|
|
76
|
+
[GCN Classic over Kafka](https://gcn.nasa.gov/quickstart).
|
|
77
|
+
|
|
78
|
+
```python
|
|
79
|
+
ALERT_STREAMS = [
|
|
80
|
+
{
|
|
81
|
+
'ACTIVE': True,
|
|
82
|
+
'NAME': 'tom_alertstreams.alertstreams.hopskotch.HopskotchAlertStream',
|
|
83
|
+
'OPTIONS': {
|
|
84
|
+
'URL': 'kafka://kafka.scimma.org/',
|
|
85
|
+
# The hop-client requires that the GROUP_ID prefix match the SCIMMA_AUTH_USERNAME
|
|
86
|
+
'GROUP_ID': os.getenv('SCIMMA_AUTH_USERNAME', "") + '-' + 'uniqueidforyourapp12345',
|
|
87
|
+
'USERNAME': os.getenv('SCIMMA_AUTH_USERNAME', None),
|
|
88
|
+
'PASSWORD': os.getenv('SCIMMA_AUTH_PASSWORD', None),
|
|
89
|
+
'TOPIC_HANDLERS': {
|
|
90
|
+
'sys.heartbeat': 'tom_alertstreams.alertstreams.hopskotch.heartbeat_handler',
|
|
91
|
+
'tomtoolkit.test': 'tom_alertstreams.alertstreams.hopskotch.alert_logger',
|
|
92
|
+
'hermes.test': 'tom_alertstreams.alertstreams.hopskotch.alert_logger',
|
|
93
|
+
'hermes.*': 'regex match public topics here, requires * handler to be defined',
|
|
94
|
+
'*': 'default_handler_here'
|
|
95
|
+
},
|
|
96
|
+
},
|
|
97
|
+
},
|
|
98
|
+
{
|
|
99
|
+
'ACTIVE': True,
|
|
100
|
+
'NAME': 'tom_alertstreams.alertstreams.gcn.GCNClassicAlertStream',
|
|
101
|
+
# The keys of the OPTIONS dictionary become (lower-case) properties of the AlertStream instance.
|
|
102
|
+
'OPTIONS': {
|
|
103
|
+
# see https://github.com/nasa-gcn/gcn-kafka-python#to-use for configuration details.
|
|
104
|
+
'GCN_CLASSIC_CLIENT_ID': os.getenv('GCN_CLASSIC_CLIENT_ID', None),
|
|
105
|
+
'GCN_CLASSIC_CLIENT_SECRET': os.getenv('GCN_CLASSIC_CLIENT_SECRET', None),
|
|
106
|
+
'DOMAIN': 'gcn.nasa.gov', # optional, defaults to 'gcn.nasa.gov'
|
|
107
|
+
'CONFIG': { # optional
|
|
108
|
+
# 'group.id': 'tom_alertstreams-my-custom-group-id',
|
|
109
|
+
# 'auto.offset.reset': 'earliest',
|
|
110
|
+
# 'enable.auto.commit': False
|
|
111
|
+
},
|
|
112
|
+
'TOPIC_HANDLERS': {
|
|
113
|
+
'gcn.classic.text.LVC_INITIAL': 'tom_alertstreams.alertstreams.alertstream.alert_logger',
|
|
114
|
+
'gcn.classic.text.LVC_PRELIMINARY': 'tom_alertstreams.alertstreams.alertstream.alert_logger',
|
|
115
|
+
'gcn.classic.text.LVC_RETRACTION': 'tom_alertstreams.alertstreams.alertstream.alert_logger',
|
|
116
|
+
},
|
|
117
|
+
},
|
|
118
|
+
}
|
|
119
|
+
]
|
|
120
|
+
```
|
|
121
|
+
|
|
122
|
+
The configuration dictionary for each `AlertStream` subclass will contain these key-value pairs:
|
|
123
|
+
* `ACTIVE`: Boolean which tells `readstreams` to access this stream. Should be `True`, unless you want to
|
|
124
|
+
keep a configuration dictionary, but ignore the stream.
|
|
125
|
+
* `NAME`: The name of the `AlertStream` subclass that implements the interface to this Kafka stream. `tom_alertstreams`
|
|
126
|
+
will provide `AlertStream` subclasses for major astronomical Kafka streams. See below for instructions on Subclassing
|
|
127
|
+
the `AlertStream` base class.
|
|
128
|
+
* `OPTIONS`: A dictionary of key-value pairs specific to the`AlertStream` subclass given by `NAME`. The doc string for
|
|
129
|
+
the `AlertStream` subclass should document what is expected. Typically, a URL, authentication information, and a
|
|
130
|
+
dictionary, `TOPIC_HANDLERS`, will be required. See "Subclassing `AlertStream`" below. The `AlertStream` subclass will
|
|
131
|
+
convert the key-value pairs of the `OPTIONS` dictionary into properties (and values) of the `AlertStream` subclass
|
|
132
|
+
instance.
|
|
133
|
+
* The hopskotch alert stream supports a wildcard of `*` for an alert handler topic name. If specified, ALL public topics will be subscribed and use that handler function. A directly specified topic handler will always be used before the `*` handler for any topic that is covered twice.
|
|
134
|
+
|
|
135
|
+
### Getting Kafka Stream Credentials
|
|
136
|
+
As part of your `OPTIONS` for each Kafka stream, you need to configure access credentials. Visit these links
|
|
137
|
+
to get credentials for [Hopskotch](https://hop.scimma.org/) and [GCN Classic over Kafka](https://gcn.nasa.gov/quickstart).
|
|
138
|
+
Set the environment variables with the username and passwords obtained. Do not check them in to your code repository.
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
## Alert Handling
|
|
142
|
+
|
|
143
|
+
Assuming that an `AlertStream` subclass exists for the Kafka stream of interest,
|
|
144
|
+
the keys of the `TOPIC_HANDLERS` dictionary are the topics that will be subscribed to. The values
|
|
145
|
+
of the `TOPIC_HANDLERS` dictionary specify alert handling methods that will be imported and called
|
|
146
|
+
for each alert received on that topic. An example is provided,
|
|
147
|
+
`tom_alerts.alertstreams.alertstream.alert_logger`, which simply logs the alert.
|
|
148
|
+
|
|
149
|
+
To customize this behaviour according to the needs of your TOM, define an alert handling function for each
|
|
150
|
+
topic that you wish to subscribe to. Your `TOPIC_HANDLERS` dictionary will have an entry for each topic
|
|
151
|
+
whose key is the topic name and whose value is a string indicating the dot-path to the alert handling function.
|
|
152
|
+
When the `AlertStream` subclass is instantiated, the `OPTIONS` dictionary is read and an `alert_handler`
|
|
153
|
+
dictionary is created. It is keyed by topic name and its values are the imported callable functions specified by the
|
|
154
|
+
dot-path strings. `readstreams` will call the alert handler for each alert that comes in on the topic. The signature
|
|
155
|
+
of the alert handling function is specific to the `AlertStream` subclass.
|
|
156
|
+
|
|
157
|
+
## Subclassing `AlertStream`
|
|
158
|
+
|
|
159
|
+
Ideally, As a TOM developer, there is already an `AlertStream`-subclass for the alert stream that you
|
|
160
|
+
want your TOM to listen to. If so, you need only to configure your TOM to use it in `settings.py`
|
|
161
|
+
`ALERT_STREAMS`. If you must implement your own `AlertStream` subclass, please get in touch. In the meantime, here's a brief outline:
|
|
162
|
+
|
|
163
|
+
1. Create subclass of `AlertStream`.
|
|
164
|
+
|
|
165
|
+
2. Create `required_keys` and `allowed_keys` class variables in your `AlertStream`-subclass.
|
|
166
|
+
|
|
167
|
+
These are lists of strings referring to the keys of the `OPTIONS` dictionary. The purpose of these is to
|
|
168
|
+
help TOM developers using your `AlertStream`-subclass with the key-value pairs in their `ALERT_STREAMS`
|
|
169
|
+
`OPTIONS` configuration dictionary.
|
|
170
|
+
|
|
171
|
+
3. Implement the `listen()` method.
|
|
172
|
+
|
|
173
|
+
This method will be called by the `readstreams` management command and is not expected to return. It
|
|
174
|
+
should instantiate your consumer, subscribe to the topics configured in `ALERT_STREAMS`, and start
|
|
175
|
+
consuming. The detail of this will depend on the kafka-client used. See `alertstreams.gcn.listen()`
|
|
176
|
+
and `alertstreams.hopskotch.listen()` for examples to follow.
|
|
177
|
+
|
|
178
|
+
The loop which consumes messages in your `listen()` method should extract the topic from each message
|
|
179
|
+
and call `self.alert_handler[topic]()` with the message or message-derived arguments specific to your
|
|
180
|
+
kafka client. Users of your `AlertStream`-subclass will write these topic-specific alert handling methods
|
|
181
|
+
and configure them in the `TOPIC_HANDLERS` dictionary of their `ALERT_STREAMS` configuration.
|
|
182
|
+
The `AlertStream` base class will set up the `alert_handler` dictionary according to your users'
|
|
183
|
+
configuration. It helps your users to provide an example `alert_handler()` function in your module as
|
|
184
|
+
an example. (Again, see `alertstreams.gcn.listen()` and `alertstreams.hopskotch.listen()`, their
|
|
185
|
+
configurations in `settings.py`, and the `alertstreams.gcn.alert_logger()` and
|
|
186
|
+
`alertstreams.hopskotch.alert_logger()` methods, for example).
|
|
187
|
+
|
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
# tom-alertstreams
|
|
2
|
+
|
|
3
|
+
`tom-alertstreams` is a reusable TOM Toolkit app for listening to kafka streams.
|
|
4
|
+
|
|
5
|
+
`tom-alertstreams` provides a management command, `readstreams`. There are no `urlpatterns`,
|
|
6
|
+
no Views, and no templates. The `readstreams` management command reads the `settings.py` `ALERT_STREAMS`
|
|
7
|
+
configuration and starts listening to each configured Kafka stream. It is not expected
|
|
8
|
+
to return, and is intended to run along side your TOM's server component. The `ALERT_STREAMS`
|
|
9
|
+
configuration (see below) tells `readstreams` what streams to access, how to access them,
|
|
10
|
+
what topics to listen to, and what to do with messages that arrive on a given topic.
|
|
11
|
+
|
|
12
|
+
## Installation
|
|
13
|
+
|
|
14
|
+
1. Install the package into your TOM environment:
|
|
15
|
+
```bash
|
|
16
|
+
pip install tom-alertstreams
|
|
17
|
+
```
|
|
18
|
+
|
|
19
|
+
2. In your project `settings.py`, add `tom_alertstreams` to your `INSTALLED_APPS` setting:
|
|
20
|
+
|
|
21
|
+
```python
|
|
22
|
+
INSTALLED_APPS = [
|
|
23
|
+
...
|
|
24
|
+
'tom_alertstreams',
|
|
25
|
+
]
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
At this point you can verify the installation by running `./manage.py` to list the available
|
|
29
|
+
management commands and see
|
|
30
|
+
|
|
31
|
+
```bash
|
|
32
|
+
[tom_alertstreams]
|
|
33
|
+
readstreams
|
|
34
|
+
```
|
|
35
|
+
in the output.
|
|
36
|
+
|
|
37
|
+
## Configuration
|
|
38
|
+
|
|
39
|
+
Each Kafka stream that your TOM listens to (via `readstreams`) will have a configuration dictionary
|
|
40
|
+
in your `settings.py` `ALERT_STREAMS`. `ALERT_STREAMS` is a list of configuration dictionaries, one
|
|
41
|
+
dictionary for each Kafka stream. Here's an example `ALERT_STREAMS` configuration for two Kafka streams:
|
|
42
|
+
[SCiMMA Hopskotch](https://scimma.org/hopskotch.html) and
|
|
43
|
+
[GCN Classic over Kafka](https://gcn.nasa.gov/quickstart).
|
|
44
|
+
|
|
45
|
+
```python
|
|
46
|
+
ALERT_STREAMS = [
|
|
47
|
+
{
|
|
48
|
+
'ACTIVE': True,
|
|
49
|
+
'NAME': 'tom_alertstreams.alertstreams.hopskotch.HopskotchAlertStream',
|
|
50
|
+
'OPTIONS': {
|
|
51
|
+
'URL': 'kafka://kafka.scimma.org/',
|
|
52
|
+
# The hop-client requires that the GROUP_ID prefix match the SCIMMA_AUTH_USERNAME
|
|
53
|
+
'GROUP_ID': os.getenv('SCIMMA_AUTH_USERNAME', "") + '-' + 'uniqueidforyourapp12345',
|
|
54
|
+
'USERNAME': os.getenv('SCIMMA_AUTH_USERNAME', None),
|
|
55
|
+
'PASSWORD': os.getenv('SCIMMA_AUTH_PASSWORD', None),
|
|
56
|
+
'TOPIC_HANDLERS': {
|
|
57
|
+
'sys.heartbeat': 'tom_alertstreams.alertstreams.hopskotch.heartbeat_handler',
|
|
58
|
+
'tomtoolkit.test': 'tom_alertstreams.alertstreams.hopskotch.alert_logger',
|
|
59
|
+
'hermes.test': 'tom_alertstreams.alertstreams.hopskotch.alert_logger',
|
|
60
|
+
'hermes.*': 'regex match public topics here, requires * handler to be defined',
|
|
61
|
+
'*': 'default_handler_here'
|
|
62
|
+
},
|
|
63
|
+
},
|
|
64
|
+
},
|
|
65
|
+
{
|
|
66
|
+
'ACTIVE': True,
|
|
67
|
+
'NAME': 'tom_alertstreams.alertstreams.gcn.GCNClassicAlertStream',
|
|
68
|
+
# The keys of the OPTIONS dictionary become (lower-case) properties of the AlertStream instance.
|
|
69
|
+
'OPTIONS': {
|
|
70
|
+
# see https://github.com/nasa-gcn/gcn-kafka-python#to-use for configuration details.
|
|
71
|
+
'GCN_CLASSIC_CLIENT_ID': os.getenv('GCN_CLASSIC_CLIENT_ID', None),
|
|
72
|
+
'GCN_CLASSIC_CLIENT_SECRET': os.getenv('GCN_CLASSIC_CLIENT_SECRET', None),
|
|
73
|
+
'DOMAIN': 'gcn.nasa.gov', # optional, defaults to 'gcn.nasa.gov'
|
|
74
|
+
'CONFIG': { # optional
|
|
75
|
+
# 'group.id': 'tom_alertstreams-my-custom-group-id',
|
|
76
|
+
# 'auto.offset.reset': 'earliest',
|
|
77
|
+
# 'enable.auto.commit': False
|
|
78
|
+
},
|
|
79
|
+
'TOPIC_HANDLERS': {
|
|
80
|
+
'gcn.classic.text.LVC_INITIAL': 'tom_alertstreams.alertstreams.alertstream.alert_logger',
|
|
81
|
+
'gcn.classic.text.LVC_PRELIMINARY': 'tom_alertstreams.alertstreams.alertstream.alert_logger',
|
|
82
|
+
'gcn.classic.text.LVC_RETRACTION': 'tom_alertstreams.alertstreams.alertstream.alert_logger',
|
|
83
|
+
},
|
|
84
|
+
},
|
|
85
|
+
}
|
|
86
|
+
]
|
|
87
|
+
```
|
|
88
|
+
|
|
89
|
+
The configuration dictionary for each `AlertStream` subclass will contain these key-value pairs:
|
|
90
|
+
* `ACTIVE`: Boolean which tells `readstreams` to access this stream. Should be `True`, unless you want to
|
|
91
|
+
keep a configuration dictionary, but ignore the stream.
|
|
92
|
+
* `NAME`: The name of the `AlertStream` subclass that implements the interface to this Kafka stream. `tom_alertstreams`
|
|
93
|
+
will provide `AlertStream` subclasses for major astronomical Kafka streams. See below for instructions on Subclassing
|
|
94
|
+
the `AlertStream` base class.
|
|
95
|
+
* `OPTIONS`: A dictionary of key-value pairs specific to the`AlertStream` subclass given by `NAME`. The doc string for
|
|
96
|
+
the `AlertStream` subclass should document what is expected. Typically, a URL, authentication information, and a
|
|
97
|
+
dictionary, `TOPIC_HANDLERS`, will be required. See "Subclassing `AlertStream`" below. The `AlertStream` subclass will
|
|
98
|
+
convert the key-value pairs of the `OPTIONS` dictionary into properties (and values) of the `AlertStream` subclass
|
|
99
|
+
instance.
|
|
100
|
+
* The hopskotch alert stream supports a wildcard of `*` for an alert handler topic name. If specified, ALL public topics will be subscribed and use that handler function. A directly specified topic handler will always be used before the `*` handler for any topic that is covered twice.
|
|
101
|
+
|
|
102
|
+
### Getting Kafka Stream Credentials
|
|
103
|
+
As part of your `OPTIONS` for each Kafka stream, you need to configure access credentials. Visit these links
|
|
104
|
+
to get credentials for [Hopskotch](https://hop.scimma.org/) and [GCN Classic over Kafka](https://gcn.nasa.gov/quickstart).
|
|
105
|
+
Set the environment variables with the username and passwords obtained. Do not check them in to your code repository.
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
## Alert Handling
|
|
109
|
+
|
|
110
|
+
Assuming that an `AlertStream` subclass exists for the Kafka stream of interest,
|
|
111
|
+
the keys of the `TOPIC_HANDLERS` dictionary are the topics that will be subscribed to. The values
|
|
112
|
+
of the `TOPIC_HANDLERS` dictionary specify alert handling methods that will be imported and called
|
|
113
|
+
for each alert received on that topic. An example is provided,
|
|
114
|
+
`tom_alerts.alertstreams.alertstream.alert_logger`, which simply logs the alert.
|
|
115
|
+
|
|
116
|
+
To customize this behaviour according to the needs of your TOM, define an alert handling function for each
|
|
117
|
+
topic that you wish to subscribe to. Your `TOPIC_HANDLERS` dictionary will have an entry for each topic
|
|
118
|
+
whose key is the topic name and whose value is a string indicating the dot-path to the alert handling function.
|
|
119
|
+
When the `AlertStream` subclass is instantiated, the `OPTIONS` dictionary is read and an `alert_handler`
|
|
120
|
+
dictionary is created. It is keyed by topic name and its values are the imported callable functions specified by the
|
|
121
|
+
dot-path strings. `readstreams` will call the alert handler for each alert that comes in on the topic. The signature
|
|
122
|
+
of the alert handling function is specific to the `AlertStream` subclass.
|
|
123
|
+
|
|
124
|
+
## Subclassing `AlertStream`
|
|
125
|
+
|
|
126
|
+
Ideally, As a TOM developer, there is already an `AlertStream`-subclass for the alert stream that you
|
|
127
|
+
want your TOM to listen to. If so, you need only to configure your TOM to use it in `settings.py`
|
|
128
|
+
`ALERT_STREAMS`. If you must implement your own `AlertStream` subclass, please get in touch. In the meantime, here's a brief outline:
|
|
129
|
+
|
|
130
|
+
1. Create subclass of `AlertStream`.
|
|
131
|
+
|
|
132
|
+
2. Create `required_keys` and `allowed_keys` class variables in your `AlertStream`-subclass.
|
|
133
|
+
|
|
134
|
+
These are lists of strings referring to the keys of the `OPTIONS` dictionary. The purpose of these is to
|
|
135
|
+
help TOM developers using your `AlertStream`-subclass with the key-value pairs in their `ALERT_STREAMS`
|
|
136
|
+
`OPTIONS` configuration dictionary.
|
|
137
|
+
|
|
138
|
+
3. Implement the `listen()` method.
|
|
139
|
+
|
|
140
|
+
This method will be called by the `readstreams` management command and is not expected to return. It
|
|
141
|
+
should instantiate your consumer, subscribe to the topics configured in `ALERT_STREAMS`, and start
|
|
142
|
+
consuming. The detail of this will depend on the kafka-client used. See `alertstreams.gcn.listen()`
|
|
143
|
+
and `alertstreams.hopskotch.listen()` for examples to follow.
|
|
144
|
+
|
|
145
|
+
The loop which consumes messages in your `listen()` method should extract the topic from each message
|
|
146
|
+
and call `self.alert_handler[topic]()` with the message or message-derived arguments specific to your
|
|
147
|
+
kafka client. Users of your `AlertStream`-subclass will write these topic-specific alert handling methods
|
|
148
|
+
and configure them in the `TOPIC_HANDLERS` dictionary of their `ALERT_STREAMS` configuration.
|
|
149
|
+
The `AlertStream` base class will set up the `alert_handler` dictionary according to your users'
|
|
150
|
+
configuration. It helps your users to provide an example `alert_handler()` function in your module as
|
|
151
|
+
an example. (Again, see `alertstreams.gcn.listen()` and `alertstreams.hopskotch.listen()`, their
|
|
152
|
+
configurations in `settings.py`, and the `alertstreams.gcn.alert_logger()` and
|
|
153
|
+
`alertstreams.hopskotch.alert_logger()` methods, for example).
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "tom-alertstreams"
|
|
3
|
+
description = "Reusable TOMToolkit app for listening to kafka streams."
|
|
4
|
+
authors = [
|
|
5
|
+
{name = "TOM Toolkit Project", email = "tomtoolkit-maintainers@lco.global"},
|
|
6
|
+
{name = "Lindy Lindstrom", email = "llindstrom@lco.global"}
|
|
7
|
+
]
|
|
8
|
+
maintainers = [
|
|
9
|
+
{name = "Joey Chatelain", email = "jchate6@gmail.com"},
|
|
10
|
+
{name = "William Lindstrom", email = "llindstrom@lco.global"},
|
|
11
|
+
]
|
|
12
|
+
license = "GPL-3.0-only"
|
|
13
|
+
readme = "README.md"
|
|
14
|
+
repository = "https://github.com/TOMToolkit/tom-alertstreams"
|
|
15
|
+
keywords = ["tomtoolkit", "astronomy", "astrophysics", "cosmology", "science"]
|
|
16
|
+
classifiers = [
|
|
17
|
+
"Environment :: Web Environment",
|
|
18
|
+
"Framework :: Django",
|
|
19
|
+
"Framework :: Django :: 4.1",
|
|
20
|
+
"Intended Audience :: Developers",
|
|
21
|
+
"Intended Audience :: Science/Research",
|
|
22
|
+
"License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
|
|
23
|
+
"Operating System :: OS Independent",
|
|
24
|
+
"Programming Language :: Python",
|
|
25
|
+
"Programming Language :: Python :: 3",
|
|
26
|
+
"Programming Language :: Python :: 3 :: Only",
|
|
27
|
+
"Programming Language :: Python :: 3.9",
|
|
28
|
+
"Programming Language :: Python :: 3.10",
|
|
29
|
+
"Programming Language :: Python :: 3.11",
|
|
30
|
+
"Programming Language :: Python :: 3.12",
|
|
31
|
+
"Topic :: Scientific/Engineering :: Astronomy",
|
|
32
|
+
"Topic :: Scientific/Engineering :: Physics"
|
|
33
|
+
]
|
|
34
|
+
dynamic = ["version"]
|
|
35
|
+
requires-python = ">=3.9.0,<3.13"
|
|
36
|
+
packages = [
|
|
37
|
+
{ include = "tom_alertstreams" }
|
|
38
|
+
]
|
|
39
|
+
dependencies = [
|
|
40
|
+
"tomtoolkit >=2.22,<3.0",
|
|
41
|
+
"psycopg2-binary >=2.9,<3.0",
|
|
42
|
+
"gcn-kafka >=0.3,<1.0",
|
|
43
|
+
"hop-client >=0.10,<1.0"
|
|
44
|
+
]
|
|
45
|
+
|
|
46
|
+
[tool.setuptools_scm]
|
|
47
|
+
|
|
48
|
+
[tool.poetry]
|
|
49
|
+
version = "0.0.0" # version supplied by poetry-dynamic-versioning
|
|
50
|
+
[tool.poetry.group.test.dependencies]
|
|
51
|
+
coverage = "^7.6.1"
|
|
52
|
+
factory_boy = "^3.1.0"
|
|
53
|
+
pytest = "^8.3.2"
|
|
54
|
+
|
|
55
|
+
[tool.poetry.requires-plugins]
|
|
56
|
+
poetry-dynamic-versioning = { version = ">=1.0.0,<2.0.0", extras = ["plugin"] }
|
|
57
|
+
|
|
58
|
+
[tool.poetry-dynamic-versioning]
|
|
59
|
+
enable = true
|
|
60
|
+
vcs = "git"
|
|
61
|
+
style = "pep440"
|
|
62
|
+
# the default pattern regex makes the 'v' mandatory
|
|
63
|
+
# this pattern modifies the default regex in order to make the 'v' optional
|
|
64
|
+
# ('v' becomes '[v]?' meaning a single v, [v], and ? means optional)
|
|
65
|
+
pattern = "(?x)^[v]?((?P<epoch>\\d+)!)?(?P<base>\\d+(\\.\\d+)*)([-._]?((?P<stage>[a-zA-Z]+)[-._]?(?P<revision>\\d+)?))?(\\+(?P<tagged_metadata>.+))?$"
|
|
66
|
+
|
|
67
|
+
# substitute version not only in pyproject.toml (which the config block above does)
|
|
68
|
+
# but also the __version__.py file (using the default value of the files property).
|
|
69
|
+
[tool.poetry-dynamic-versioning.substitution]
|
|
70
|
+
|
|
71
|
+
[build-system]
|
|
72
|
+
requires = [
|
|
73
|
+
"poetry-core>=1.0.0",
|
|
74
|
+
"poetry-dynamic-versioning >=1.0.0, <2.0.0"]
|
|
75
|
+
build-backend = "poetry.core.masonry.api"
|
|
File without changes
|
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
import abc
|
|
2
|
+
import logging
|
|
3
|
+
|
|
4
|
+
from django.conf import settings
|
|
5
|
+
from django.core.exceptions import ImproperlyConfigured
|
|
6
|
+
from django.utils.module_loading import import_string
|
|
7
|
+
|
|
8
|
+
logger = logging.getLogger(__name__)
|
|
9
|
+
logger.setLevel(logging.DEBUG)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def get_default_alert_streams():
    """Return the AlertStream instances configured in ``settings.ALERT_STREAMS``.

    Raises ImproperlyConfigured if the ALERT_STREAMS setting is missing
    (the AttributeError from the settings lookup is converted).
    """
    try:
        return get_alert_streams(settings.ALERT_STREAMS)
    except AttributeError as err:
        # settings has no ALERT_STREAMS attribute at all
        raise ImproperlyConfigured(err)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def get_alert_streams(alert_stream_configs: list):
    """Return AlertStream subclass instances built from the given list of
    configuration dictionaries (the format of the settings.py ALERT_STREAMS value).

    Use get_default_alert_streams() if you want the AlertStreams configured in settings.py.

    Raises ImproperlyConfigured (chained to the original ImportError) when a
    configured NAME cannot be imported.
    """
    alert_streams = []  # build and return this list of AlertStream subclass instances
    for alert_stream_config in alert_stream_configs:
        # skip configs that are not active; a missing ACTIVE key defaults to active
        if not alert_stream_config.get('ACTIVE', True):
            logger.debug(f'get_alert_streams - ignoring inactive stream: {alert_stream_config["NAME"]}')
            continue
        try:
            klass = import_string(alert_stream_config['NAME'])
        except ImportError as err:
            msg = (
                f'The module (the value of the NAME key): {alert_stream_config["NAME"]} could not be imported. '
                f'Check your ALERT_STREAMS setting.'
            )
            # chain the original ImportError so the root cause stays visible
            raise ImproperlyConfigured(msg) from err

        # the OPTIONS dict becomes the kwargs of the AlertStream subclass __init__
        alert_stream: AlertStream = klass(**alert_stream_config.get("OPTIONS", {}))
        alert_streams.append(alert_stream)

    return alert_streams
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class AlertStream(abc.ABC):
    """Base class for concrete alert streams such as Hopskotch and GCNClassic.

    * The kwargs passed to __init__ are the OPTIONS dictionary defined in the
      ALERT_STREAMS configuration dictionary (for example, see settings.py).
    * Subclasses define ``allowed_keys`` and ``required_keys`` class properties
      describing the OPTIONS keys they accept/need.
    * Each allowed key becomes a lower-cased instance attribute in __init__.
    * Missing required keys raise Django's ImproperlyConfigured.

    To implement a subclass:
    1. define ``allowed_keys`` and ``required_keys`` as class variables
       (they are matched against the OPTIONS dict of the ALERT_STREAMS config);
    2. implement ``listen()`` — this method is typically an infinite loop;
    3. write your alert handlers, which take an alert and do something with it.
       ``HopskotchAlertStream.listen()`` and ``GCNClassicAlertStream.listen()``
       show how the ``alert_handler`` dictionary (topic -> callable) is used.
    """

    def __init__(self, *args, **kwargs) -> None:
        super().__init__()

        # expose each allowed OPTIONS key as a lower-cased instance attribute
        for key, value in kwargs.items():
            if key in self.allowed_keys:
                setattr(self, key.lower(), value)

        missing_keys = set(self.required_keys) - set(kwargs.keys())
        if missing_keys:
            raise ImproperlyConfigured(
                f'The following required keys are missing from the configuration OPTIONS of '
                f'{self._get_stream_classname()}: {list(missing_keys)} ; '
                f'These keys were found: {list(kwargs.keys())} ; '
                f'Check your ALERT_STREAMS setting.'
            )

        # topic name -> callable, built from the TOPIC_HANDLERS dot-path strings
        self.alert_handler = self._process_topic_handlers()

    def _get_stream_classname(self) -> str:
        """Return the qualified class name of this instance (used in error messages)."""
        return type(self).__qualname__

    def _process_topic_handlers(self) -> dict:
        """Import each TOPIC_HANDLERS dot-path string and return a dictionary
        keyed by topic whose values are the resulting callables.
        """
        return {
            topic: import_string(dot_path)
            for topic, dot_path in self.topic_handlers.items()
        }

    @abc.abstractmethod
    def listen(self):
        """Listen at the stream and dispatch alerts to handlers.

        Subclass implementations of this method are not expected to return.
        See hopskotch.py and gcn.py for example implementations.
        """
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
|
|
3
|
+
from gcn_kafka import Consumer
|
|
4
|
+
|
|
5
|
+
from tom_alertstreams.alertstreams.alertstream import AlertStream
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
logger = logging.getLogger(__name__)
|
|
9
|
+
logger.setLevel(logging.DEBUG)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class GCNClassicAlertStream(AlertStream):
    """AlertStream subclass for GCN Classic over Kafka.

    Pre-requisite: visit gcn.nasa.gov and sign-up to get your client_id and
    client_secret.
    """
    # Upon __init__, the AlertStream base class creates instance properties from
    # the settings OPTIONS dictionary, converting the keys to lowercase.
    required_keys = ['GCN_CLASSIC_CLIENT_ID', 'GCN_CLASSIC_CLIENT_SECRET', 'TOPIC_HANDLERS']
    allowed_keys = ['GCN_CLASSIC_CLIENT_ID', 'GCN_CLASSIC_CLIENT_SECRET', 'TOPIC_HANDLERS', 'DOMAIN', 'CONFIG']

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        # properties have been created from the OPTIONS dictionary

    def listen(self):
        """Consume alerts from the configured GCN Classic topics forever and
        dispatch each alert to the handler configured for its topic.

        Not expected to return.
        """
        super().listen()

        # BUGFIX: DOMAIN and CONFIG are documented as optional OPTIONS keys, so they
        # may not exist as instance attributes; fall back to sensible defaults
        # instead of raising AttributeError.
        consumer = Consumer(client_id=self.gcn_classic_client_id,
                            client_secret=self.gcn_classic_client_secret,
                            domain=getattr(self, 'domain', 'gcn.nasa.gov'),
                            config=getattr(self, 'config', {}),
                            )

        # subscribe to every topic that has a configured handler
        consumer.subscribe(list(self.topic_handlers.keys()))

        # alerts are cimpl.Message instances; see
        # https://docs.confluent.io/4.1.1/clients/confluent-kafka-python/index.html#message
        try:
            while True:
                for alert in consumer.consume():
                    kafka_error = alert.error()  # cimpl.KafkaError or None
                    if kafka_error is None:
                        # no error, so call the alert handler for this topic
                        topic = alert.topic()
                        try:
                            self.alert_handler[topic](alert)
                        except KeyError as err:
                            logger.error(f'alert from topic {topic} received but no handler defined. err: {err}')
                    else:
                        logger.error(f'GCNClassicAlertStream KafkaError: {kafka_error.name()}: {kafka_error.str()}')
        finally:
            # BUGFIX: the original consumer.close() was unreachable (placed after the
            # `while True` loop); closing in a finally block guarantees cleanup
            # if the consume loop raises.
            consumer.close()
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def alert_logger(alert):
    """Example alert handler for GCN Classic over Kafka.

    This alert handler simply logs the topic and value of the cimpl.Message instance.

    See https://docs.confluent.io/4.1.1/clients/confluent-kafka-python/index.html#message
    for cimpl.Message details.
    """
    # use lazy %-style logging args so the message is only formatted when emitted
    logger.info('gcn.alert_logger alert.topic(): %s', alert.topic())
    logger.info('gcn.alert_logger alert.value(): %s', alert.value())
|
|
69
|
+
|
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
from datetime import datetime, timezone
|
|
2
|
+
import logging
|
|
3
|
+
import re
|
|
4
|
+
import uuid
|
|
5
|
+
import traceback
|
|
6
|
+
|
|
7
|
+
from django.utils import timezone as tz
|
|
8
|
+
from django.core.exceptions import ImproperlyConfigured
|
|
9
|
+
|
|
10
|
+
from hop import Stream
|
|
11
|
+
from hop.auth import Auth
|
|
12
|
+
from hop.models import JSONBlob
|
|
13
|
+
from hop.io import Metadata, StartPosition, list_topics
|
|
14
|
+
|
|
15
|
+
from tom_alertstreams.alertstreams.alertstream import AlertStream
|
|
16
|
+
|
|
17
|
+
# Module-level logger for this alert stream.
logger = logging.getLogger(__name__)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class HopskotchAlertStream(AlertStream):
    """AlertStream subclass for SCiMMA's Hopskotch Kafka broker (via hop-client)."""
    required_keys = ['URL', 'GROUP_ID', 'USERNAME', 'PASSWORD', 'TOPIC_HANDLERS']
    allowed_keys = ['URL', 'GROUP_ID', 'USERNAME', 'PASSWORD', 'TOPIC_HANDLERS']
    PUBLIC_TOPIC_CHECK_INTERVAL = 300  # Seconds between checking for new public topics

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        # the following methods may fail if improperly configured.
        # So, do them now to catch any errors, before listen() is spawned in its own Process.
        self.public_topics = self.get_all_public_topics()
        self.stream_url = self.get_stream_url()
        self.stream = self.get_stream()

    def get_all_public_topics(self) -> list[str]:
        """Return the up-to-date list of Topic names to consume.

        Use the saved options to repeatedly construct the topic list, and
        keep it in sync with the publicly_readable topics from SCiMMA Auth.

        The Topic list is a combination of
        a. the publicly_readable Topics from SCiMMA Auth
        b. any topics supplied on the command line via -T, --topic
        """
        hop_auth = Auth(self.username, self.password)
        logger.info('getting publicly_readable topics from SCiMMA Auth.')
        # use the hop-client to ask Kafka directly for the topics since SCiMMA Auth can be out of sync
        # include only topics that a) contain a '.'; b) don't start with '__' (excludes __consumer_offsets)
        # NOTE(review): the filter below excludes only topics that BOTH start with '__'
        # AND contain no '.' — a dot-less topic such as 'foo' is still included, which
        # does not match the comment above. Confirm whether 'and' or 'or' is intended.
        publicly_readable_topics = [topic for topic in list_topics(self.url, hop_auth).keys()
                                    if not (topic.startswith('__') and (topic.count('.')==0))]
        logger.info(f'publicly_readable_topics: {publicly_readable_topics}')

        return publicly_readable_topics

    def get_stream_url(self) -> str:
        """For Hopskotch, topics are specified on the url. So, this
        method gets a base url (from super) and then adds topics to it.

        Hopskotch (hop.io) requires at least one topic to be specified.

        You might not need a method like this if your Kafka client provides
        alternative ways to subscribe to a topic. For example, the gcn_kafka.Consumer
        class provides a 'subscribe([list of topics])' method. (see gcn.py).
        """
        logger.debug(f'HopskotchAlertStream.get_stream_url topics: {list(self.topic_handlers.keys())}')
        if self.topic_handlers == {}:
            msg = 'Hopskotch requires at least one topic to open the stream. Check ALERT_STREAMS in settings.py'
            raise ImproperlyConfigured(msg)

        base_stream_url = self.url

        # if not present, add trailing slash to base_stream url
        # so, comma-separated topics can be appended.
        if base_stream_url[-1] != '/':
            base_stream_url += '/'

        # append comma-separated topics to base URL
        specified_topics = list(self.topic_handlers.keys())
        if '*' in specified_topics:
            # Add all public topics if an asterisk is set in the topic_handlers
            specified_topics = list(set(specified_topics + self.public_topics))
            # Also remove topics with wildcards in them
            specified_topics = [topic for topic in specified_topics if not '*' in topic]

        topics = ','.join(specified_topics)  # 'topic1,topic2,topic3'
        hopskotch_stream_url = base_stream_url + topics

        logger.debug(f'HopskotchAlertStream.get_stream_url url: {hopskotch_stream_url}')
        return hopskotch_stream_url

    def get_stream(self, start_position=StartPosition.LATEST) -> Stream:
        """Return a hop.Stream configured with this stream's credentials."""
        hop_auth = Auth(self.username, self.password)

        # TODO: allow StartPosition to be set from OPTIONS configuration dictionary
        stream = Stream(auth=hop_auth, start_at=start_position)
        return stream

    def listen(self):
        """Open the stream and dispatch each alert to its topic's handler.

        Does not return under normal operation. The stream is periodically
        reopened (via break) when the set of public topics changes.
        """
        super().listen()
        # TODO: alternatively, WARN upon OPTIONS['topics'] entries that don't have
        # handlers in the alert_handler. (i.e they've configured a topic subscription
        # without providing a handler for the topic. So, warn them).
        last_check_time = tz.now()
        while True:
            try:
                logger.info(f'HopskotchAlertStream.listen opening stream: {self.stream_url} with group_id: {self.group_id}')
                with self.stream.open(self.stream_url, 'r', group_id=self.group_id) as src:
                    for alert, metadata in src.read(metadata=True):
                        # type(alert) is a hop.models model class
                        # type(metadata) is <hop.io.Metadata>
                        if metadata.topic in self.alert_handler:
                            # TODO: should probably use *args, **kwargs to pass unknown number of arguments
                            self.alert_handler[metadata.topic](alert, metadata)
                        elif '*' in self.alert_handler:
                            # First check all wildcard topics to see if they will match this topic
                            matched_handler = False
                            for topic in self.alert_handler.keys():
                                if topic != '*' and '*' in topic and re.match(topic, metadata.topic):
                                    self.alert_handler[topic](alert, metadata)
                                    matched_handler = True
                                    break
                            # If nothing matched, fall back to default public topic handler
                            if not matched_handler:
                                self.alert_handler['*'](alert, metadata)
                        else:
                            logger.error(f'alert from topic {metadata.topic} received but no handler defined.')
                            # TODO: should define a default handler for all unhandled topics
                        if (tz.now() - last_check_time).total_seconds() > self.PUBLIC_TOPIC_CHECK_INTERVAL:
                            last_check_time = tz.now()
                            public_topics = self.get_all_public_topics()
                            if set(public_topics) != set(self.public_topics):
                                logger.info(f"New public topics found, restarting hop stream")
                                self.public_topics = public_topics
                                self.stream_url = self.get_stream_url()
                                # break out of the read loop; the enclosing while-loop
                                # reopens the stream with the updated URL
                                break
            except Exception as ex:
                # NOTE(review): broad catch keeps the listener alive on any error;
                # consider narrowing once the failure modes are understood.
                logger.error(f'HopskotchAlertStream.listen: {ex}')
                logger.error(traceback.format_exc())  # Show the traceback so we have a chance of figuring out what is breaking
|
|
138
|
+
|
|
139
|
+
def heartbeat_handler(heartbeat: JSONBlob, metadata: Metadata):
    """Example alert handler for the HopskotchAlertStream sys.heartbeat topic.

    Note that the HopskotchAlertStream.listen() method knows that Hopskotch alerts
    come with both alert and metadata. So, the alert_handler methods have a
    signature (taking both as arguments) specific to this stream.
    """
    content: dict = heartbeat.content  # see hop-client readthedocs
    # heartbeat timestamps are microseconds since the epoch; make an aware UTC datetime
    timestamp = datetime.fromtimestamp(content["timestamp"] / 1e6, tz=timezone.utc)
    if content['count'] % 300 == 0:
        # mod 300 just for convenience so as not to flood the logger
        # bug fix: use the module logger, not the root logger via logging.info()
        logger.info(f'{timestamp.isoformat()} heartbeat.content dict: {content}. metadata: {metadata}')
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def alert_logger(alert: JSONBlob, metadata: Metadata):
    """Example alert handler. The method signature is specific to Hopskotch alerts."""
    # search the header (list of tuples) for a UUID-tuple (keyed by '_id')
    # eg. ('_id', b'$\xd6oGmVM\xed\x97\xe7|\x1c\x8f\x11V\xe9')
    # robustness: metadata.headers may be None when the message carries no headers
    alert_uuid_tuple = next((item for item in (metadata.headers or []) if item[0] == '_id'), None)
    if alert_uuid_tuple:
        alert_uuid = uuid.UUID(bytes=alert_uuid_tuple[1])
    else:
        # in this case the alert was probably published with hop-client<0.8.0
        alert_uuid = None
    # typo fix in the log message: 'metatdata' -> 'metadata'
    logger.info(f'Alert (uuid={alert_uuid}) received on topic {metadata.topic}: {alert}; metadata: {metadata}')
|
|
File without changes
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import datetime
|
|
2
|
+
import logging
|
|
3
|
+
|
|
4
|
+
from django.core.exceptions import ImproperlyConfigured
|
|
5
|
+
from django.core.management.base import BaseCommand
|
|
6
|
+
|
|
7
|
+
from tom_alertstreams.alertstreams.alertstream import get_default_alert_streams
|
|
8
|
+
from tom_alertstreams.alertstreams.hopskotch import HopskotchAlertStream
|
|
9
|
+
|
|
10
|
+
# Module-level logger for this management command.
logger = logging.getLogger(__name__)
# logger.setLevel(logging.DEBUG)
logger.setLevel(logging.INFO)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class Command(BaseCommand):
    help = 'Publish a timestamped test message to Hopskotch tomtoolkit.test topic.'

    def handle(self, *args, **options):
        """Find the configured HopskotchAlertStream and publish one test message."""
        logger.debug(f'hoptestpub.Command.handle() args: {args}')
        logger.debug(f'hoptestpub.Command.handle() options: {options}')

        try:
            alert_streams = get_default_alert_streams()
        except ImproperlyConfigured as ex:
            logger.error(f'{ex.__class__.__name__}: Configure alert streams in settings.py ALERT_STREAMS: {ex}')
            exit(1)

        # extract the HopskotchAlertStream; bug fix: supply a default so next() does
        # not raise an unhandled StopIteration when no Hopskotch stream is configured
        hopskotch_alert_stream = next(
            (stream for stream in alert_streams if isinstance(stream, HopskotchAlertStream)), None)
        if hopskotch_alert_stream is None:
            logger.error('No HopskotchAlertStream configured in settings.py ALERT_STREAMS')
            exit(1)

        stream = hopskotch_alert_stream.get_stream()
        topic = 'tomtoolkit.test'  # SCiMMA Admin topic permissions for Credential are assumed to have been set up

        with stream.open(hopskotch_alert_stream.url + topic, "w") as s:
            s.write({
                # datetime.utcnow() is deprecated and naive; use an aware UTC timestamp
                'created': datetime.datetime.now(datetime.timezone.utc).isoformat(),
                'created-by': 'tom-alertstreams hoptestpub.py'
            })

        logger.info('hoptestpub Command.handle() returning...')
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from threading import Thread
|
|
3
|
+
|
|
4
|
+
from django.core.exceptions import ImproperlyConfigured
|
|
5
|
+
from django.core.management.base import BaseCommand
|
|
6
|
+
|
|
7
|
+
from tom_alertstreams.alertstreams.alertstream import get_default_alert_streams
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
# Module-level logger for this management command.
logger = logging.getLogger(__name__)
# logger.setLevel(logging.DEBUG)
logger.setLevel(logging.INFO)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class Command(BaseCommand):
    help = 'Consume alerts from the alert streams configured in the settings.py ALERT_STREAMS'

    def handle(self, *args, **options):
        """Spawn one listener Thread per configured alert stream and wait on them."""
        logger.debug(f'readstreams.Command.handle() args: {args}')
        logger.debug(f'readstreams.Command.handle() options: {options}')

        try:
            alert_streams = get_default_alert_streams()
        except ImproperlyConfigured as ex:
            logger.error(f'{ex.__class__.__name__}: Configure alert streams in settings.py ALERT_STREAMS: {ex}')
            exit(1)

        threads = []
        try:
            # listen to each alert_stream in its own Thread (sort of at the same time)
            for alert_stream in alert_streams:
                t = Thread(target=alert_stream.listen, name=alert_stream._get_stream_classname())
                t.start()
                threads.append(t)
                logger.info((f'read_streams {alert_stream._get_stream_classname()} TID={t.native_id} ; '
                             f'thread identifier={t.ident}'))
            # bug fix: join the listener threads so handle() blocks while they run;
            # previously handle() returned immediately after starting the threads, so
            # the KeyboardInterrupt handler below could effectively never fire.
            for t in threads:
                t.join()
        except KeyboardInterrupt as msg:
            logger.info(f'read_streams handling KeyboardInterrupt {msg}')

        logger.info('readstreams Command.handle() returning...')
|
|
File without changes
|