intelmq_extensions-1.8.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (100)
  1. intelmq_extensions/__init__.py +0 -0
  2. intelmq_extensions/bots/__init__.py +0 -0
  3. intelmq_extensions/bots/collectors/blackkite/__init__.py +0 -0
  4. intelmq_extensions/bots/collectors/blackkite/_client.py +167 -0
  5. intelmq_extensions/bots/collectors/blackkite/collector.py +182 -0
  6. intelmq_extensions/bots/collectors/disp/__init__.py +0 -0
  7. intelmq_extensions/bots/collectors/disp/_client.py +121 -0
  8. intelmq_extensions/bots/collectors/disp/collector.py +104 -0
  9. intelmq_extensions/bots/collectors/xmpp/__init__.py +0 -0
  10. intelmq_extensions/bots/collectors/xmpp/collector.py +210 -0
  11. intelmq_extensions/bots/experts/__init__.py +0 -0
  12. intelmq_extensions/bots/experts/certat_contact_intern/__init__.py +0 -0
  13. intelmq_extensions/bots/experts/certat_contact_intern/expert.py +139 -0
  14. intelmq_extensions/bots/experts/copy_extra/__init__.py +0 -0
  15. intelmq_extensions/bots/experts/copy_extra/expert.py +27 -0
  16. intelmq_extensions/bots/experts/event_group_splitter/__init__.py +0 -0
  17. intelmq_extensions/bots/experts/event_group_splitter/expert.py +117 -0
  18. intelmq_extensions/bots/experts/event_splitter/__init__.py +0 -0
  19. intelmq_extensions/bots/experts/event_splitter/expert.py +41 -0
  20. intelmq_extensions/bots/experts/squelcher/__init__.py +0 -0
  21. intelmq_extensions/bots/experts/squelcher/expert.py +316 -0
  22. intelmq_extensions/bots/experts/vulnerability_lookup/__init__.py +0 -0
  23. intelmq_extensions/bots/experts/vulnerability_lookup/expert.py +136 -0
  24. intelmq_extensions/bots/outputs/__init__.py +0 -0
  25. intelmq_extensions/bots/outputs/mattermost/__init__.py +0 -0
  26. intelmq_extensions/bots/outputs/mattermost/output.py +113 -0
  27. intelmq_extensions/bots/outputs/to_logs/__init__.py +0 -0
  28. intelmq_extensions/bots/outputs/to_logs/output.py +12 -0
  29. intelmq_extensions/bots/outputs/xmpp/__init__.py +0 -0
  30. intelmq_extensions/bots/outputs/xmpp/output.py +180 -0
  31. intelmq_extensions/bots/parsers/__init__.py +0 -0
  32. intelmq_extensions/bots/parsers/blackkite/__init__.py +0 -0
  33. intelmq_extensions/bots/parsers/blackkite/_transformers.py +202 -0
  34. intelmq_extensions/bots/parsers/blackkite/parser.py +65 -0
  35. intelmq_extensions/bots/parsers/disp/__init__.py +0 -0
  36. intelmq_extensions/bots/parsers/disp/parser.py +125 -0
  37. intelmq_extensions/bots/parsers/malwaredomains/__init__.py +0 -0
  38. intelmq_extensions/bots/parsers/malwaredomains/parser.py +63 -0
  39. intelmq_extensions/cli/__init__.py +0 -0
  40. intelmq_extensions/cli/create_reports.py +161 -0
  41. intelmq_extensions/cli/intelmqcli.py +657 -0
  42. intelmq_extensions/cli/lib.py +670 -0
  43. intelmq_extensions/cli/utils.py +12 -0
  44. intelmq_extensions/etc/harmonization.conf +434 -0
  45. intelmq_extensions/etc/squelcher.conf +52 -0
  46. intelmq_extensions/lib/__init__.py +0 -0
  47. intelmq_extensions/lib/api_helpers.py +105 -0
  48. intelmq_extensions/lib/blackkite.py +29 -0
  49. intelmq_extensions/tests/__init__.py +0 -0
  50. intelmq_extensions/tests/base.py +336 -0
  51. intelmq_extensions/tests/bots/__init__.py +0 -0
  52. intelmq_extensions/tests/bots/collectors/__init__.py +0 -0
  53. intelmq_extensions/tests/bots/collectors/blackkite/__init__.py +0 -0
  54. intelmq_extensions/tests/bots/collectors/blackkite/base.py +45 -0
  55. intelmq_extensions/tests/bots/collectors/blackkite/test_client.py +154 -0
  56. intelmq_extensions/tests/bots/collectors/blackkite/test_collector.py +287 -0
  57. intelmq_extensions/tests/bots/collectors/disp/__init__.py +0 -0
  58. intelmq_extensions/tests/bots/collectors/disp/base.py +147 -0
  59. intelmq_extensions/tests/bots/collectors/disp/test_client.py +134 -0
  60. intelmq_extensions/tests/bots/collectors/disp/test_collector.py +137 -0
  61. intelmq_extensions/tests/bots/collectors/xmpp/__init__.py +0 -0
  62. intelmq_extensions/tests/bots/collectors/xmpp/test_collector.py +10 -0
  63. intelmq_extensions/tests/bots/experts/__init__.py +0 -0
  64. intelmq_extensions/tests/bots/experts/certat_contact_intern/__init__.py +0 -0
  65. intelmq_extensions/tests/bots/experts/certat_contact_intern/test_expert.py +176 -0
  66. intelmq_extensions/tests/bots/experts/copy_extra/__init__.py +0 -0
  67. intelmq_extensions/tests/bots/experts/copy_extra/test_expert.py +42 -0
  68. intelmq_extensions/tests/bots/experts/event_group_splitter/__init__.py +0 -0
  69. intelmq_extensions/tests/bots/experts/event_group_splitter/test_expert.py +302 -0
  70. intelmq_extensions/tests/bots/experts/event_splitter/__init__.py +0 -0
  71. intelmq_extensions/tests/bots/experts/event_splitter/test_expert.py +101 -0
  72. intelmq_extensions/tests/bots/experts/squelcher/__init__.py +0 -0
  73. intelmq_extensions/tests/bots/experts/squelcher/test_expert.py +548 -0
  74. intelmq_extensions/tests/bots/experts/vulnerability_lookup/__init__.py +0 -0
  75. intelmq_extensions/tests/bots/experts/vulnerability_lookup/test_expert.py +203 -0
  76. intelmq_extensions/tests/bots/outputs/__init__.py +0 -0
  77. intelmq_extensions/tests/bots/outputs/mattermost/__init__.py +0 -0
  78. intelmq_extensions/tests/bots/outputs/mattermost/test_output.py +138 -0
  79. intelmq_extensions/tests/bots/outputs/xmpp/__init__.py +0 -0
  80. intelmq_extensions/tests/bots/outputs/xmpp/test_output.py +10 -0
  81. intelmq_extensions/tests/bots/parsers/__init__.py +0 -0
  82. intelmq_extensions/tests/bots/parsers/blackkite/__init__.py +0 -0
  83. intelmq_extensions/tests/bots/parsers/blackkite/data.py +69 -0
  84. intelmq_extensions/tests/bots/parsers/blackkite/test_parser.py +197 -0
  85. intelmq_extensions/tests/bots/parsers/disp/__init__.py +0 -0
  86. intelmq_extensions/tests/bots/parsers/disp/test_parser.py +282 -0
  87. intelmq_extensions/tests/bots/parsers/malwaredomains/__init__.py +0 -0
  88. intelmq_extensions/tests/bots/parsers/malwaredomains/test_parser.py +62 -0
  89. intelmq_extensions/tests/cli/__init__.py +0 -0
  90. intelmq_extensions/tests/cli/test_create_reports.py +97 -0
  91. intelmq_extensions/tests/cli/test_intelmqcli.py +158 -0
  92. intelmq_extensions/tests/lib/__init__.py +0 -0
  93. intelmq_extensions/tests/lib/base.py +81 -0
  94. intelmq_extensions/tests/lib/test_api_helpers.py +126 -0
  95. intelmq_extensions-1.8.1.dist-info/METADATA +60 -0
  96. intelmq_extensions-1.8.1.dist-info/RECORD +100 -0
  97. intelmq_extensions-1.8.1.dist-info/WHEEL +5 -0
  98. intelmq_extensions-1.8.1.dist-info/entry_points.txt +33 -0
  99. intelmq_extensions-1.8.1.dist-info/licenses/LICENSE +661 -0
  100. intelmq_extensions-1.8.1.dist-info/top_level.txt +1 -0
intelmq_extensions/bots/experts/squelcher/expert.py
@@ -0,0 +1,316 @@
+# -*- coding: utf-8 -*-
+"""
+Squelcher Expert marks events as new or old depending on a TTL(ASN, Net, IP).
+"""
+
+from __future__ import unicode_literals
+
+from ipaddress import ip_address, ip_network
+
+from intelmq.lib.bot import Bot
+from intelmq.lib.message import Event
+from intelmq.lib.utils import load_configuration
+
+try:
+    import psycopg2
+except ImportError:
+    psycopg2 = None
+try:
+    import netaddr
+except ImportError:
+    netaddr = None
+
+"""
+If the event in the DB is older than 2 days, then we also check if it has been sent out.
+If this is not the case, we assume the event will be sent out, thus we squelch the new event.
+"""
+SELECT_QUERY = """
+SELECT COUNT(*) FROM {table}
+WHERE
+"time.source" >= LOCALTIMESTAMP - INTERVAL '%(ttl)s SECONDS' AND
+"classification.type" = %(type)s AND
+"classification.identifier" = %(identifier)s AND
+{source_filters}
+notify IS TRUE AND
+("time.source" >= LOCALTIMESTAMP - INTERVAL %(sending_interval)s OR
+(sent_at IS NOT NULL AND "time.source" < LOCALTIMESTAMP - INTERVAL %(sending_interval)s)
+)
+"""
+
+# If the event is newer than sending interval, assume it will be sent soon or already has been,
+# regardless of the report id. If the source time is older, ignore events without report id as
+# they were most probably forgotten
+OPEN_REPORT_QUERY = """
+SELECT COUNT(*) FROM {table}
+WHERE
+"time.source" >= LOCALTIMESTAMP - INTERVAL '%(ttl)s SECONDS' AND
+"classification.type" = %(type)s AND
+"classification.identifier" = %(identifier)s AND
+{source_filters}
+notify IS TRUE AND
+(
+"time.source" >= LOCALTIMESTAMP - INTERVAL %(sending_interval)s OR
+(rtir_report_id IS NOT NULL AND "time.source" < LOCALTIMESTAMP - INTERVAL %(sending_interval)s)
+)
+"""
+
+QUERY_MAP = {"base": SELECT_QUERY, "open_report": OPEN_REPORT_QUERY}
+JSON_FIELDS = ["extra."]
+
+
+class SquelcherExpertBot(Bot):
+    configuration_path: str = ""
+    connect_timeout: int = 5
+    database: str = ""
+    user: str = ""
+    password: str = ""
+    host: str = ""
+    port: str = ""
+    sslmode: str = ""
+    autocommit: bool = True
+    table: str = "contacts"
+    sending_time_interval: int = 1
+    overwrite: bool = False
+    query: str = "base"  # base, open_report
+    source_fields: str = "source.ip"
+    filter_ip_only: str = True
+    use_ttl_field: str = "extra.ttl"
+
+    _filters_mapping: dict = None
+
+    def init(self):
+        self.config = load_configuration(self.configuration_path)
+
+        self.logger.debug("Connecting to PostgreSQL.")
+        if psycopg2 is None:
+            raise ValueError("Could not import psycopg2. Please install it.")
+        if netaddr is None:
+            raise ValueError("Could not import netaddr. Please install it.")
+
+        try:
+            self.con = psycopg2.connect(
+                database=self.database,
+                user=self.user,
+                password=self.password,
+                host=self.host,
+                port=self.port,
+                sslmode=self.sslmode,
+                connect_timeout=self.connect_timeout,
+            )
+            self.cur = self.con.cursor()
+            self.con.autocommit = self.autocommit
+
+        except Exception:
+            self.logger.exception("Failed to connect to database.")
+            self.stop()
+        self.logger.info("Connected to PostgreSQL.")
+
+        self.query_tpl = self._build_query()
+        self.convert_config()
+
+    def _build_query(self):
+        # TODO: Build it using sql-safe syntax
+        # TODO: ensure source field is in the harmonization
+        template = QUERY_MAP[self.query]
+        self._filters_mapping = dict()
+        for idx, field in enumerate(self.source_fields.split(",")):
+            self._filters_mapping[field] = f"source_{idx}"
+
+        conditions = []
+        for field, filter_key in self._filters_mapping.items():
+            json_fields = [f for f in JSON_FIELDS if field.startswith(f)]
+            if json_fields:
+                conditions.append(
+                    (
+                        f'"{json_fields[0][:-1]}" ->> \'{field.replace(json_fields[0], "")}\''
+                        f" IS NOT DISTINCT FROM %({filter_key})s AND"
+                    )
+                )
+            else:
+                # IS NOT DISTINCT makes a good job, but doesn't play well with
+                # unique indexes unless probably PostgreSQL 15 and UNIQUE NULLS NOT DISTINCT
+                # index configuration
+                conditions.append(
+                    f"""CASE WHEN %({filter_key})s IS null THEN
+                        "{field}" is null
+                    ELSE "{field}" = %({filter_key})s
+                    END AND
+                    """
+                )
+        return template.format(table=self.table, source_filters="\n".join(conditions))
+
+    def convert_config(self):
+        for rule_index, ruleset in enumerate(self.config):
+            for key, value in ruleset[0].items():
+                if isinstance(value, list):
+                    self.config[rule_index][0][key] = tuple(value)
+                if isinstance(value, dict):
+                    self.config[rule_index][0][key] = tuple(value.items())
+
+    def convert_event(self, event):
+        event_copy = event.to_dict()
+        for key, value in event_copy.items():
+            if isinstance(value, list):
+                event_copy[key] = tuple(value)
+            if isinstance(value, dict):
+                event_copy[key] = tuple(value.items())
+        return event_copy
+
+    def process(self):
+        event = self.receive_message()
+
+        if "notify" in event and not self.overwrite:
+            self.logger.debug(
+                "Notify field present and not allowed to overwrite, skipping."
+            )
+            self.modify_end(event)
+            return
+
+        if self.filter_ip_only:
+            if "source.ip" not in event and "source.fqdn" in event:
+                self.logger.debug(
+                    "Filtering restricted to IPs, setting notify=true for domain event."
+                )
+                event.add("notify", True, overwrite=True)
+                self.modify_end(event)
+                return
+            if "source.asn" not in event:
+                self.logger.debug("Discarding event as it lacks AS number.")
+                event.add("notify", False, overwrite=True)
+                self.modify_end(event)
+                return
+        ttl = None
+
+        if self.use_ttl_field:
+            try:
+                ttl = int(event[self.use_ttl_field])
+            except KeyError:
+                pass
+
+        if ttl is None:
+            for ruleset in self.config:
+                condition = ruleset[0].copy()
+                conditions = []
+                if "source.network" in condition and "source.ip" in event:
+                    conditions.append(
+                        ip_address(event["source.ip"])
+                        in ip_network(condition["source.network"])
+                    )
+                    del condition["source.network"]
+                if "source.iprange" in condition and "source.ip" in event:
+                    conditions.append(
+                        event["source.ip"]
+                        in netaddr.IPRange(*condition["source.iprange"])
+                    )
+                    del condition["source.iprange"]
+                if set(condition.items()).issubset(
+                    self.convert_event(event).items()
+                ) and all(conditions):
+                    ttl = ruleset[1]["ttl"]
+                    break
+
+        self.logger.debug(
+            "Found TTL {} for ({}, {})." "".format(
+                ttl, event.get("source.asn"), event.get("source.ip")
+            )
+        )
+
+        try:
+            if ttl >= 0:
+                source_filters = {
+                    filter_key: event.get(field)
+                    for field, filter_key in self._filters_mapping.items()
+                }
+                self.cur.execute(
+                    self.query_tpl,
+                    {
+                        "ttl": ttl,
+                        "type": event["classification.type"],
+                        "identifier": event["classification.identifier"],
+                        "sending_interval": self.sending_time_interval,
+                        **source_filters,
+                    },
+                )
+                result = self.cur.fetchone()[0]
+            else:  # never notify with ttl -1
+                result = 1
+        except (
+            psycopg2.InterfaceError,
+            psycopg2.InternalError,
+            psycopg2.OperationalError,
+            AttributeError,
+        ):
+            self.logger.exception("Cursor has been closed, connecting again.")
+            self.init()
+        else:
+            if result == 0:
+                notify = True
+            else:
+                notify = False
+
+            event.add("notify", notify, overwrite=True)
+            self.modify_end(event)
+
+    def shutdown(self):
+        try:
+            self.cur.close()
+        except Exception:
+            pass
+        try:
+            self.con.close()
+        except Exception:
+            pass
+
+    def modify_end(self, event):
+        self.send_message(event)
+        self.acknowledge_message()
+
+    @staticmethod
+    def check(parameters):
+        retval = []
+        try:
+            config = load_configuration(parameters["configuration_path"])
+        except ValueError as exc:
+            return [["error", "Could not load configuration: %r." % exc]]
+        for ruleset in config:
+            condition = ruleset[0].copy()
+            if "source.network" in condition:
+                try:
+                    ip_network(condition["source.network"])
+                except ValueError as exc:
+                    retval += [
+                        [
+                            "warning",
+                            "%r is not a valid IP network: %r."
+                            % (condition["source.network"], exc),
+                        ]
+                    ]
+                del condition["source.network"]
+            if "source.iprange" in condition:
+                try:
+                    netaddr.IPRange(*condition["source.iprange"])
+                except ValueError as exc:
+                    retval += [
+                        [
+                            "warning",
+                            "%r is not a valid IP range: %r."
+                            % (condition["source.iprange"], exc),
+                        ]
+                    ]
+                del condition["source.iprange"]
+            try:
+                Event(condition)
+            except Exception as exc:
+                retval += [
+                    ["warning", "Failed to parse conditions as Event: %r." % (exc)]
+                ]
+            try:
+                int(ruleset[1]["ttl"])
+            except ValueError as exc:
+                retval += [
+                    ["error", "%r is not a valid TTL: %r." % (ruleset[1]["ttl"], exc)]
+                ]
+        return retval if retval else None
+
+
+BOT = SquelcherExpertBot
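
Note: the squelcher loads its rulesets from configuration_path via load_configuration(). A minimal sketch of the expected shape follows: a list of [condition, {"ttl": seconds}] pairs, matched top-down with the first match winning (see process() above). The ASN, network, and TTL values are illustrative placeholders, not taken from the shipped etc/squelcher.conf.

# Hypothetical squelcher ruleset; values are examples only.
EXAMPLE_RULESET = [
    [{"source.asn": 64496}, {"ttl": 86400}],                # one day for this ASN
    [{"source.network": "198.51.100.0/24"}, {"ttl": -1}],   # ttl -1: never notify
    [{}, {"ttl": 172800}],                                  # catch-all default: two days
]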
intelmq_extensions/bots/experts/vulnerability_lookup/expert.py
@@ -0,0 +1,136 @@
+import json
+from typing import Optional
+
+from intelmq.lib.bot import ExpertBot
+from intelmq.lib.mixins import CacheMixin
+from intelmq.lib.utils import create_request_session
+
+CACHE_FORMAT = "vuln:{identifier}"
+CACHE_NOT_FOUND = "VULN_NOT_FOUND"
+
+
+class VulnerabilityLookupExpertBot(ExpertBot, CacheMixin):
+    url = "https://vulnerability.circl.lu"
+    vulnerability_field = "classification.identifier"
+    description_length = 500
+    overwrite = False
+
+    filter_classification_type = ["vulnerable-system"]
+
+    redis_cache_ttl = 86400  # 1 day
+
+    def init(self):
+        self.set_request_parameters()
+        self.http_session = create_request_session(self)
+
+    def _get_vulnerability_data(self, vuln_id: Optional[str]) -> Optional[dict]:
+        vuln_id = (vuln_id or "").strip().lower()
+        if not vuln_id:
+            return None
+
+        cache_key = CACHE_FORMAT.format(identifier=vuln_id)
+        cached_data = self.cache_get(cache_key)
+        if cached_data:
+            if cached_data == CACHE_NOT_FOUND:
+                return None
+            return json.loads(cached_data)
+
+        response = self.http_session.get(f"{self.url}/api/vulnerability/{vuln_id}")
+        if response.status_code != 200:
+            response.raise_for_status()
+
+        vuln_raw_data = response.json()
+        if response.status_code == 404 or not vuln_raw_data:
+            self.cache_set(cache_key, CACHE_NOT_FOUND)
+            return None
+
+        epss = None
+        response = self.http_session.get(f"{self.url}/api/epss/{vuln_id}")
+        if response.status_code == 200:
+            epss_data = response.json().get("data", [])
+            if epss_data:
+                epss = epss_data[0].get("epss", None)
+        else:
+            self.logger.info(
+                "Cannot get EPSS score, status code: %d", response.status_code
+            )
+
+        vuln_data = {"url": f"{self.url}/vuln/{vuln_id}"}
+        if epss:
+            vuln_data["epss"] = epss
+
+        description = None
+        cvss3_1 = None
+        cvss3_0 = None
+        cvss4_0 = None
+
+        # Every source queried by Vulnerability Lookup provides different data format
+
+        # CVE records
+        if vuln_raw_data.get("dataType", "") == "CVE_RECORD":
+            cna = vuln_raw_data.get("containers", {}).get("cna")
+            if cna:
+                for item in cna.get("descriptions", []):
+                    if item.get("lang", "") in ["en"]:
+                        description = item.get("value", "")
+                        break
+
+                for item in cna.get("metrics", []):
+                    if "cvssV3_1" in item:
+                        cvss3_1 = item["cvssV3_1"].get("baseScore")
+                    if "cvssV3_0" in item:
+                        cvss3_0 = item["cvssV3_0"].get("baseScore")
+                    if "cvssV4_0" in item:
+                        cvss4_0 = item["cvssV4_0"].get("baseScore")
+
+        # GitHub Security Advisories
+        if vuln_id.startswith("ghsa-"):
+            description = vuln_raw_data.get("details")
+            # TODO: Calculate CVSS
+
+        if description:
+            # Some CSV readers do not understand multi-line texts
+            description = description.replace("\n", " ")
+            vuln_data["description"] = description[: self.description_length]
+            if len(description) > self.description_length:
+                vuln_data["description"] += "..."
+
+        if cvss3_0:
+            vuln_data["cvss3_0"] = cvss3_0
+
+        if cvss3_1:
+            vuln_data["cvss3_1"] = cvss3_1
+
+        if cvss4_0:
+            vuln_data["cvss4_0"] = cvss4_0
+
+        self.cache_set(cache_key, json.dumps(vuln_data))
+        return vuln_data
+
+    def process(self):
+        event = self.receive_message()
+
+        if (
+            not self.filter_classification_type
+            or event.get("classification.type") in self.filter_classification_type
+        ):
+            vuln_id = event.get(self.vulnerability_field)
+            vuln_data = self._get_vulnerability_data(vuln_id) or {}
+
+            if description := vuln_data.get("description"):
+                event.add(
+                    "event_description.text", description, overwrite=self.overwrite
+                )
+
+            if url := vuln_data.get("url"):
+                event.add("event_description.url", url, overwrite=self.overwrite)
+
+            for score_type in ["cvss3_0", "cvss3_1", "cvss4_0", "epss"]:
+                if score := vuln_data.get(score_type):
+                    event.add(f"extra.{score_type}", score, overwrite=self.overwrite)
+
+        self.send_message(event)
+        self.acknowledge_message()
+
+
+BOT = VulnerabilityLookupExpertBot
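
Note: a rough before/after sketch of the enrichment done by process(), assuming the default public Vulnerability Lookup instance and a placeholder CVE id. The field names come from the code above; the id, description, and scores are invented for illustration.

# Hypothetical input/output, not real lookup results.
event_in = {
    "classification.type": "vulnerable-system",
    "classification.identifier": "CVE-2024-0001",  # queried as <url>/api/vulnerability/cve-2024-0001
}
event_out = {
    **event_in,
    "event_description.url": "https://vulnerability.circl.lu/vuln/cve-2024-0001",
    "event_description.text": "English description from the CNA container, truncated to 500 chars...",
    "extra.cvss3_1": 9.8,   # baseScore of the cvssV3_1 metric, if present
    "extra.epss": 0.12,     # first "data" entry of /api/epss/<id>, if available
}
# Results are cached under "vuln:cve-2024-0001" for redis_cache_ttl seconds;
# lookups that return nothing are cached as the CACHE_NOT_FOUND sentinel.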
File without changes
File without changes
intelmq_extensions/bots/outputs/mattermost/output.py
@@ -0,0 +1,113 @@
+from copy import deepcopy
+
+from intelmq.lib.bot import OutputBot
+from intelmq.lib.utils import create_request_session
+
+
+class MattermostOutputBot(OutputBot):
+    mm_url: str
+    bot_token: str
+    channel_id: str
+
+    message: str = None
+
+    # https://developers.mattermost.com/integrate/reference/message-attachments/
+    fallback: str = None
+    pretext: str = None
+    text: str = None
+
+    title: str = None
+    title_link: str = None
+
+    author_name: str = "IntelMQ"
+    author_icon: str = None
+    author_link: str = None
+
+    color: str = None
+
+    fields: list[dict] = None
+
+    footer: str = None
+
+    # https://developers.mattermost.com/integrate/webhooks/incoming/#parameters
+    card: str = None
+
+    _template_fields = [
+        "fallback",
+        "message",
+        "pretext",
+        "text",
+        "title",
+        "value",
+        "footer",
+    ]
+    _attachment_fields = [
+        "fallback",
+        "pretext",
+        "text",
+        "title",
+        "title_link",
+        "author_name",
+        "author_icon",
+        "author_link",
+        "color",
+        "fields",
+        "footer",
+    ]
+    _is_attachment = False
+
+    def init(self):
+        self.set_request_parameters()
+        self.session = create_request_session(self)
+
+        if not self.message and not self.text:
+            raise ValueError("Either message or text have to be configured")
+
+        if any(getattr(self, f, None) for f in self._attachment_fields):
+            self._is_attachment = True
+
+    def process(self):
+        event = self.receive_message()
+        event.set_default_value()
+
+        request_data = {"channel_id": self.channel_id, "props": {}}
+
+        if self.message:
+            request_data["message"] = self.message.format(ev=event)
+
+        if self._is_attachment:
+            request_data["props"]["attachments"] = [self._prepare_attachment(event)]
+
+        if self.card:
+            request_data["props"]["card"] = self.card.format(ev=event)
+
+        result = self.session.post(
+            f"{self.mm_url}/api/v4/posts",
+            json=request_data,
+            headers={"Authorization": f"Bearer {self.bot_token}"},
+        )
+        result.raise_for_status()
+
+        self.acknowledge_message()
+
+    def _prepare_attachment(self, event) -> dict:
+        attachment = {}
+        for field in self._attachment_fields:
+            data = getattr(self, field, None)
+            if data is None:
+                continue
+            if field == "fields":
+                data: list[dict] = deepcopy(data)
+                for item in data:
+                    if "title" in item:
+                        item["title"] = item["title"].format(ev=event)
+                    if "value" in item:
+                        item["value"] = item["value"].format(ev=event)
+            elif field in self._template_fields:
+                data = data.format(ev=event)
+
+            attachment[field] = data
+        return attachment
+
+
+BOT = MattermostOutputBot
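
Note: the templated parameters (message, text, title, card, fields[].title, fields[].value, ...) are plain str.format() templates that receive the event as ev, so harmonized fields can be referenced by item access; event.set_default_value() in process() presumably lets templates referencing absent fields render the default instead of raising. A hypothetical parameter set is sketched below; the URL, token, channel id, and template strings are placeholders, only the parameter names come from the bot.

# Hypothetical runtime parameters for MattermostOutputBot.
PARAMETERS = {
    "mm_url": "https://mattermost.example.org",
    "bot_token": "REPLACE_ME",
    "channel_id": "abc123",
    "message": "New {ev[classification.type]} event for AS{ev[source.asn]}",
    "text": "Source IP: {ev[source.ip]}",
    "fields": [
        {"title": "Feed", "value": "{ev[feed.name]}", "short": True},
    ],
}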
File without changes
intelmq_extensions/bots/outputs/to_logs/output.py
@@ -0,0 +1,12 @@
+from intelmq.lib.bot import Bot
+
+
+class ToLogOutput(Bot):
+    def process(self):
+        event = self.receive_message()
+        jevent = event.to_json()
+        self.logger.info("Got message %s", jevent)
+        self.acknowledge_message()
+
+
+BOT = ToLogOutput
File without changes