intelmq-extensions 1.8.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- intelmq_extensions/__init__.py +0 -0
- intelmq_extensions/bots/__init__.py +0 -0
- intelmq_extensions/bots/collectors/blackkite/__init__.py +0 -0
- intelmq_extensions/bots/collectors/blackkite/_client.py +167 -0
- intelmq_extensions/bots/collectors/blackkite/collector.py +182 -0
- intelmq_extensions/bots/collectors/disp/__init__.py +0 -0
- intelmq_extensions/bots/collectors/disp/_client.py +121 -0
- intelmq_extensions/bots/collectors/disp/collector.py +104 -0
- intelmq_extensions/bots/collectors/xmpp/__init__.py +0 -0
- intelmq_extensions/bots/collectors/xmpp/collector.py +210 -0
- intelmq_extensions/bots/experts/__init__.py +0 -0
- intelmq_extensions/bots/experts/certat_contact_intern/__init__.py +0 -0
- intelmq_extensions/bots/experts/certat_contact_intern/expert.py +139 -0
- intelmq_extensions/bots/experts/copy_extra/__init__.py +0 -0
- intelmq_extensions/bots/experts/copy_extra/expert.py +27 -0
- intelmq_extensions/bots/experts/event_group_splitter/__init__.py +0 -0
- intelmq_extensions/bots/experts/event_group_splitter/expert.py +117 -0
- intelmq_extensions/bots/experts/event_splitter/__init__.py +0 -0
- intelmq_extensions/bots/experts/event_splitter/expert.py +41 -0
- intelmq_extensions/bots/experts/squelcher/__init__.py +0 -0
- intelmq_extensions/bots/experts/squelcher/expert.py +316 -0
- intelmq_extensions/bots/experts/vulnerability_lookup/__init__.py +0 -0
- intelmq_extensions/bots/experts/vulnerability_lookup/expert.py +136 -0
- intelmq_extensions/bots/outputs/__init__.py +0 -0
- intelmq_extensions/bots/outputs/mattermost/__init__.py +0 -0
- intelmq_extensions/bots/outputs/mattermost/output.py +113 -0
- intelmq_extensions/bots/outputs/to_logs/__init__.py +0 -0
- intelmq_extensions/bots/outputs/to_logs/output.py +12 -0
- intelmq_extensions/bots/outputs/xmpp/__init__.py +0 -0
- intelmq_extensions/bots/outputs/xmpp/output.py +180 -0
- intelmq_extensions/bots/parsers/__init__.py +0 -0
- intelmq_extensions/bots/parsers/blackkite/__init__.py +0 -0
- intelmq_extensions/bots/parsers/blackkite/_transformers.py +202 -0
- intelmq_extensions/bots/parsers/blackkite/parser.py +65 -0
- intelmq_extensions/bots/parsers/disp/__init__.py +0 -0
- intelmq_extensions/bots/parsers/disp/parser.py +125 -0
- intelmq_extensions/bots/parsers/malwaredomains/__init__.py +0 -0
- intelmq_extensions/bots/parsers/malwaredomains/parser.py +63 -0
- intelmq_extensions/cli/__init__.py +0 -0
- intelmq_extensions/cli/create_reports.py +161 -0
- intelmq_extensions/cli/intelmqcli.py +657 -0
- intelmq_extensions/cli/lib.py +670 -0
- intelmq_extensions/cli/utils.py +12 -0
- intelmq_extensions/etc/harmonization.conf +434 -0
- intelmq_extensions/etc/squelcher.conf +52 -0
- intelmq_extensions/lib/__init__.py +0 -0
- intelmq_extensions/lib/api_helpers.py +105 -0
- intelmq_extensions/lib/blackkite.py +29 -0
- intelmq_extensions/tests/__init__.py +0 -0
- intelmq_extensions/tests/base.py +336 -0
- intelmq_extensions/tests/bots/__init__.py +0 -0
- intelmq_extensions/tests/bots/collectors/__init__.py +0 -0
- intelmq_extensions/tests/bots/collectors/blackkite/__init__.py +0 -0
- intelmq_extensions/tests/bots/collectors/blackkite/base.py +45 -0
- intelmq_extensions/tests/bots/collectors/blackkite/test_client.py +154 -0
- intelmq_extensions/tests/bots/collectors/blackkite/test_collector.py +287 -0
- intelmq_extensions/tests/bots/collectors/disp/__init__.py +0 -0
- intelmq_extensions/tests/bots/collectors/disp/base.py +147 -0
- intelmq_extensions/tests/bots/collectors/disp/test_client.py +134 -0
- intelmq_extensions/tests/bots/collectors/disp/test_collector.py +137 -0
- intelmq_extensions/tests/bots/collectors/xmpp/__init__.py +0 -0
- intelmq_extensions/tests/bots/collectors/xmpp/test_collector.py +10 -0
- intelmq_extensions/tests/bots/experts/__init__.py +0 -0
- intelmq_extensions/tests/bots/experts/certat_contact_intern/__init__.py +0 -0
- intelmq_extensions/tests/bots/experts/certat_contact_intern/test_expert.py +176 -0
- intelmq_extensions/tests/bots/experts/copy_extra/__init__.py +0 -0
- intelmq_extensions/tests/bots/experts/copy_extra/test_expert.py +42 -0
- intelmq_extensions/tests/bots/experts/event_group_splitter/__init__.py +0 -0
- intelmq_extensions/tests/bots/experts/event_group_splitter/test_expert.py +302 -0
- intelmq_extensions/tests/bots/experts/event_splitter/__init__.py +0 -0
- intelmq_extensions/tests/bots/experts/event_splitter/test_expert.py +101 -0
- intelmq_extensions/tests/bots/experts/squelcher/__init__.py +0 -0
- intelmq_extensions/tests/bots/experts/squelcher/test_expert.py +548 -0
- intelmq_extensions/tests/bots/experts/vulnerability_lookup/__init__.py +0 -0
- intelmq_extensions/tests/bots/experts/vulnerability_lookup/test_expert.py +203 -0
- intelmq_extensions/tests/bots/outputs/__init__.py +0 -0
- intelmq_extensions/tests/bots/outputs/mattermost/__init__.py +0 -0
- intelmq_extensions/tests/bots/outputs/mattermost/test_output.py +138 -0
- intelmq_extensions/tests/bots/outputs/xmpp/__init__.py +0 -0
- intelmq_extensions/tests/bots/outputs/xmpp/test_output.py +10 -0
- intelmq_extensions/tests/bots/parsers/__init__.py +0 -0
- intelmq_extensions/tests/bots/parsers/blackkite/__init__.py +0 -0
- intelmq_extensions/tests/bots/parsers/blackkite/data.py +69 -0
- intelmq_extensions/tests/bots/parsers/blackkite/test_parser.py +197 -0
- intelmq_extensions/tests/bots/parsers/disp/__init__.py +0 -0
- intelmq_extensions/tests/bots/parsers/disp/test_parser.py +282 -0
- intelmq_extensions/tests/bots/parsers/malwaredomains/__init__.py +0 -0
- intelmq_extensions/tests/bots/parsers/malwaredomains/test_parser.py +62 -0
- intelmq_extensions/tests/cli/__init__.py +0 -0
- intelmq_extensions/tests/cli/test_create_reports.py +97 -0
- intelmq_extensions/tests/cli/test_intelmqcli.py +158 -0
- intelmq_extensions/tests/lib/__init__.py +0 -0
- intelmq_extensions/tests/lib/base.py +81 -0
- intelmq_extensions/tests/lib/test_api_helpers.py +126 -0
- intelmq_extensions-1.8.1.dist-info/METADATA +60 -0
- intelmq_extensions-1.8.1.dist-info/RECORD +100 -0
- intelmq_extensions-1.8.1.dist-info/WHEEL +5 -0
- intelmq_extensions-1.8.1.dist-info/entry_points.txt +33 -0
- intelmq_extensions-1.8.1.dist-info/licenses/LICENSE +661 -0
- intelmq_extensions-1.8.1.dist-info/top_level.txt +1 -0
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,167 @@
|
|
|
1
|
+
"""Client to access the BlackKite library
|
|
2
|
+
|
|
3
|
+
SPDX-FileCopyrightText: 2023 CERT.at GmbH <https://cert.at/>
|
|
4
|
+
SPDX-License-Identifier: AGPL-3.0-or-later
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import logging
|
|
8
|
+
import math
|
|
9
|
+
from enum import Enum
|
|
10
|
+
from typing import Iterator, Sequence
|
|
11
|
+
|
|
12
|
+
import requests
|
|
13
|
+
|
|
14
|
+
from intelmq_extensions.lib.api_helpers import (
|
|
15
|
+
DEFAULT_REFRESH_WINDOW,
|
|
16
|
+
OAuthAccessMixin,
|
|
17
|
+
RateLimiter,
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
from ....lib.blackkite import Category
|
|
21
|
+
|
|
22
|
+
# Fallback module-level logger used when the caller does not inject one.
default_logger = logging.getLogger(__name__)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class Status(str, Enum):
    """Lifecycle states of a BlackKite finding (exact API string values)."""

    ACTIVE = "Active"
    FALSE_POSITIVE = "FalsePositive"
    SUPPRESSED = "Suppressed"
    ACKNOWLEDGED = "Acknowledged"
    DELETED = "Deleted"
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class Severity(str, Enum):
    """Severity levels of a BlackKite finding (exact API string values)."""

    INFO = "Info"
    LOW = "Low"
    MEDIUM = "Medium"
    HIGH = "High"
    CRITICAL = "Critical"
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class Output(str, Enum):
    """Check outcomes ("output") of a BlackKite finding (exact API string values)."""

    INFO = "Info"
    PASSED = "Passed"
    WARNING = "Warning"
    FAILED = "Failed"
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
# Default filters applied by BlackKiteClient.list_findings when the caller
# does not supply its own statuses/severities.
_DEFAULT_STATUSES = [Status.ACTIVE]
_DEFAULT_SEVERITY = [Severity.CRITICAL]

# Finding categories whose endpoints support the "output" filter; for any
# other category the filter is dropped (see get_findings_from_category).
CATEGORIES_WITH_OUTPUT = [
    Category.DNSHealth,
    Category.ApplicationSecurity,
    Category.EmailSecurity,
    Category.NetworkSecurity,
    Category.DDoSResiliency,
    Category.SSLTLSstrength,
    Category.InformationDisclosure,
]
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
class BlackKiteClient(OAuthAccessMixin):
    """Thin client for the BlackKite REST API.

    Combines OAuth token handling (via ``OAuthAccessMixin``), client-side
    rate limiting (``RateLimiter``) and header-based pagination.
    """

    def __init__(
        self,
        url: str,
        client_id: str,
        client_secret: str,
        refresh_before: str = DEFAULT_REFRESH_WINDOW,
        session: requests.Session = None,
        logger: logging.Logger = default_logger,
        limit_requests: int = 60,
        limit_period: int = 60,
        page_size: int = 100,
    ) -> None:
        """Create a client bound to *url* using OAuth client credentials.

        ``limit_requests``/``limit_period`` describe the API rate limit
        (requests per period in seconds); every request is routed through
        the resulting limiter.
        """
        self.url = url
        self.logger = logger
        self._session = session
        self._page_size = page_size

        self.limiter = RateLimiter(limit_requests, limit_period)
        self.init_oauth(
            oauth_url=f"{url}/oauth/token",
            oauth_clientid=client_id,
            oauth_clientsecret=client_secret,
            session=session,
            logger=logger,
            refresh_before=refresh_before,
            limiter=self.limiter,
        )

    def get(self, path: str, params: dict = None, raw: bool = False):
        """GET *path* (relative to the API root).

        Returns the parsed JSON body, or the raw ``requests.Response`` when
        ``raw`` is true (pagination needs access to response headers).

        Raises:
            RuntimeError: when the server responds with a non-2xx status.
        """
        with self.limiter.call():
            response = self._session.get(
                f"{self.url}/{path}",
                params=params,
                headers={"Authorization": f"Bearer {self.access_token}"},
            )
        if not response.ok:
            self.logger.error(
                "Request %s failed with error %s, message: %s.",
                path,
                response.status_code,
                response.text,
            )
            raise RuntimeError(f"Request to {path} failed with {response.status_code}")
        return response if raw else response.json()

    def get_paginated(self, path: str, params: dict = None) -> Iterator[dict]:
        """Yield every element of a paginated collection endpoint.

        The total item count is read from the ``X-Total-Items`` response
        header; pages are fetched until it is exhausted.
        """
        # Work on a copy: the original implementation mutated the caller's
        # dict in place, leaking page_number/page_size back to the caller.
        params = dict(params) if params else {}
        last = False
        page = 1
        while not last:
            params.update({"page_number": page, "page_size": self._page_size})
            response = self.get(path, params, raw=True)

            total_items = int(response.headers.get("X-Total-Items", "0"))
            last = page >= math.ceil(total_items / self._page_size)

            yield from response.json()
            page += 1

    def list_findings(
        self,
        path: str,
        company_id: int,
        severities: Sequence[Severity] = None,
        statuses: Sequence[Status] = None,
        outputs: Sequence[Output] = None,
    ) -> Iterator[dict]:
        """Yield findings of one category endpoint for *company_id*.

        Falls back to the module defaults (active / critical) when no
        statuses or severities are given; ``outputs`` is optional.
        """
        severities = severities or _DEFAULT_SEVERITY
        statuses = statuses or _DEFAULT_STATUSES

        # Enum members are str subclasses, so they join directly into the
        # comma-separated filter values the API expects.
        params = {"status": ",".join(statuses), "severity": ",".join(severities)}
        if outputs:
            params["output"] = ",".join(outputs)

        return self.get_paginated(f"companies/{company_id}/findings/{path}", params)

    def status(self) -> dict:
        """Return the API status object."""
        return self.get("status")

    def companies(self) -> Iterator[dict]:
        """Yield all monitored companies."""
        return self.get_paginated("companies")

    def get_findings_from_category(
        self,
        category: Category,
        company_id: int,
        severities: Sequence[Severity] = None,
        statuses: Sequence[Status] = None,
        outputs: Sequence[Output] = None,
    ):
        """Yield findings for *category*, silently dropping the output
        filter for categories whose endpoint does not support it."""
        if category not in CATEGORIES_WITH_OUTPUT:
            outputs = None
        return self.list_findings(
            category.name.lower(), company_id, severities, statuses, outputs
        )

    def acknowledge_finding(self, company_id: int, finding_id: int):
        """Set a finding's status to Acknowledged (best-effort).

        Failures are logged but deliberately not raised so that one failed
        ACK cannot abort a whole collection run.
        """
        with self.limiter.call():
            result = self._session.patch(
                f"{self.url}/companies/{company_id}/findings/{finding_id}",
                headers={"Authorization": f"Bearer {self.access_token}"},
                json={"Status": Status.ACKNOWLEDGED.value},
            )
        if not result.ok:
            # Previously only a debug line was emitted, hiding failures
            # entirely at default log levels.
            self.logger.warning(
                "Failed to ACK finding %s: status %s, message: %s.",
                finding_id,
                result.status_code,
                result.text,
            )
        self.logger.debug("ACK finding %s, result: %s.", finding_id, result.text)
|
|
@@ -0,0 +1,182 @@
|
|
|
1
|
+
"""Collector of data from BlackKite API
|
|
2
|
+
|
|
3
|
+
SPDX-FileCopyrightText: 2023 CERT.at GmbH <https://cert.at/>
|
|
4
|
+
SPDX-License-Identifier: AGPL-3.0-or-later
|
|
5
|
+
|
|
6
|
+
Parameters:
|
|
7
|
+
|
|
8
|
+
url
|
|
9
|
+
client_id
|
|
10
|
+
client_secret
|
|
11
|
+
refresh_before
|
|
12
|
+
|
|
13
|
+
categories (dict):
|
|
14
|
+
{category-code, e.g. PATCH}:
|
|
15
|
+
severities: [{list of names}] (optional, override)
|
|
16
|
+
outputs: [{list of names}] (optional, override)
|
|
17
|
+
statuses: [{list of names}] (optional, override)
|
|
18
|
+
include: [{list of ids, eg. XXX-001}] (optional, mutual exclusive with exclude)
|
|
19
|
+
exclude: [{list of ids, eg. XXX-001}] (optional, mutual exclusive with include)
|
|
20
|
+
acknowledge: bool (optional, whether to change the finding's status to acknowledged or not
|
|
21
|
+
(default: false))
|
|
22
|
+
severities: []{list of names}
|
|
23
|
+
outputs: [{list of names}]
|
|
24
|
+
statuses: [{list of names}]
|
|
25
|
+
"""
|
|
26
|
+
|
|
27
|
+
import json
|
|
28
|
+
|
|
29
|
+
from intelmq.lib.bot import CollectorBot
|
|
30
|
+
from intelmq.lib.utils import create_request_session
|
|
31
|
+
|
|
32
|
+
from intelmq_extensions.lib.blackkite import Category
|
|
33
|
+
|
|
34
|
+
from ._client import CATEGORIES_WITH_OUTPUT, BlackKiteClient, Output, Severity, Status
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class BlackKiteCollectorBot(CollectorBot):
    """Collect BlackKite findings for every monitored company.

    For each company returned by the API, every configured category is
    queried with the merged (default + per-category) filter config, and
    each matching finding is emitted as one report.
    """

    url: str = ""
    client_id: str = ""
    client_secret: str = ""
    # refresh access token when it's less than 10 minutes to expire
    refresh_token_before: int = 10
    # BlackKite has API rate limit 60 req./1 minute
    limit_requests: int = 60
    limit_period: int = 60  # 1 minute
    page_size: int = 100

    # Per-category configuration; see the module docstring for the format.
    categories: dict = {}
    # Global defaults, overridable per category.
    severities: list = [Severity.CRITICAL.value]
    outputs: list = [Output.FAILED.value]
    statuses: list = [Status.ACTIVE.value]

    def init(self):
        """Build the HTTP session and API client, then validate settings."""
        self.set_request_parameters()
        self.session = create_request_session(self)
        self.client = BlackKiteClient(
            url=self.url,
            client_id=self.client_id,
            client_secret=self.client_secret,
            refresh_before=self.refresh_token_before,
            session=self.session,
            logger=self.logger,
            limit_requests=self.limit_requests,
            limit_period=self.limit_period,
            page_size=self.page_size,
        )

        self._process_settings()

    def _process_settings(self):
        """Convert raw string settings into enum-based filter configs."""
        self._default_config = {
            "severities": [Severity(value) for value in self.severities],
            "outputs": [Output(value) for value in self.outputs],
            "statuses": [Status(value) for value in self.statuses],
        }

        self._categories = dict(
            self._process_category_config(k, v) for k, v in self.categories.items()
        )

    @staticmethod
    def _process_category_config(key: str, data: dict):
        """Validate one category config and return ``(Category, config)``.

        Raises:
            ValueError: on unknown enum values, on combining include with
                exclude, on output filters for unsupported categories, or
                on control IDs not belonging to the category.
        """
        data = data or {}
        category = Category(key)
        if "include" in data and "exclude" in data:
            raise ValueError("Including and excluding at the same time isn't possible")
        processed_data = {}
        if severities := data.get("severities"):
            processed_data["severities"] = [Severity(v) for v in severities]
        if statuses := data.get("statuses"):
            processed_data["statuses"] = [Status(v) for v in statuses]
        if outputs := data.get("outputs"):
            if category not in CATEGORIES_WITH_OUTPUT:
                raise ValueError(f"{category.value} does not support output filtering.")
            processed_data["outputs"] = [Output(v) for v in outputs]
        if include := data.get("include"):
            # Control IDs are prefixed with the category code, e.g. "PATCH-001".
            if any(not v.startswith(f"{category.value}-") for v in include):
                raise ValueError(f"Category {category.value} includes incorrect IDs")
            processed_data["include"] = include
        if exclude := data.get("exclude"):
            if any(not v.startswith(f"{category.value}-") for v in exclude):
                raise ValueError(f"Category {category.value} excludes incorrect IDs")
            processed_data["exclude"] = exclude
        processed_data["acknowledge"] = data.get("acknowledge", False)

        return category, processed_data

    def process(self):
        """Collect findings for every company and configured category.

        A failure in one category is logged and must not stop the other
        categories/companies from being processed.
        """
        for company in self.client.companies():
            for category, config in self._categories.items():
                try:
                    self._process_category(
                        company, category, {**self._default_config, **config}
                    )
                except Exception as exc:
                    self.logger.error(
                        "Error when processing a category: %s.", exc, exc_info=True
                    )

    def _process_category(self, company: dict, category: Category, config: dict):
        """Fetch, filter and emit findings of one category for *company*."""

        def _should_send(finding):
            # include and exclude are mutually exclusive (validated above);
            # with neither set, every finding passes.
            if include := config.get("include"):
                return finding.get("ControlId") in include
            elif exclude := config.get("exclude"):
                return finding.get("ControlId") not in exclude

            return True

        for finding in self.client.get_findings_from_category(
            category,
            company["CompanyId"],
            statuses=config.get("statuses"),
            severities=config.get("severities"),
            outputs=config.get("outputs"),
        ):
            if not _should_send(finding):
                continue
            report = self.new_report()
            report.add("raw", json.dumps({"company": company, "finding": finding}))
            self.send_message(report)
            if config.get("acknowledge"):
                self.client.acknowledge_finding(
                    company.get("CompanyId"), finding.get("FindingId")
                )

    @staticmethod
    def check(parameters: dict) -> "list[list[str]] | None":
        """Validate bot parameters; return a list of [level, message] pairs
        or None when everything is fine.

        Note: the original annotation ``list[list[str]] or None`` evaluated
        the ``or`` away at definition time; the string union expresses the
        intended Optional return.
        """
        errors = []
        definitions = [
            ("severities", Severity),
            ("outputs", Output),
            ("statuses", Status),
        ]
        for key, type_ in definitions:
            try:
                [type_(value) for value in parameters.get(key, [])]
            except ValueError as exc:
                errors.append(["error", f"Error processing '{key}': {exc}."])

        allowed_keys = {
            "severities",
            "outputs",
            "statuses",
            "include",
            "exclude",
            "acknowledge",
        }
        for category, value in parameters.get("categories", {}).items():
            value = value or {}
            try:
                if set(value.keys()) - allowed_keys:
                    raise ValueError("Unsupported config key")
                BlackKiteCollectorBot._process_category_config(category, value)
            except (ValueError, TypeError) as exc:
                errors.append(
                    ["error", f"Error processing category {category}: {exc}."]
                )

        return errors or None


BOT = BlackKiteCollectorBot
|
|
File without changes
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
"""Client to access the DISP API
|
|
2
|
+
|
|
3
|
+
SPDX-FileCopyrightText: 2023 CERT.at GmbH <https://cert.at/>
|
|
4
|
+
SPDX-License-Identifier: AGPL-3.0-or-later
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import logging
|
|
8
|
+
from datetime import datetime
|
|
9
|
+
from urllib.parse import quote, urlencode
|
|
10
|
+
|
|
11
|
+
import requests
|
|
12
|
+
|
|
13
|
+
from intelmq_extensions.lib.api_helpers import DEFAULT_REFRESH_WINDOW, OAuthAccessMixin
|
|
14
|
+
|
|
15
|
+
# OAuth2 parameters required by the DISP gateway token endpoint.
_SCOPE = "https://gateway.disp.deloitte.com/.default"
_GRANT_TYPE = "client_credentials"


# Fallback module-level logger used when the caller does not inject one.
default_logger = logging.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class DISPClient(OAuthAccessMixin):
    """Client for the Deloitte Intelligence Service Portal (DISP) API.

    Every request is authenticated twice: with a static bearer token
    (``auth_token``) and an OAuth access token managed by
    ``OAuthAccessMixin``, sent in separate headers.
    """

    def __init__(
        self,
        api_url: str,
        auth_token: str,
        oauth_clientid: str,
        oauth_clientsecret: str,
        oauth_url: str,
        session: requests.Session,
        refresh_before: int = DEFAULT_REFRESH_WINDOW,
        logger: logging.Logger = default_logger,
    ) -> None:
        self.api_url = api_url
        self.auth_token = auth_token
        self._access_token = None
        self._session = session
        self._page_size = 10
        self.logger = logger

        self.init_oauth(
            oauth_clientid=oauth_clientid,
            oauth_clientsecret=oauth_clientsecret,
            oauth_url=oauth_url,
            oauth_scope=_SCOPE,
            oauth_grant_type=_GRANT_TYPE,
            logger=self.logger,
            refresh_before=refresh_before,
            session=self._session,
        )

    def _auth(self):
        """Headers combining the static API token and the OAuth token."""
        return {
            "Authorization": f"Bearer {self.auth_token}",
            "OAuth": self.access_token,
        }

    def _ensure_ok(self, response: requests.Response, path: str):
        """Log and raise when *response* indicates a failed request.

        Shared by ``get`` and ``post`` (the original duplicated this
        error-handling block verbatim in both).
        """
        if not response.ok:
            self.logger.error(
                "Request %s failed with error %s, message: %s",
                path,
                response.status_code,
                response.text,
            )
            raise RuntimeError(f"Request to {path} failed with {response.status_code}")

    def get(self, path: str, params: dict = None):
        """GET *path* relative to the API root; return the parsed JSON body."""
        response = self._session.get(
            f"{self.api_url}/{path}",
            params=params,
            headers=self._auth(),
        )
        self._ensure_ok(response, path)
        return response.json()

    def post(self, path: str, params: dict = None) -> requests.Response:
        """POST to *path*; return the raw response on success."""
        response = self._session.post(
            f"{self.api_url}/{path}", params=params, headers=self._auth()
        )
        self._ensure_ok(response, path)
        return response

    def get_paginated(self, path: str, params: dict = None):
        """Yield elements of a page-based endpoint until the last page.

        Relies on each page's ``last`` flag and ``content`` list.
        """
        # Copy so the caller's dict is not polluted with paging keys
        # (the original mutated it in place).
        params = dict(params) if params else {}
        last = False
        page = 0
        while not last:
            # TODO: Use 'nextLink'
            params.update({"page": page, "size": self._page_size})
            response = self.get(path, params)
            last = response.get("last", True)
            yield from response.get("content", [])
            page += 1

    def incidents(
        self, after: datetime = None, only_unread: bool = False, query: str = None
    ):
        """Yield incidents, optionally restricted by date and read status.

        When *query* is given it is used verbatim; otherwise it is built
        from *after* (epoch milliseconds) and *only_unread*.
        """
        if not query:
            conditions = []
            if after:
                # DISP expects epoch milliseconds in the query language.
                long_timestamp = int(after.timestamp() * 1000)
                conditions.append(f"validationDate > {long_timestamp}")
            if only_unread:
                conditions.append("UNREAD")
            query = " AND ".join(conditions)
        # DISP rejects default encoding with + as space
        query = urlencode({"query": query}, quote_via=quote)

        return self.get_paginated(f"incident/?{query}")

    def download_evidence_json(self, incident_id: str, file_id: str):
        """Fetch and parse an incident's JSON evidence file."""
        return self.get(f"incident/{incident_id}/file/{file_id}")

    def mark_incident_read(self, incident_id: str):
        """Mark an incident as read in the portal."""
        self.post("incident/read", params={"id": incident_id, "read": True})
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
"""Collector for Deloitte Intelligence Service Portal
|
|
2
|
+
|
|
3
|
+
SPDX-FileCopyrightText: 2023 CERT.at GmbH <https://cert.at/>
|
|
4
|
+
SPDX-License-Identifier: AGPL-3.0-or-later
|
|
5
|
+
|
|
6
|
+
Connects to the DISP portal and collects selected type of incidents
|
|
7
|
+
for every monitored company. Currently, we are prepared for the credential tracing
|
|
8
|
+
only.
|
|
9
|
+
|
|
10
|
+
Parameters:
|
|
11
|
+
api_url
|
|
12
|
+
auth_token
|
|
13
|
+
oauth_clientid
|
|
14
|
+
oauth_clientsecret
|
|
15
|
+
ouath_url
|
|
16
|
+
|
|
17
|
+
mark_as_read
|
|
18
|
+
wait_for_evidences
|
|
19
|
+
mask_password # hardcoded to true
|
|
20
|
+
"""
|
|
21
|
+
|
|
22
|
+
import json
from datetime import datetime, timedelta, timezone

from intelmq.lib.bot import CollectorBot
from intelmq.lib.utils import create_request_session, parse_relative

from ._client import DISPClient
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class DISPCollectorBot(CollectorBot):
    """Collect incidents (credential tracing) from the DISP portal.

    Unread incidents newer than ``not_older_than`` are fetched; passwords
    inside evidence files are always masked before anything is emitted.
    """

    api_url: str = ""
    auth_token: str = ""
    oauth_clientid: str = ""
    oauth_clientsecret: str = ""
    # NOTE: parameter name carries a historical typo ("ouath"); it is kept
    # for backward compatibility with existing bot configurations.
    ouath_url: str = ""
    # refresh access token when it's less than 10 minutes to expire
    refresh_token_before: int = 10

    mark_as_read: bool = False
    wait_for_evidences: bool = True
    not_older_than: str = "7 days"
    # mask_password: bool = True  # hardcoded to true

    def init(self):
        """Build the HTTP session and the DISP API client."""
        self.set_request_parameters()
        self.session = create_request_session(self)
        self.client = DISPClient(
            api_url=self.api_url,
            auth_token=self.auth_token,
            oauth_clientid=self.oauth_clientid,
            oauth_clientsecret=self.oauth_clientsecret,
            oauth_url=self.ouath_url,
            session=self.session,
            refresh_before=self.refresh_token_before,
            logger=self.logger,
        )

    @staticmethod
    def _mask_passwords(data):
        """Ensure passwords will never be processed nor stored in our systems"""
        for credentials in data.get("credentials", []):
            password = credentials.get("password")
            if not password:
                continue
            # Keep at most the first 3 characters as a hint; shorter
            # passwords are masked completely.
            if len(password) <= 3:
                mask = "*" * len(password)
            else:
                mask = f"{password[:3]}{'*' * (len(password) - 3)}"
            credentials["password"] = mask

    def process(self):
        """Fetch unread incidents, attach masked evidence, emit reports."""
        # Use an aware UTC datetime: the previous naive utcnow() was later
        # passed through .timestamp() in DISPClient.incidents, which
        # interprets naive datetimes in the *local* timezone and produced a
        # skewed epoch-milliseconds filter on non-UTC hosts.
        after = datetime.now(timezone.utc) - timedelta(
            minutes=parse_relative(self.not_older_than)
        )
        for incident in self.client.incidents(after=after, only_unread=True):
            evidences = incident.get("evidences", [])
            expected_file = f'{incident["id"]}.json.txt'
            evidence_file = next(
                filter(lambda f: f.get("name") == expected_file, evidences), None
            )
            if self.wait_for_evidences and not evidence_file:
                # Evidence uploads lag behind incident creation; skip and
                # retry on the next run instead of emitting incomplete data.
                self.logger.debug(
                    "Incident %s doesn't have an evidence file yet.", incident.get("id")
                )
                continue

            evidence_data = None
            if evidence_file:
                evidence_data = self.client.download_evidence_json(
                    incident.get("id"), evidence_file.get("idStoredFile")
                )
                self._mask_passwords(evidence_data)

            report = self.new_report()
            report.add(
                "raw", json.dumps({"incident": incident, "evidences": evidence_data})
            )
            self.send_message(report)
            if self.mark_as_read:
                self.client.mark_incident_read(incident.get("id"))


BOT = DISPCollectorBot
|
|
File without changes
|