intelmq_extensions-1.8.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- intelmq_extensions/__init__.py +0 -0
- intelmq_extensions/bots/__init__.py +0 -0
- intelmq_extensions/bots/collectors/blackkite/__init__.py +0 -0
- intelmq_extensions/bots/collectors/blackkite/_client.py +167 -0
- intelmq_extensions/bots/collectors/blackkite/collector.py +182 -0
- intelmq_extensions/bots/collectors/disp/__init__.py +0 -0
- intelmq_extensions/bots/collectors/disp/_client.py +121 -0
- intelmq_extensions/bots/collectors/disp/collector.py +104 -0
- intelmq_extensions/bots/collectors/xmpp/__init__.py +0 -0
- intelmq_extensions/bots/collectors/xmpp/collector.py +210 -0
- intelmq_extensions/bots/experts/__init__.py +0 -0
- intelmq_extensions/bots/experts/certat_contact_intern/__init__.py +0 -0
- intelmq_extensions/bots/experts/certat_contact_intern/expert.py +139 -0
- intelmq_extensions/bots/experts/copy_extra/__init__.py +0 -0
- intelmq_extensions/bots/experts/copy_extra/expert.py +27 -0
- intelmq_extensions/bots/experts/event_group_splitter/__init__.py +0 -0
- intelmq_extensions/bots/experts/event_group_splitter/expert.py +117 -0
- intelmq_extensions/bots/experts/event_splitter/__init__.py +0 -0
- intelmq_extensions/bots/experts/event_splitter/expert.py +41 -0
- intelmq_extensions/bots/experts/squelcher/__init__.py +0 -0
- intelmq_extensions/bots/experts/squelcher/expert.py +316 -0
- intelmq_extensions/bots/experts/vulnerability_lookup/__init__.py +0 -0
- intelmq_extensions/bots/experts/vulnerability_lookup/expert.py +136 -0
- intelmq_extensions/bots/outputs/__init__.py +0 -0
- intelmq_extensions/bots/outputs/mattermost/__init__.py +0 -0
- intelmq_extensions/bots/outputs/mattermost/output.py +113 -0
- intelmq_extensions/bots/outputs/to_logs/__init__.py +0 -0
- intelmq_extensions/bots/outputs/to_logs/output.py +12 -0
- intelmq_extensions/bots/outputs/xmpp/__init__.py +0 -0
- intelmq_extensions/bots/outputs/xmpp/output.py +180 -0
- intelmq_extensions/bots/parsers/__init__.py +0 -0
- intelmq_extensions/bots/parsers/blackkite/__init__.py +0 -0
- intelmq_extensions/bots/parsers/blackkite/_transformers.py +202 -0
- intelmq_extensions/bots/parsers/blackkite/parser.py +65 -0
- intelmq_extensions/bots/parsers/disp/__init__.py +0 -0
- intelmq_extensions/bots/parsers/disp/parser.py +125 -0
- intelmq_extensions/bots/parsers/malwaredomains/__init__.py +0 -0
- intelmq_extensions/bots/parsers/malwaredomains/parser.py +63 -0
- intelmq_extensions/cli/__init__.py +0 -0
- intelmq_extensions/cli/create_reports.py +161 -0
- intelmq_extensions/cli/intelmqcli.py +657 -0
- intelmq_extensions/cli/lib.py +670 -0
- intelmq_extensions/cli/utils.py +12 -0
- intelmq_extensions/etc/harmonization.conf +434 -0
- intelmq_extensions/etc/squelcher.conf +52 -0
- intelmq_extensions/lib/__init__.py +0 -0
- intelmq_extensions/lib/api_helpers.py +105 -0
- intelmq_extensions/lib/blackkite.py +29 -0
- intelmq_extensions/tests/__init__.py +0 -0
- intelmq_extensions/tests/base.py +336 -0
- intelmq_extensions/tests/bots/__init__.py +0 -0
- intelmq_extensions/tests/bots/collectors/__init__.py +0 -0
- intelmq_extensions/tests/bots/collectors/blackkite/__init__.py +0 -0
- intelmq_extensions/tests/bots/collectors/blackkite/base.py +45 -0
- intelmq_extensions/tests/bots/collectors/blackkite/test_client.py +154 -0
- intelmq_extensions/tests/bots/collectors/blackkite/test_collector.py +287 -0
- intelmq_extensions/tests/bots/collectors/disp/__init__.py +0 -0
- intelmq_extensions/tests/bots/collectors/disp/base.py +147 -0
- intelmq_extensions/tests/bots/collectors/disp/test_client.py +134 -0
- intelmq_extensions/tests/bots/collectors/disp/test_collector.py +137 -0
- intelmq_extensions/tests/bots/collectors/xmpp/__init__.py +0 -0
- intelmq_extensions/tests/bots/collectors/xmpp/test_collector.py +10 -0
- intelmq_extensions/tests/bots/experts/__init__.py +0 -0
- intelmq_extensions/tests/bots/experts/certat_contact_intern/__init__.py +0 -0
- intelmq_extensions/tests/bots/experts/certat_contact_intern/test_expert.py +176 -0
- intelmq_extensions/tests/bots/experts/copy_extra/__init__.py +0 -0
- intelmq_extensions/tests/bots/experts/copy_extra/test_expert.py +42 -0
- intelmq_extensions/tests/bots/experts/event_group_splitter/__init__.py +0 -0
- intelmq_extensions/tests/bots/experts/event_group_splitter/test_expert.py +302 -0
- intelmq_extensions/tests/bots/experts/event_splitter/__init__.py +0 -0
- intelmq_extensions/tests/bots/experts/event_splitter/test_expert.py +101 -0
- intelmq_extensions/tests/bots/experts/squelcher/__init__.py +0 -0
- intelmq_extensions/tests/bots/experts/squelcher/test_expert.py +548 -0
- intelmq_extensions/tests/bots/experts/vulnerability_lookup/__init__.py +0 -0
- intelmq_extensions/tests/bots/experts/vulnerability_lookup/test_expert.py +203 -0
- intelmq_extensions/tests/bots/outputs/__init__.py +0 -0
- intelmq_extensions/tests/bots/outputs/mattermost/__init__.py +0 -0
- intelmq_extensions/tests/bots/outputs/mattermost/test_output.py +138 -0
- intelmq_extensions/tests/bots/outputs/xmpp/__init__.py +0 -0
- intelmq_extensions/tests/bots/outputs/xmpp/test_output.py +10 -0
- intelmq_extensions/tests/bots/parsers/__init__.py +0 -0
- intelmq_extensions/tests/bots/parsers/blackkite/__init__.py +0 -0
- intelmq_extensions/tests/bots/parsers/blackkite/data.py +69 -0
- intelmq_extensions/tests/bots/parsers/blackkite/test_parser.py +197 -0
- intelmq_extensions/tests/bots/parsers/disp/__init__.py +0 -0
- intelmq_extensions/tests/bots/parsers/disp/test_parser.py +282 -0
- intelmq_extensions/tests/bots/parsers/malwaredomains/__init__.py +0 -0
- intelmq_extensions/tests/bots/parsers/malwaredomains/test_parser.py +62 -0
- intelmq_extensions/tests/cli/__init__.py +0 -0
- intelmq_extensions/tests/cli/test_create_reports.py +97 -0
- intelmq_extensions/tests/cli/test_intelmqcli.py +158 -0
- intelmq_extensions/tests/lib/__init__.py +0 -0
- intelmq_extensions/tests/lib/base.py +81 -0
- intelmq_extensions/tests/lib/test_api_helpers.py +126 -0
- intelmq_extensions-1.8.1.dist-info/METADATA +60 -0
- intelmq_extensions-1.8.1.dist-info/RECORD +100 -0
- intelmq_extensions-1.8.1.dist-info/WHEEL +5 -0
- intelmq_extensions-1.8.1.dist-info/entry_points.txt +33 -0
- intelmq_extensions-1.8.1.dist-info/licenses/LICENSE +661 -0
- intelmq_extensions-1.8.1.dist-info/top_level.txt +1 -0
--- /dev/null
+++ intelmq_extensions/cli/intelmqcli.py
@@ -0,0 +1,657 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+""" """
+
+import csv
+import datetime
+import io
+import locale
+import os
+import readline  # noqa: F401 hooks into input()
+import subprocess
+import sys
+import tempfile
+import zipfile
+from functools import lru_cache
+from time import sleep
+
+import tabulate
+from termstyle import bold, inverted, reset
+
+from intelmq_extensions.cli import lib
+
+myinverted = str(reset) + str(inverted)
+
+
+class IntelMQCLIContoller(lib.IntelMQCLIContollerTemplate):
+    appname = "intelmqcli"
+    usage = lib.USAGE
+    epilog = lib.EPILOG
+    table_mode = False  # for sticky table mode
+    dryrun = False
+    verbose = False
+    batch = False
+    compress_csv = False
+    boilerplate = None
+    zipme = False
+    subject = None
+
+    def __init__(self, overridden_config: dict = None):
+        super().__init__(overridden_config)
+        self._collected_descriptions = set()
+
+    def run(self, args: list):
+        self.parser.add_argument(
+            "-l", "--list-feeds", action="store_true", help="List all feeds"
+        )
+        self.parser.add_argument(
+            "-i", "--list-identifiers", action="store_true", help="List all identifiers"
+        )
+        self.parser.add_argument(
+            "-L", "--list-texts", action="store_true", help="List all existing texts."
+        )
+        self.parser.add_argument(
+            "-t", "--text", nargs=1, help="Specify the text to be used."
+        )
+        self.parser.add_argument(
+            "-s",
+            "--subject",
+            nargs=1,
+            help="Specify the subject to be used instead of the per-taxonomy.",
+        )
+        self.parser.add_argument(
+            "-T", "--list-taxonomies", action="store_true", help="List all taxonomies"
+        )
+        self.parser.add_argument(
+            "-y", "--list-types", action="store_true", help="List all types"
+        )
+
+        self.parser.add_argument(
+            "-c",
+            "--compress-csv",
+            action="store_true",
+            help="Automatically compress/shrink the attached CSV report if"
+            " fields are empty (default = False).",
+        )
+        self.parser.add_argument(
+            "-z",
+            "--zip",
+            action="store_true",
+            help="Zip every events.csv attachement to an "
+            "investigation for RT (defaults to false)",
+        )
+        self.setup(args)
+
+        if self.args.compress_csv:
+            self.compress_csv = True
+        if self.args.text:
+            self.boilerplate = self.args.text[0]
+        if self.args.zip:
+            self.zipme = True
+        if self.args.subject:
+            self.subject = self.args.subject[0]
+
+        self.connect_database()
+
+        if self.args.list_feeds:
+            self.execute(lib.QUERY_FEED_NAMES, extend=False)
+            for row in self.cur.fetchall():
+                if row["feed.code"]:
+                    print(row["feed.code"])
+            return 0
+
+        if self.args.list_texts:
+            self.execute(lib.QUERY_TEXT_NAMES, extend=False)
+            for row in self.cur.fetchall():
+                if row["key"]:
+                    print(row["key"])
+            return 0
+
+        if self.args.list_identifiers:
+            self.execute(lib.QUERY_IDENTIFIER_NAMES, extend=False)
+            for row in self.cur.fetchall():
+                if row["classification.identifier"]:
+                    print(row["classification.identifier"])
+            return 0
+
+        if self.args.list_taxonomies:
+            self.execute(lib.QUERY_TAXONOMY_NAMES, extend=False)
+            for row in self.cur.fetchall():
+                if row["classification.taxonomy"]:
+                    print(row["classification.taxonomy"])
+            return 0
+
+        if self.args.list_types:
+            self.execute(lib.QUERY_TYPE_NAMES, extend=False)
+            for row in self.cur.fetchall():
+                if row["classification.type"]:
+                    print(row["classification.type"])
+            return 0
+
+        if locale.getpreferredencoding() != "UTF-8":
+            self.logger.error(
+                "The preferred encoding of your locale setting is not UTF-8 "
+                "but %s. Exiting.",
+                locale.getpreferredencoding(),
+            )
+            return 1
+
+        if not self.rt.login():
+            self.logger.error(
+                "Could not login as %r on %r.",
+                self.config["rt"]["user"],
+                self.config["rt"]["uri"],
+            )
+            return 2
+        else:
+            self.logger.info(
+                "Logged in as %r on %r.",
+                self.config["rt"]["user"],
+                self.config["rt"]["uri"],
+            )
+        try:
+            self.execute(lib.QUERY_OPEN_TAXONOMIES)
+            taxonomies = [x["classification.taxonomy"] for x in self.cur.fetchall()]
+            self.logger.info("All taxonomies: " + ", ".join(taxonomies))
+            for taxonomy in taxonomies:
+                self.logger.info("Handling taxonomy %r.", taxonomy)
+                if (
+                    taxonomy not in lib.SUBJECT or lib.SUBJECT[taxonomy] is None
+                ) and not self.subject:
+                    self.logger.error("No subject defined for %r." % taxonomy)
+                    continue
+                self.execute(lib.QUERY_OPEN_EVENT_REPORTS_BY_TAXONOMY, (taxonomy,))
+                report_ids = [x["rtir_report_id"] for x in self.cur.fetchall()]
+                self.execute(lib.QUERY_OPEN_EVENT_IDS_BY_TAXONOMY, (taxonomy,))
+                event_ids = [x["id"] for x in self.cur.fetchall()]
+                if self.subject:
+                    subject = self.subject
+                else:
+                    subject = "%s %s incidents on %s" "" % (
+                        len(event_ids),
+                        lib.SUBJECT[taxonomy],
+                        datetime.datetime.now().strftime("%Y-%m-%d"),
+                    )
+
+                if self.dryrun:
+                    self.logger.info("Simulate creation of incident.")
+                    incident_id = -1
+                else:
+                    incident_id = self.rt.create_ticket(
+                        Queue="Incidents",
+                        Subject=subject,
+                        Owner=self.config["rt"]["user"],
+                    )
+                    if incident_id == -1:
+                        self.logger.error("Could not create Incident %r.", subject)
+                        continue
+
+                self.logger.info("Created Incident %s." % incident_id)
+                # XXX TODO: distinguish between national and other constituencies
+                self.rt.edit_ticket(
+                    incident_id,
+                    CF_Classification=taxonomy,
+                    # CF_Constituency='NATIONAL',
+                    CF_Function="IncidentCoord",
+                )
+
+                for report_id in report_ids:
+                    if not self.dryrun and not self.rt.edit_link(
+                        report_id, "MemberOf", incident_id
+                    ):
+                        self.logger.error(
+                            "Could not link Incident to Incident Report: (%d -> %d).",
+                            incident_id,
+                            report_id,
+                        )
+                        continue
+                    elif self.dryrun:
+                        self.logger.info(
+                            "Would have linked Incident Report %d to Incident.",
+                            report_id,
+                        )
+
+                self.executemany(
+                    "UPDATE {events} SET rtir_incident_id = %s WHERE id = %s",
+                    [(incident_id, event_id) for event_id in event_ids],
+                    extend=False,
+                )
+                self.con.commit()
+                self.logger.info("Linked events to incident.")
+
+                if not self.dryrun:
+                    self.execute(
+                        lib.QUERY_DISTINCT_CONTACTS_BY_INCIDENT, (incident_id,)
+                    )
+                else:
+                    self.execute(
+                        lib.DRY_QUERY_DISTINCT_CONTACTS_BY_TAXONOMY, (taxonomy,)
+                    )
+
+                contacts = [x["source.abuse_contact"] for x in self.cur.fetchall()]
+                inv_results = []
+
+                for contact in contacts:
+                    self.logger.info("Handling contact " + contact)
+                    if not self.dryrun:
+                        self.execute(
+                            lib.QUERY_EVENTS_BY_ASCONTACT_INCIDENT,
+                            (
+                                incident_id,
+                                contact,
+                            ),
+                        )
+                    else:
+                        self.execute(
+                            lib.DRY_QUERY_EVENTS_BY_ASCONTACT_TAXONOMY,
+                            (
+                                taxonomy,
+                                contact,
+                            ),
+                        )
+                    data = self.cur.fetchall()
+                    results = self.send(taxonomy, contact, data, incident_id)
+                    if results:
+                        inv_results.append(results)
+                    else:
+                        return 1
+
+                if all(inv_results):
+                    try:
+                        if not self.dryrun and not self.rt.edit_ticket(
+                            incident_id, Status="resolved"
+                        ):
+                            self.logger.error(
+                                "Could not close incident %d.", incident_id
+                            )
+                    except IndexError:
+                        # Bug in RT/python-rt
+                        pass
+                else:
+                    self.logger.warn(
+                        "Not all investigations completed -> Can't resolve "
+                        "incident %d.",
+                        incident_id,
+                    )
+            self.execute(lib.QUERY_HALF_PROC_INCIDENTS)
+            query = [
+                (x["rtir_incident_id"], x["classification.taxonomy"])
+                for x in self.cur.fetchall()
+            ]
+            self.logger.info("All half processed incidents and taxonomy: " + str(query))
+            for incident_id, taxonomy in query:
+                self.logger.info(
+                    "Handling incident %d and taxonomy %r.", incident_id, taxonomy
+                )
+                if (
+                    taxonomy not in lib.SUBJECT or lib.SUBJECT[taxonomy] is None
+                ) and not self.args.subject:
+                    self.logger.error("No subject defined for %r." % taxonomy)
+                    continue
+
+                self.execute(lib.QUERY_DISTINCT_CONTACTS_BY_INCIDENT, (incident_id,))
+                contacts = [x["source.abuse_contact"] for x in self.cur.fetchall()]
+
+                inv_results = []
+                for contact in contacts:
+                    self.logger.info("Handling contact " + contact)
+                    self.execute(
+                        lib.QUERY_EVENTS_BY_ASCONTACT_INCIDENT,
+                        (
+                            incident_id,
+                            contact,
+                        ),
+                    )
+                    data = self.cur.fetchall()
+                    results = self.send(taxonomy, contact, data, incident_id)
+                    if results:
+                        inv_results.append(results)
+                    else:
+                        return 1
+
+                if all(inv_results):
+                    # This is a terrible solution, but looks like RT has a race-condition causing
+                    # re-opening incidents if we are too quick.
+                    sleep(5)
+                    try:
+                        if not self.dryrun and not self.rt.edit_ticket(
+                            incident_id, Status="resolved"
+                        ):
+                            self.logger.error(
+                                "Could not close incident %d.", incident_id
+                            )
+                    except IndexError:
+                        # Bug in RT/python-rt
+                        pass
+                else:
+                    self.logger.warn(
+                        "Not all investigations completed -> Can't resolve "
+                        "incident %d.",
+                        incident_id,
+                    )
+                    return 1
+            return 0
+
+        finally:
+            self.rt.logout()
+
+    def query_get_text(self, text_id):
+        self.execute(
+            lib.QUERY_GET_TEXT.format(texttab=self.config["database"]["text_table"]),
+            (text_id,),
+            extend=False,
+        )
+
+    def get_text(self, text_id):
+        text = None
+        if self.boilerplate:  # get id from parameter
+            text_id = self.boilerplate
+        self.query_get_text(text_id)
+        if self.cur.rowcount:
+            text = self.cur.fetchall()[0]["body"]
+        if not text:  # if all failed, get the default
+            self.query_get_text(self.config["database"]["default_key"])
+            if self.cur.rowcount:
+                text = self.cur.fetchall()[0]["body"]
+            else:
+                self.logger.error("Default text not found!")
+                return None
+
+        return text
+
+    def shrink_dict(self, d):
+        if not self.compress_csv:
+            return d
+        keys = list(d[0].keys())
+        empty = dict(zip(keys, [True] * len(keys)))
+        for line in d:
+            for key, value in line.items():
+                if value is not None:
+                    empty[key] = False
+        return [{k: v for k, v in dicti.items() if not empty[k]} for dicti in d]
+
+    def _preprocess_row(self, row):
+        description = row.get("event_description.text")
+        if description:
+            self._collected_descriptions.add(description.replace("\\n", "\n"))
+            # \\n - encoded new lines
+            row["event_description.text"] = description.replace("\n", " ").replace(
+                "\\n", " "
+            )
+        return row
+
+    @property
+    @lru_cache()
+    def event_description_divider(self):
+        divider = None
+        self.query_get_text("event-description-divider")
+        if self.cur.rowcount:
+            divider = self.cur.fetchall()[0]["body"]
+        return divider or "\n --- \n"
+
+    def _prepare_descriptions_text(self):
+        return self.event_description_divider.join(self._collected_descriptions)
+
+    def send(self, taxonomy, contact, query, incident_id, requestor=None):
+        if not query:
+            self.logger.error("No data!")
+            return False
+        if not requestor:
+            requestor = contact
+
+        # PREPARATION
+        query = self.shrink_dict(query)
+        ids = list(str(row["id"]) for row in query)
+
+        if self.subject:
+            subject = self.subject
+        else:
+            subject = "{tax} incidents in your network: {date}" "".format(
+                date=datetime.datetime.now().strftime("%Y-%m-%d"),
+                tax=lib.SUBJECT[taxonomy],
+            )
+        text = self.get_text(taxonomy) or ""
+        csvfile = io.StringIO()
+        writer = csv.DictWriter(
+            csvfile,
+            fieldnames=lib.CSV_FIELDS,
+            quoting=csv.QUOTE_MINIMAL,
+            delimiter=str(";"),
+            extrasaction="ignore",
+            lineterminator="\n",
+        )
+        writer.writeheader()
+        query_unicode = query
+        self._collected_descriptions = set()
+        writer.writerows(self._preprocess_row(row) for row in query)
+        event_descriptions = self._prepare_descriptions_text()
+        text = text.format(event_descriptions=event_descriptions)
+
+        # note this might contain UTF-8 chars! let's ignore utf-8 errors. sorry.
+        attachment_text = csvfile.getvalue()
+        attachment_lines = attachment_text.splitlines()
+
+        if self.verbose:
+            self.logger.info(text)
+
+        showed_text = (
+            "=" * 100
+            + """
+To: {to}
+Subject: {subj}
+
+{text}
+""".format(
+                to=requestor, subj=subject, text=text
+            )
+        )
+        showed_text_len = showed_text.count("\n")
+
+        # SHOW DATA
+        if self.table_mode:
+            if self.quiet:
+                height = 80  # assume anything for quiet mode
+            else:
+                height = lib.getTerminalHeight() - 3 - showed_text_len
+            csvfile.seek(0)
+            if len(query) > height:
+                with tempfile.NamedTemporaryFile(mode="w+") as handle:
+                    handle.write(showed_text + "\n")
+                    handle.write(
+                        tabulate.tabulate(query, headers="keys", tablefmt="psql")
+                    )
+                    handle.seek(0)
+                    subprocess.call(["less", handle.name])
+            else:
+                self.logger.info(showed_text)
+                self.logger.info(
+                    tabulate.tabulate(query_unicode, headers="keys", tablefmt="psql")
+                )
+        else:
+            if self.quiet:
+                height = 80
+            else:
+                height = lib.getTerminalHeight() - 4
+            if 5 + len(query) > height:  # cut query too, 5 is length of text
+                self.logger.info("\n".join(showed_text.splitlines()[:5]))
+                self.logger.info("...")
+                self.logger.info("\n".join(attachment_lines[: height - 5]))
+                self.logger.info("...")
+            elif showed_text_len + len(query) > height > 5 + len(query):
+                self.logger.info(
+                    "\n".join(showed_text.splitlines()[: height - len(query)])
+                )
+                self.logger.info("...")
+                self.logger.info(attachment_text)
+            else:
+                self.logger.info(showed_text)
+                self.logger.info(attachment_text)
+        self.logger.info("-" * 100)
+
+        # MENU
+        if self.batch and requestor:
+            answer = "s"
+        else:
+            answer = "q"
+            if self.batch:
+                self.logger.error("You need to set a valid requestor!")
+            else:
+                answer = input(
+                    "{i}{b}[a]{i}utomatic, {b}[n]{i}ext, {i}{b}[s]{i}end, show "
+                    "{b}[t]{i}able, change {b}[r]{i}equestor or {b}[q]{i}uit?{r} "
+                    "".format(b=bold, i=myinverted, r=reset)
+                ).strip()
+        if answer == "q":
+            exit(0)
+        elif answer == "n":
+            return False
+        elif answer == "a":
+            self.batch = True
+        elif answer == "t":
+            self.table_mode = bool((self.table_mode + 1) % 2)
+            return self.send(taxonomy, contact, query, incident_id, requestor)
+        elif answer == "r":
+            answer = input(inverted("New requestor address:") + " ").strip()
+            if len(answer) == 0:
+                requestor = contact
+            else:
+                requestor = answer
+            return self.send(taxonomy, contact, query, incident_id, requestor)
+        elif answer != "s":
+            self.logger.error("Unknow command %r.", answer)
+            return self.send(taxonomy, contact, query, incident_id, requestor)
+
+        if text is None:
+            self.logger.error("I won't send with a missing text!")
+            return False
+
+        # INVESTIGATION
+        if self.dryrun:
+            self.logger.info("Simulate creation of investigation.")
+            investigation_id = -1
+        else:
+            investigation_id = self.rt.create_ticket(
+                Queue="Investigations",
+                Subject=subject,
+                Owner=self.config["rt"].get(
+                    "investigation_owner", self.config["rt"]["user"]
+                ),
+                Requestor=requestor,
+            )
+
+            if investigation_id == -1:
+                self.logger.error("Could not create Investigation %r.", subject)
+                return False
+
+            self.logger.info("Created Investigation %d.", investigation_id)
+            if not self.rt.edit_link(incident_id, "HasMember", investigation_id):
+                self.logger.error("Could not link Investigation to Incident.")
+                return False
+
+        self.executemany(
+            "UPDATE {events} SET rtir_investigation_id = %s WHERE id = %s",
+            [(investigation_id, evid) for evid in ids],
+            extend=False,
+        )
+        self.logger.info("Linked events to investigation.")
+
+        # CORRESPOND
+        filename = "%s-%s.csv" % (
+            datetime.datetime.now().strftime("%Y-%m-%d"),
+            taxonomy,
+        )
+        if self.zipme or len(query) > self.config["rt"]["zip_threshold"]:
+            attachment = io.BytesIO()
+            ziphandle = zipfile.ZipFile(
+                attachment, mode="w", compression=zipfile.ZIP_DEFLATED
+            )
+            data = csvfile.getvalue()
+            ziphandle.writestr("events.csv", data)
+            ziphandle.close()
+            attachment.seek(0)
+            filename += ".zip"
+            mimetype = "application/octet-stream"
+        else:
+            attachment = csvfile
+            attachment.seek(0)
+            mimetype = "text/csv"
+
+        try:
+            # TODO: CC
+            if self.dryrun:
+                self.logger.info("Simulate creation of correspondence.")
+            else:
+                correspond = self.rt.reply(
+                    investigation_id,
+                    text=text,
+                    files=[(filename, attachment, mimetype)],
+                )
+                if not correspond:
+                    self.logger.error("Could not correspond with text and file.")
+                    return False
+                self.logger.info("Correspondence added to Investigation.")
+
+            self.execute(
+                "UPDATE {events} SET sent_at = LOCALTIMESTAMP WHERE "
+                "rtir_investigation_id = %s",
+                (investigation_id,),
+                extend=False,
+            )
+            self.logger.info("Marked events as sent.")
+        except Exception:
+            self.con.rollback()
+            raise
+        else:
+            self.con.commit()
+
+        # RESOLVE
+        try:
+            if not self.dryrun and not self.rt.edit_ticket(
+                investigation_id, Status="resolved"
+            ):
+                self.logger.error(
+                    "Could not close investigation %d.", investigation_id
+                )
+        except IndexError:
+            # Bug in RT/python-rt
+            pass
+
+        if requestor != contact:
+            asns = set(str(row["source.asn"]) for row in query)
+            answer = input(
+                inverted(
+                    "Save recipient {!r} for ASNs {!s}? [Y/n] "
+                    "".format(requestor, ", ".join(asns))
+                )
+            ).strip()
+            if answer.strip().lower() in ("", "y", "j"):
+                self.executemany(
+                    lib.QUERY_UPDATE_CONTACT,
+                    [(requestor, asn) for asn in asns],
+                    extend=False,
+                )
+                self.con.commit()
+                if self.cur.rowcount == 0:
+                    self.query_insert_contact(asns=asns, contact=requestor)
+
+        return True
+
+    def query_insert_contact(self, contact, asns):
+        user = os.environ["USER"]
+        time = datetime.datetime.now().strftime("%c")
+        comment = "Added by {user} @ {time}".format(user=user, time=time)
+        self.executemany(
+            lib.QUERY_INSERT_CONTACT,
+            [(asn, contact, comment) for asn in asns],
+            extend=False,
+        )
+        self.con.commit()
+
+
+def main():
+    controller = IntelMQCLIContoller()
+    sys.exit(controller.run(sys.argv[1:]))
+
+
+if __name__ == "__main__":
+    main()
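
For orientation, here is a minimal sketch of how the new intelmqcli module shown above could be driven programmatically. It assumes intelmq-extensions is installed together with a working intelmqcli configuration (events database and RT access); the list_feeds wrapper is hypothetical, while the module path, the IntelMQCLIContoller class, its run(args) signature, and the --list-feeds flag are taken from the diff.

# Minimal sketch, not part of the package: drive the controller from the diff above.
# Assumes an installed intelmq-extensions plus a valid intelmqcli configuration
# (events database and RT credentials).
import sys

from intelmq_extensions.cli.intelmqcli import IntelMQCLIContoller


def list_feeds() -> int:
    """Hypothetical helper: print all feed codes and return the CLI exit code."""
    controller = IntelMQCLIContoller()
    # "--list-feeds" takes the QUERY_FEED_NAMES branch in run() and returns 0.
    return controller.run(["--list-feeds"])


if __name__ == "__main__":
    sys.exit(list_feeds())

The same pattern applies to the other listing flags (--list-texts, --list-identifiers, --list-taxonomies, --list-types), which likewise print their values and return 0 before any RT interaction takes place.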