secator 0.22.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- secator/.gitignore +162 -0
- secator/__init__.py +0 -0
- secator/celery.py +453 -0
- secator/celery_signals.py +138 -0
- secator/celery_utils.py +320 -0
- secator/cli.py +2035 -0
- secator/cli_helper.py +395 -0
- secator/click.py +87 -0
- secator/config.py +670 -0
- secator/configs/__init__.py +0 -0
- secator/configs/profiles/__init__.py +0 -0
- secator/configs/profiles/aggressive.yaml +8 -0
- secator/configs/profiles/all_ports.yaml +7 -0
- secator/configs/profiles/full.yaml +31 -0
- secator/configs/profiles/http_headless.yaml +7 -0
- secator/configs/profiles/http_record.yaml +8 -0
- secator/configs/profiles/insane.yaml +8 -0
- secator/configs/profiles/paranoid.yaml +8 -0
- secator/configs/profiles/passive.yaml +11 -0
- secator/configs/profiles/polite.yaml +8 -0
- secator/configs/profiles/sneaky.yaml +8 -0
- secator/configs/profiles/tor.yaml +5 -0
- secator/configs/scans/__init__.py +0 -0
- secator/configs/scans/domain.yaml +31 -0
- secator/configs/scans/host.yaml +23 -0
- secator/configs/scans/network.yaml +30 -0
- secator/configs/scans/subdomain.yaml +27 -0
- secator/configs/scans/url.yaml +19 -0
- secator/configs/workflows/__init__.py +0 -0
- secator/configs/workflows/cidr_recon.yaml +48 -0
- secator/configs/workflows/code_scan.yaml +29 -0
- secator/configs/workflows/domain_recon.yaml +46 -0
- secator/configs/workflows/host_recon.yaml +95 -0
- secator/configs/workflows/subdomain_recon.yaml +120 -0
- secator/configs/workflows/url_bypass.yaml +15 -0
- secator/configs/workflows/url_crawl.yaml +98 -0
- secator/configs/workflows/url_dirsearch.yaml +62 -0
- secator/configs/workflows/url_fuzz.yaml +68 -0
- secator/configs/workflows/url_params_fuzz.yaml +66 -0
- secator/configs/workflows/url_secrets_hunt.yaml +23 -0
- secator/configs/workflows/url_vuln.yaml +91 -0
- secator/configs/workflows/user_hunt.yaml +29 -0
- secator/configs/workflows/wordpress.yaml +38 -0
- secator/cve.py +718 -0
- secator/decorators.py +7 -0
- secator/definitions.py +168 -0
- secator/exporters/__init__.py +14 -0
- secator/exporters/_base.py +3 -0
- secator/exporters/console.py +10 -0
- secator/exporters/csv.py +37 -0
- secator/exporters/gdrive.py +123 -0
- secator/exporters/json.py +16 -0
- secator/exporters/table.py +36 -0
- secator/exporters/txt.py +28 -0
- secator/hooks/__init__.py +0 -0
- secator/hooks/gcs.py +80 -0
- secator/hooks/mongodb.py +281 -0
- secator/installer.py +694 -0
- secator/loader.py +128 -0
- secator/output_types/__init__.py +49 -0
- secator/output_types/_base.py +108 -0
- secator/output_types/certificate.py +78 -0
- secator/output_types/domain.py +50 -0
- secator/output_types/error.py +42 -0
- secator/output_types/exploit.py +58 -0
- secator/output_types/info.py +24 -0
- secator/output_types/ip.py +47 -0
- secator/output_types/port.py +55 -0
- secator/output_types/progress.py +36 -0
- secator/output_types/record.py +36 -0
- secator/output_types/stat.py +41 -0
- secator/output_types/state.py +29 -0
- secator/output_types/subdomain.py +45 -0
- secator/output_types/tag.py +69 -0
- secator/output_types/target.py +38 -0
- secator/output_types/url.py +112 -0
- secator/output_types/user_account.py +41 -0
- secator/output_types/vulnerability.py +101 -0
- secator/output_types/warning.py +30 -0
- secator/report.py +140 -0
- secator/rich.py +130 -0
- secator/runners/__init__.py +14 -0
- secator/runners/_base.py +1240 -0
- secator/runners/_helpers.py +218 -0
- secator/runners/celery.py +18 -0
- secator/runners/command.py +1178 -0
- secator/runners/python.py +126 -0
- secator/runners/scan.py +87 -0
- secator/runners/task.py +81 -0
- secator/runners/workflow.py +168 -0
- secator/scans/__init__.py +29 -0
- secator/serializers/__init__.py +8 -0
- secator/serializers/dataclass.py +39 -0
- secator/serializers/json.py +45 -0
- secator/serializers/regex.py +25 -0
- secator/tasks/__init__.py +8 -0
- secator/tasks/_categories.py +487 -0
- secator/tasks/arjun.py +113 -0
- secator/tasks/arp.py +53 -0
- secator/tasks/arpscan.py +70 -0
- secator/tasks/bbot.py +372 -0
- secator/tasks/bup.py +118 -0
- secator/tasks/cariddi.py +193 -0
- secator/tasks/dalfox.py +87 -0
- secator/tasks/dirsearch.py +84 -0
- secator/tasks/dnsx.py +186 -0
- secator/tasks/feroxbuster.py +93 -0
- secator/tasks/ffuf.py +135 -0
- secator/tasks/fping.py +85 -0
- secator/tasks/gau.py +102 -0
- secator/tasks/getasn.py +60 -0
- secator/tasks/gf.py +36 -0
- secator/tasks/gitleaks.py +96 -0
- secator/tasks/gospider.py +84 -0
- secator/tasks/grype.py +109 -0
- secator/tasks/h8mail.py +75 -0
- secator/tasks/httpx.py +167 -0
- secator/tasks/jswhois.py +36 -0
- secator/tasks/katana.py +203 -0
- secator/tasks/maigret.py +87 -0
- secator/tasks/mapcidr.py +42 -0
- secator/tasks/msfconsole.py +179 -0
- secator/tasks/naabu.py +85 -0
- secator/tasks/nmap.py +487 -0
- secator/tasks/nuclei.py +151 -0
- secator/tasks/search_vulns.py +225 -0
- secator/tasks/searchsploit.py +109 -0
- secator/tasks/sshaudit.py +299 -0
- secator/tasks/subfinder.py +48 -0
- secator/tasks/testssl.py +283 -0
- secator/tasks/trivy.py +130 -0
- secator/tasks/trufflehog.py +240 -0
- secator/tasks/urlfinder.py +100 -0
- secator/tasks/wafw00f.py +106 -0
- secator/tasks/whois.py +34 -0
- secator/tasks/wpprobe.py +116 -0
- secator/tasks/wpscan.py +202 -0
- secator/tasks/x8.py +94 -0
- secator/tasks/xurlfind3r.py +83 -0
- secator/template.py +294 -0
- secator/thread.py +24 -0
- secator/tree.py +196 -0
- secator/utils.py +922 -0
- secator/utils_test.py +297 -0
- secator/workflows/__init__.py +29 -0
- secator-0.22.0.dist-info/METADATA +447 -0
- secator-0.22.0.dist-info/RECORD +150 -0
- secator-0.22.0.dist-info/WHEEL +4 -0
- secator-0.22.0.dist-info/entry_points.txt +2 -0
- secator-0.22.0.dist-info/licenses/LICENSE +60 -0
secator/hooks/mongodb.py
ADDED
|
@@ -0,0 +1,281 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import time
|
|
3
|
+
|
|
4
|
+
import pymongo
|
|
5
|
+
from bson.objectid import ObjectId
|
|
6
|
+
from celery import shared_task
|
|
7
|
+
|
|
8
|
+
from secator.config import CONFIG
|
|
9
|
+
from secator.output_types import OUTPUT_TYPES
|
|
10
|
+
from secator.runners import Scan, Task, Workflow
|
|
11
|
+
from secator.utils import debug, escape_mongodb_url
|
|
12
|
+
|
|
13
|
+
# import gevent.monkey
|
|
14
|
+
# gevent.monkey.patch_all()
|
|
15
|
+
|
|
16
|
+
MONGODB_URL = CONFIG.addons.mongodb.url
|
|
17
|
+
MONGODB_UPDATE_FREQUENCY = CONFIG.addons.mongodb.update_frequency
|
|
18
|
+
MONGODB_CONNECT_TIMEOUT = CONFIG.addons.mongodb.server_selection_timeout_ms
|
|
19
|
+
MONGODB_MAX_POOL_SIZE = CONFIG.addons.mongodb.max_pool_size
|
|
20
|
+
|
|
21
|
+
logger = logging.getLogger(__name__)
|
|
22
|
+
|
|
23
|
+
_mongodb_client = None
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def get_mongodb_client():
	"""Return the process-wide MongoDB client, creating it lazily on first use."""
	global _mongodb_client
	if _mongodb_client is not None:
		return _mongodb_client
	# connect=False defers the actual connection until the first operation,
	# which is fork-safe for Celery worker processes.
	_mongodb_client = pymongo.MongoClient(
		escape_mongodb_url(MONGODB_URL),
		maxPoolSize=MONGODB_MAX_POOL_SIZE,
		serverSelectionTimeoutMS=MONGODB_CONNECT_TIMEOUT,
		connect=False
	)
	return _mongodb_client
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def get_runner_dbg(runner):
	"""Build a flat debug dict for a runner: status, type, class, caller and context."""
	dbg = {runner.unique_name: runner.status}
	dbg['type'] = runner.config.type
	dbg['class'] = runner.__class__.__name__
	dbg['caller'] = runner.config.name
	# Context keys are merged last so they override the fixed keys, as in the
	# original dict-literal form.
	dbg.update(runner.context)
	return dbg
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def get_results(uuids):
	"""Get results from MongoDB based on a list of uuids.

	Args:
		uuids (list[str | Output]): List of uuids, but can also be a mix of uuids and output types.

	Returns:
		Generator of findings.
	"""
	db = get_mongodb_client().main
	output_classes = tuple(OUTPUT_TYPES)
	passthrough = []
	# Output-type instances are yielded as-is and excluded from the DB lookup.
	for entry in uuids:
		if isinstance(entry, output_classes):
			yield entry
			passthrough.append(entry)
	object_ids = [
		ObjectId(entry) for entry in uuids
		if entry not in passthrough and ObjectId.is_valid(entry)
	]
	for doc in db.findings.find({'_id': {'$in': object_ids}}):
		yield load_finding(doc)
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def update_runner(self):
	"""Insert or update the runner document (scan / workflow / task) in MongoDB.

	On the first call the runner is inserted and its new database id is stored
	back in the runner context under `<type>_id` (or `<type>_chunk_id` for
	chunked tasks); subsequent calls update the existing document in place.
	"""
	db = get_mongodb_client().main
	runner_type = self.config.type  # renamed from `type` to avoid shadowing the builtin
	collection = f'{runner_type}s'
	update = self.toDict()
	chunk = update.get('chunk')
	id_key = f'{runner_type}_chunk_id' if chunk else f'{runner_type}_id'
	_id = self.context.get(id_key)
	debug('to_update', sub='hooks.mongodb', id=_id, obj=get_runner_dbg(self), obj_after=True, obj_breaklines=False, verbose=True)  # noqa: E501
	start_time = time.time()
	if _id:
		db[collection].update_one({'_id': ObjectId(_id)}, {'$set': update})
		elapsed = time.time() - start_time
		debug(
			f'[dim gold4]updated in {elapsed:.4f}s[/]', sub='hooks.mongodb', id=_id, obj=get_runner_dbg(self), obj_after=False)  # noqa: E501
		# Remember when this runner was last synced so interval hooks can throttle.
		self.last_updated_db = start_time
	else:  # first sync: insert and save the new id back on the runner context
		runner = db[collection].insert_one(update)
		_id = str(runner.inserted_id)
		self.context[id_key] = _id
		elapsed = time.time() - start_time
		debug(f'in {elapsed:.4f}s', sub='hooks.mongodb', id=_id, obj=get_runner_dbg(self), obj_after=False)
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def update_finding(self, item):
	"""Persist a finding to MongoDB, creating it when it has no valid uuid yet.

	Returns the item, with `_uuid` set to the new document id on creation.
	"""
	if type(item) not in OUTPUT_TYPES:
		return item
	start_time = time.time()
	db = get_mongodb_client().main
	payload = item.toDict()
	finding_type = item._type
	oid = ObjectId(item._uuid) if ObjectId.is_valid(item._uuid) else None
	if oid is None:
		inserted = db['findings'].insert_one(payload)
		item._uuid = str(inserted.inserted_id)
		status = 'CREATED'
	else:
		db['findings'].update_one({'_id': oid}, {'$set': payload})
		status = 'UPDATED'
	elapsed = time.time() - start_time
	debug_obj = {
		finding_type: status,
		'type': 'finding',
		'class': self.__class__.__name__,
		'caller': self.config.name,
		**self.context
	}
	debug(f'in {elapsed:.4f}s', sub='hooks.mongodb', id=str(item._uuid), obj=debug_obj, obj_after=False)  # noqa: E501
	return item
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
def find_duplicates(self):
	"""Trigger duplicate tagging for this runner's workspace.

	Inside a Celery worker the `tag_duplicates` task is dispatched
	asynchronously; otherwise it runs inline. No-op without a workspace id.
	"""
	from secator.celery import IN_CELERY_WORKER_PROCESS
	context = self.toDict().get('context', {})
	ws_id = context.get('workspace_id')
	if not ws_id:
		return
	if IN_CELERY_WORKER_PROCESS:
		tag_duplicates.delay(ws_id)
	else:
		tag_duplicates(ws_id)
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def load_finding(obj, exclude_types=()):
	"""Deserialize a MongoDB finding document into its output-type instance.

	Args:
		obj (dict): MongoDB document; must contain '_type' and '_id'.
		exclude_types (Iterable[str]): Output type names to skip.

	Returns:
		OutputType | None: Loaded finding with `_uuid` set from the document id,
			or None when the type is unknown or excluded.
	"""
	finding_type = obj['_type']
	for otype in OUTPUT_TYPES:
		oname = otype.get_name()
		if oname in exclude_types:
			continue
		if finding_type == oname:
			item = otype.load(obj)
			item._uuid = str(obj['_id'])
			return item
	return None
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
def load_findings(objs, exclude_types=()):
	"""Deserialize MongoDB finding documents, dropping unknown/excluded ones.

	Args:
		objs (Iterable[dict]): MongoDB finding documents.
		exclude_types (Iterable[str]): Output type names to skip.

	Returns:
		list: Loaded output-type instances (None results filtered out).
	"""
	findings = (load_finding(obj, exclude_types) for obj in objs)
	return [f for f in findings if f is not None]
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
@shared_task
def tag_duplicates(ws_id: str = None, full_scan: bool = False, exclude_types=()):
	"""Tag duplicates in workspace.

	Loads the workspace's untagged findings, groups equal findings together,
	keeps one representative per group as non-duplicate and marks every other
	equal finding (including matching already-tagged workspace findings) as a
	workspace duplicate, then bulk-writes the flags back to MongoDB.

	Args:
		ws_id (str): Workspace id.
		full_scan (bool): If True, scan all findings, otherwise only untagged findings.
		exclude_types (Iterable[str]): Output type names to skip when loading findings.
	"""
	debug(f'running duplicate check on workspace {ws_id}', sub='hooks.mongodb')
	init_time = time.time()
	client = get_mongodb_client()
	db = client.main
	start_time = time.time()
	workspace_query = {'_context.workspace_id': str(ws_id), '_context.workspace_duplicate': False, '_tagged': True}
	untagged_query = {'_context.workspace_id': str(ws_id), '_tagged': {'$ne': True}}
	if full_scan:
		del untagged_query['_tagged']
	workspace_findings = load_findings(list(db.findings.find(workspace_query).sort('_timestamp', -1)), exclude_types)
	untagged_findings = load_findings(list(db.findings.find(untagged_query).sort('_timestamp', -1)), exclude_types)
	debug(
		f'Workspace non-duplicates findings: {len(workspace_findings)}, '
		f'Untagged findings: {len(untagged_findings)}. '
		f'Query time: {time.time() - start_time}s',
		sub='hooks.mongodb'
	)
	start_time = time.time()
	seen = set()  # uuids already claimed as duplicates of an earlier item (set for O(1) membership)
	db_updates = {}

	for item in untagged_findings:
		if item._uuid in seen:
			continue

		debug(
			f'Processing: {repr(item)} ({item._timestamp}) [{item._uuid}]',
			sub='hooks.mongodb',
			verbose=True
		)

		# Other untagged findings equal to this one become its duplicates.
		duplicate_ids = [
			_._uuid
			for _ in untagged_findings
			if _ == item and _._uuid != item._uuid
		]
		seen.update(duplicate_ids)

		debug(
			f'Found {len(duplicate_ids)} duplicates for item',
			sub='hooks.mongodb',
			verbose=True
		)

		# Matching findings already tagged in the workspace become duplicates too.
		duplicate_ws = [
			_ for _ in workspace_findings
			if _ == item and _._uuid != item._uuid
		]
		debug(f' --> Found {len(duplicate_ws)} workspace duplicates for item', sub='hooks.mongodb', verbose=True)

		related_ids = []
		if duplicate_ws:
			duplicate_ws_ids = [_._uuid for _ in duplicate_ws]
			duplicate_ids.extend(duplicate_ws_ids)
			# Inherit the relations of the workspace findings this item supersedes.
			for related in duplicate_ws:
				related_ids.extend(related._related)

		debug(f' --> Found {len(duplicate_ids)} total duplicates for item', sub='hooks.mongodb', verbose=True)

		# The current item is the group representative; everything else is flagged.
		db_updates[item._uuid] = {
			'_related': duplicate_ids + related_ids,
			'_context.workspace_duplicate': False,
			'_tagged': True
		}
		for uuid in duplicate_ids:
			db_updates[uuid] = {
				'_context.workspace_duplicate': True,
				'_tagged': True
			}
	debug(f'Finished processing untagged findings in {time.time() - start_time}s', sub='hooks.mongodb')
	start_time = time.time()

	debug(f'Executing {len(db_updates)} database updates', sub='hooks.mongodb')

	from pymongo import UpdateOne
	if not db_updates:
		debug('no db updates to execute', sub='hooks.mongodb')
		return

	result = db.findings.bulk_write(
		[UpdateOne({'_id': ObjectId(uuid)}, {'$set': update}) for uuid, update in db_updates.items()]
	)
	debug(result, sub='hooks.mongodb')
	debug(f'Finished running db update in {time.time() - start_time}s', sub='hooks.mongodb')
	debug(f'Finished running tag duplicates in {time.time() - init_time}s', sub='hooks.mongodb')
|
|
256
|
+
|
|
257
|
+
|
|
258
|
+
# Hook registration consumed by secator's runner framework: each runner class
# maps its lifecycle events to the MongoDB sync callbacks defined above.
# Runner state is persisted on init/start/interval/end via update_runner;
# findings are persisted via update_finding. Task is the only runner here
# that also registers 'on_item'.
HOOKS = {
	Scan: {
		'on_init': [update_runner],
		'on_start': [update_runner],
		'on_interval': [update_runner],
		'on_duplicate': [update_finding],
		'on_end': [update_runner],
	},
	Workflow: {
		'on_init': [update_runner],
		'on_start': [update_runner],
		'on_interval': [update_runner],
		'on_duplicate': [update_finding],
		'on_end': [update_runner],
	},
	Task: {
		'on_init': [update_runner],
		'on_start': [update_runner],
		'on_item': [update_finding],
		'on_duplicate': [update_finding],
		'on_interval': [update_runner],
		'on_end': [update_runner]
	}
}
|