secator 0.6.0__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of secator has been flagged as potentially problematic; click here for more details.
- secator/celery.py +160 -185
- secator/celery_utils.py +268 -0
- secator/cli.py +327 -106
- secator/config.py +27 -11
- secator/configs/workflows/host_recon.yaml +5 -3
- secator/configs/workflows/port_scan.yaml +7 -3
- secator/configs/workflows/url_bypass.yaml +10 -0
- secator/configs/workflows/url_vuln.yaml +1 -1
- secator/decorators.py +169 -92
- secator/definitions.py +10 -3
- secator/exporters/__init__.py +7 -5
- secator/exporters/console.py +10 -0
- secator/exporters/csv.py +27 -19
- secator/exporters/gdrive.py +16 -11
- secator/exporters/json.py +3 -1
- secator/exporters/table.py +30 -2
- secator/exporters/txt.py +20 -16
- secator/hooks/gcs.py +53 -0
- secator/hooks/mongodb.py +53 -27
- secator/output_types/__init__.py +29 -11
- secator/output_types/_base.py +11 -1
- secator/output_types/error.py +36 -0
- secator/output_types/exploit.py +1 -1
- secator/output_types/info.py +24 -0
- secator/output_types/ip.py +7 -0
- secator/output_types/port.py +8 -1
- secator/output_types/progress.py +5 -0
- secator/output_types/record.py +3 -1
- secator/output_types/stat.py +33 -0
- secator/output_types/tag.py +6 -4
- secator/output_types/url.py +6 -3
- secator/output_types/vulnerability.py +3 -2
- secator/output_types/warning.py +24 -0
- secator/report.py +55 -23
- secator/rich.py +44 -39
- secator/runners/_base.py +622 -635
- secator/runners/_helpers.py +5 -91
- secator/runners/celery.py +18 -0
- secator/runners/command.py +364 -211
- secator/runners/scan.py +8 -24
- secator/runners/task.py +21 -55
- secator/runners/workflow.py +41 -40
- secator/scans/__init__.py +28 -0
- secator/serializers/dataclass.py +6 -0
- secator/serializers/json.py +10 -5
- secator/serializers/regex.py +12 -4
- secator/tasks/_categories.py +5 -2
- secator/tasks/bbot.py +293 -0
- secator/tasks/bup.py +98 -0
- secator/tasks/cariddi.py +38 -49
- secator/tasks/dalfox.py +3 -0
- secator/tasks/dirsearch.py +12 -23
- secator/tasks/dnsx.py +49 -30
- secator/tasks/dnsxbrute.py +2 -0
- secator/tasks/feroxbuster.py +8 -17
- secator/tasks/ffuf.py +3 -2
- secator/tasks/fping.py +3 -3
- secator/tasks/gau.py +5 -0
- secator/tasks/gf.py +2 -2
- secator/tasks/gospider.py +4 -0
- secator/tasks/grype.py +9 -9
- secator/tasks/h8mail.py +31 -41
- secator/tasks/httpx.py +58 -21
- secator/tasks/katana.py +18 -22
- secator/tasks/maigret.py +26 -24
- secator/tasks/mapcidr.py +2 -3
- secator/tasks/msfconsole.py +4 -16
- secator/tasks/naabu.py +3 -1
- secator/tasks/nmap.py +50 -35
- secator/tasks/nuclei.py +9 -2
- secator/tasks/searchsploit.py +17 -9
- secator/tasks/subfinder.py +5 -1
- secator/tasks/wpscan.py +79 -93
- secator/template.py +61 -45
- secator/thread.py +24 -0
- secator/utils.py +330 -80
- secator/utils_test.py +48 -23
- secator/workflows/__init__.py +28 -0
- {secator-0.6.0.dist-info → secator-0.7.0.dist-info}/METADATA +11 -5
- secator-0.7.0.dist-info/RECORD +115 -0
- {secator-0.6.0.dist-info → secator-0.7.0.dist-info}/WHEEL +1 -1
- secator-0.6.0.dist-info/RECORD +0 -101
- {secator-0.6.0.dist-info → secator-0.7.0.dist-info}/entry_points.txt +0 -0
- {secator-0.6.0.dist-info → secator-0.7.0.dist-info}/licenses/LICENSE +0 -0
secator/celery_utils.py
ADDED
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
from contextlib import nullcontext
|
|
2
|
+
from time import sleep
|
|
3
|
+
|
|
4
|
+
import kombu
|
|
5
|
+
import kombu.exceptions
|
|
6
|
+
|
|
7
|
+
from celery.result import AsyncResult, GroupResult
|
|
8
|
+
from greenlet import GreenletExit
|
|
9
|
+
from rich.panel import Panel
|
|
10
|
+
from rich.padding import Padding
|
|
11
|
+
|
|
12
|
+
from rich.progress import Progress as RichProgress, SpinnerColumn, TextColumn, TimeElapsedColumn
|
|
13
|
+
from secator.config import CONFIG
|
|
14
|
+
from secator.definitions import STATE_COLORS
|
|
15
|
+
from secator.output_types import Error
|
|
16
|
+
from secator.rich import console
|
|
17
|
+
from secator.utils import debug, traceback_as_string
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class CeleryData(object):
|
|
21
|
+
"""Utility to simplify tracking a Celery task and all of its subtasks."""
|
|
22
|
+
|
|
23
|
+
@staticmethod
def iter_results(
	result,
	ids_map=None,
	description=True,
	refresh_interval=CONFIG.runners.poll_frequency,
	print_remote_info=True,
	print_remote_title='Results'
):
	"""Generator to get results from Celery task.

	Args:
		result (Union[AsyncResult, GroupResult]): Celery result.
		ids_map (dict | None): Map of Celery task ids to task metadata
			(name, state, count, ...). A fresh dict is created when omitted.
		description (bool): Whether to show task description.
		refresh_interval (int): Refresh interval.
		print_remote_info (bool): Whether to display live results.
		print_remote_title (str): Title for the progress panel.

	Yields:
		dict: Subtasks state and results.
	"""
	# ids_map is mutated by get_task_data(); a mutable default argument
	# would leak task state across calls, so build a fresh dict per call.
	if ids_map is None:
		ids_map = {}

	# Display live results if print_remote_info is set
	if print_remote_info:
		class PanelProgress(RichProgress):
			# Wrap the default tasks table in a titled panel.
			def get_renderables(self):
				yield Padding(Panel(
					self.make_tasks_table(self.tasks),
					title=print_remote_title,
					border_style='bold gold3',
					expand=False,
					highlight=True), pad=(2, 0, 0, 0))

		progress = PanelProgress(
			SpinnerColumn('dots'),
			TextColumn('{task.fields[descr]} ') if description else '',
			TextColumn('[bold cyan]{task.fields[full_name]}[/]'),
			TextColumn('{task.fields[state]:<20}'),
			TimeElapsedColumn(),
			TextColumn('{task.fields[count]}'),
			TextColumn('{task.fields[progress]}%'),
			refresh_per_second=1,
			transient=False,
			console=console,
		)
	else:
		# No live display: use a no-op context manager.
		progress = nullcontext()

	with progress:

		# Make initial progress rows, one per known task
		if print_remote_info:
			progress_cache = CeleryData.init_progress(progress, ids_map)

		# Get live results and print progress
		for data in CeleryData.poll(result, ids_map, refresh_interval):
			yield from data['results']

			if print_remote_info:
				task_id = data['id']
				progress_id = progress_cache[task_id]
				CeleryData.update_progress(progress, progress_id, data)

		# Update all tasks to 100 %
		if print_remote_info:
			for progress_id in progress_cache.values():
				progress.update(progress_id, advance=100)
@staticmethod
|
|
92
|
+
def init_progress(progress, ids_map):
|
|
93
|
+
cache = {}
|
|
94
|
+
for task_id, data in ids_map.items():
|
|
95
|
+
pdata = data.copy()
|
|
96
|
+
state = data['state']
|
|
97
|
+
pdata['state'] = f'[{STATE_COLORS[state]}]{state}[/]'
|
|
98
|
+
id = progress.add_task('', advance=0, **pdata)
|
|
99
|
+
cache[task_id] = id
|
|
100
|
+
return cache
|
|
101
|
+
|
|
102
|
+
@staticmethod
def update_progress(progress, progress_id, data):
	"""Update rich progress with fresh data."""
	state = data['state']
	# Colorize state with rich markup, then drop falsy fields so existing
	# row values are not clobbered by empty ones.
	fields = dict(data, state=f'[{STATE_COLORS[state]}]{state}[/]')
	fields = {key: value for key, value in fields.items() if value}
	progress.update(progress_id, **fields)
@staticmethod
def poll(result, ids_map, refresh_interval):
	"""Poll Celery subtasks results in real-time. Fetch task metadata and partial results from each task that runs.

	Args:
		result (Union[AsyncResult, GroupResult]): Celery result.
		ids_map (dict): Map of Celery task ids to task metadata.
		refresh_interval (int): Seconds to sleep between polling rounds.

	Yields:
		dict: Subtasks state and results.
	"""
	while True:
		try:
			yield from CeleryData.get_all_data(result, ids_map)
			if result.ready():
				debug('result is ready', sub='celery.poll', id=result.id)
				# One last pass to pick up results published between the
				# previous fetch and the ready() transition.
				yield from CeleryData.get_all_data(result, ids_map)
				return
		except (KeyboardInterrupt, GreenletExit):
			# Propagate cancellation immediately, without sleeping first.
			debug('encountered KeyboardInterrupt or GreenletExit', sub='celery.poll')
			raise
		except Exception as e:
			# Transient polling errors are logged and retried on the next tick.
			error = Error.from_exception(e)
			debug(repr(error), sub='celery.poll')
		# Sleep only when another polling round follows. The original slept
		# in a `finally`, which delayed both normal completion and
		# KeyboardInterrupt propagation by one full interval.
		sleep(refresh_interval)
@staticmethod
def get_all_data(result, ids_map):
	"""Get Celery results from main result object, AND all subtasks results.

	Yields:
		dict: Subtasks state and results.
	"""
	# Snapshot the keys first: get_task_data() may add entries to ids_map
	# while we iterate.
	for tid in list(ids_map):
		info = CeleryData.get_task_data(tid, ids_map)
		if not info:
			continue
		debug(
			'POLL',
			sub='celery.poll',
			id=info['id'],
			obj={info['full_name']: info['state'], 'count': info['count']},
			verbose=True
		)
		yield info
@staticmethod
def get_task_data(task_id, ids_map):
	"""Get task info.

	Fetches the remote Celery state for `task_id`, merges it into the cached
	entry from `ids_map`, and marks the entry 'ready' once terminal so later
	polls skip it.

	Args:
		task_id (str): Celery task id.
		ids_map (dict): Map of Celery task ids to cached task metadata;
			mutated in place (a new empty entry is created for unknown ids,
			and the 'ready' flag is written back).

	Returns:
		dict: Task info (id, name, state, results, chunk_info, count, error, ready).
	"""

	# Get cached task data; a terminal ('ready') task is never re-fetched.
	data = ids_map.get(task_id, {})
	if not data:
		# NOTE(review): for an unknown id, `data` stays {} here, so the
		# list branch below would raise KeyError on data['name'] — presumably
		# callers only pass ids already registered in ids_map; confirm.
		ids_map[task_id] = {}
	elif data.get('ready', False):
		return

	# Get remote result
	res = AsyncResult(task_id)
	if not res:
		debug('empty response', sub='celery.data', id=task_id)
		return

	# Set up task state (reset per poll; results are re-read in full)
	data.update({
		'state': res.state,
		'ready': False,
		'results': []
	})

	# Get remote task data
	info = res.info

	# Depending on the task state, info will be either an Exception (FAILURE), a list (SUCCESS), or a dict (RUNNING).
	# - If it's an Exception, it's an unhandled error.
	# - If it's a list, it's the task results.
	# - If it's a dict, it's the custom user metadata.

	if isinstance(info, Exception):
		# Unhandled remote error: re-raise locally (caught by poll()).
		debug('unhandled exception', obj={'msg': str(info), 'tb': traceback_as_string(info)}, sub='celery.data', id=task_id)
		raise info

	elif isinstance(info, list):
		# Final results. Items are assumed to be secator output types
		# exposing `_type` and `_source` — TODO confirm against output_types.
		data['results'] = info
		errors = [e for e in info if e._type == 'error']
		status = 'FAILURE' if errors else 'SUCCESS'
		# Count only results produced by this task (source-name prefix match).
		data['count'] = len([c for c in info if c._source.startswith(data['name'])])
		data['state'] = status

	elif isinstance(info, dict):
		# Custom user metadata published by the running task.
		data.update(info)

	# Set ready flag (terminal states) and progress percentage
	ready = data['state'] in ['FAILURE', 'SUCCESS', 'REVOKED']
	data['ready'] = ready
	ids_map[task_id]['ready'] = data['ready']
	if data['ready']:
		data['progress'] = 100
	elif data['results']:
		# Latest 'progress' output type emitted by this exact task, if any.
		progresses = [e for e in data['results'] if e._type == 'progress' and e._source == data['full_name']]
		if progresses:
			data['progress'] = progresses[-1].percent

	debug('data', obj=data, sub='celery.data', id=task_id, verbose=True)
	return data
@staticmethod
|
|
238
|
+
def get_task_ids(result, ids=[]):
|
|
239
|
+
"""Get all Celery task ids recursively.
|
|
240
|
+
|
|
241
|
+
Args:
|
|
242
|
+
result (Union[AsyncResult, GroupResult]): Celery result object.
|
|
243
|
+
ids (list): List of ids.
|
|
244
|
+
"""
|
|
245
|
+
if result is None:
|
|
246
|
+
return
|
|
247
|
+
|
|
248
|
+
try:
|
|
249
|
+
if isinstance(result, GroupResult):
|
|
250
|
+
CeleryData.get_task_ids(result.parent, ids=ids)
|
|
251
|
+
|
|
252
|
+
elif isinstance(result, AsyncResult):
|
|
253
|
+
if result.id not in ids:
|
|
254
|
+
ids.append(result.id)
|
|
255
|
+
|
|
256
|
+
if hasattr(result, 'children'):
|
|
257
|
+
children = result.children
|
|
258
|
+
if isinstance(children, list):
|
|
259
|
+
for child in children:
|
|
260
|
+
CeleryData.get_task_ids(child, ids=ids)
|
|
261
|
+
|
|
262
|
+
# Browse parent
|
|
263
|
+
if hasattr(result, 'parent') and result.parent:
|
|
264
|
+
CeleryData.get_task_ids(result.parent, ids=ids)
|
|
265
|
+
|
|
266
|
+
except kombu.exceptions.DecodeError:
|
|
267
|
+
debug('kombu decode error', sub='celery.data.get_task_ids')
|
|
268
|
+
return
|