secator 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of secator might be problematic.
- secator/__init__.py +0 -0
- secator/celery.py +482 -0
- secator/cli.py +617 -0
- secator/config.py +137 -0
- secator/configs/__init__.py +0 -0
- secator/configs/profiles/__init__.py +0 -0
- secator/configs/profiles/aggressive.yaml +7 -0
- secator/configs/profiles/default.yaml +9 -0
- secator/configs/profiles/stealth.yaml +7 -0
- secator/configs/scans/__init__.py +0 -0
- secator/configs/scans/domain.yaml +18 -0
- secator/configs/scans/host.yaml +14 -0
- secator/configs/scans/network.yaml +17 -0
- secator/configs/scans/subdomain.yaml +8 -0
- secator/configs/scans/url.yaml +12 -0
- secator/configs/workflows/__init__.py +0 -0
- secator/configs/workflows/cidr_recon.yaml +28 -0
- secator/configs/workflows/code_scan.yaml +11 -0
- secator/configs/workflows/host_recon.yaml +41 -0
- secator/configs/workflows/port_scan.yaml +34 -0
- secator/configs/workflows/subdomain_recon.yaml +33 -0
- secator/configs/workflows/url_crawl.yaml +29 -0
- secator/configs/workflows/url_dirsearch.yaml +29 -0
- secator/configs/workflows/url_fuzz.yaml +35 -0
- secator/configs/workflows/url_nuclei.yaml +11 -0
- secator/configs/workflows/url_vuln.yaml +55 -0
- secator/configs/workflows/user_hunt.yaml +10 -0
- secator/configs/workflows/wordpress.yaml +14 -0
- secator/decorators.py +309 -0
- secator/definitions.py +165 -0
- secator/exporters/__init__.py +12 -0
- secator/exporters/_base.py +3 -0
- secator/exporters/csv.py +30 -0
- secator/exporters/gdrive.py +118 -0
- secator/exporters/json.py +15 -0
- secator/exporters/table.py +7 -0
- secator/exporters/txt.py +25 -0
- secator/hooks/__init__.py +0 -0
- secator/hooks/mongodb.py +212 -0
- secator/output_types/__init__.py +24 -0
- secator/output_types/_base.py +95 -0
- secator/output_types/exploit.py +50 -0
- secator/output_types/ip.py +33 -0
- secator/output_types/port.py +45 -0
- secator/output_types/progress.py +35 -0
- secator/output_types/record.py +34 -0
- secator/output_types/subdomain.py +42 -0
- secator/output_types/tag.py +46 -0
- secator/output_types/target.py +30 -0
- secator/output_types/url.py +76 -0
- secator/output_types/user_account.py +41 -0
- secator/output_types/vulnerability.py +97 -0
- secator/report.py +107 -0
- secator/rich.py +124 -0
- secator/runners/__init__.py +12 -0
- secator/runners/_base.py +833 -0
- secator/runners/_helpers.py +153 -0
- secator/runners/command.py +638 -0
- secator/runners/scan.py +65 -0
- secator/runners/task.py +106 -0
- secator/runners/workflow.py +135 -0
- secator/serializers/__init__.py +8 -0
- secator/serializers/dataclass.py +33 -0
- secator/serializers/json.py +15 -0
- secator/serializers/regex.py +17 -0
- secator/tasks/__init__.py +10 -0
- secator/tasks/_categories.py +304 -0
- secator/tasks/cariddi.py +102 -0
- secator/tasks/dalfox.py +65 -0
- secator/tasks/dirsearch.py +90 -0
- secator/tasks/dnsx.py +56 -0
- secator/tasks/dnsxbrute.py +34 -0
- secator/tasks/feroxbuster.py +91 -0
- secator/tasks/ffuf.py +86 -0
- secator/tasks/fping.py +44 -0
- secator/tasks/gau.py +47 -0
- secator/tasks/gf.py +33 -0
- secator/tasks/gospider.py +71 -0
- secator/tasks/grype.py +79 -0
- secator/tasks/h8mail.py +81 -0
- secator/tasks/httpx.py +99 -0
- secator/tasks/katana.py +133 -0
- secator/tasks/maigret.py +78 -0
- secator/tasks/mapcidr.py +32 -0
- secator/tasks/msfconsole.py +174 -0
- secator/tasks/naabu.py +52 -0
- secator/tasks/nmap.py +344 -0
- secator/tasks/nuclei.py +97 -0
- secator/tasks/searchsploit.py +52 -0
- secator/tasks/subfinder.py +40 -0
- secator/tasks/wpscan.py +179 -0
- secator/utils.py +445 -0
- secator/utils_test.py +183 -0
- secator-0.0.1.dist-info/LICENSE +60 -0
- secator-0.0.1.dist-info/METADATA +199 -0
- secator-0.0.1.dist-info/RECORD +114 -0
- secator-0.0.1.dist-info/WHEEL +5 -0
- secator-0.0.1.dist-info/entry_points.txt +2 -0
- secator-0.0.1.dist-info/top_level.txt +2 -0
- tests/__init__.py +0 -0
- tests/integration/__init__.py +0 -0
- tests/integration/inputs.py +42 -0
- tests/integration/outputs.py +392 -0
- tests/integration/test_scans.py +82 -0
- tests/integration/test_tasks.py +103 -0
- tests/integration/test_workflows.py +163 -0
- tests/performance/__init__.py +0 -0
- tests/performance/loadtester.py +56 -0
- tests/unit/__init__.py +0 -0
- tests/unit/test_celery.py +39 -0
- tests/unit/test_scans.py +0 -0
- tests/unit/test_serializers.py +51 -0
- tests/unit/test_tasks.py +348 -0
- tests/unit/test_workflows.py +96 -0
secator/decorators.py
ADDED
@@ -0,0 +1,309 @@
from collections import OrderedDict

import rich_click as click
from rich_click.rich_click import _get_rich_console
from rich_click.rich_group import RichGroup

from secator.celery import is_celery_worker_alive
from secator.definitions import OPT_NOT_SUPPORTED
from secator.runners import Scan, Task, Workflow
from secator.utils import (deduplicate, expand_input, get_command_category,
                           get_command_cls)

RUNNER_OPTS = {
    'output': {'type': str, 'default': '', 'help': 'Output options (-o table,json,csv,gdrive)', 'short': 'o'},
    'workspace': {'type': str, 'default': 'default', 'help': 'Workspace', 'short': 'ws'},
    'json': {'is_flag': True, 'default': False, 'help': 'Enable JSON mode'},
    'orig': {'is_flag': True, 'default': False, 'help': 'Enable original output (no schema conversion)'},
    'raw': {'is_flag': True, 'default': False, 'help': 'Enable text output for piping to other tools'},
    'show': {'is_flag': True, 'default': False, 'help': 'Show command that will be run (tasks only)'},
    'format': {'default': '', 'short': 'fmt', 'help': 'Output formatting string'},
    # 'filter': {'default': '', 'short': 'f', 'help': 'Results filter', 'short': 'of'},  # TODO add this
    'quiet': {'is_flag': True, 'default': False, 'help': 'Enable quiet mode'},
}

RUNNER_GLOBAL_OPTS = {
    'sync': {'is_flag': True, 'help': 'Run tasks synchronously (automatic if no worker is alive)'},
    'worker': {'is_flag': True, 'help': 'Run tasks in worker (automatic if worker is alive)'},
    'proxy': {'type': str, 'help': 'HTTP proxy'},
    'driver': {'type': str, 'help': 'Export real-time results. E.g: "mongodb"'}
    # 'debug': {'type': int, 'default': 0, 'help': 'Debug mode'},
}

DEFAULT_CLI_OPTIONS = list(RUNNER_OPTS.keys()) + list(RUNNER_GLOBAL_OPTS.keys())


class OrderedGroup(RichGroup):
    def __init__(self, name=None, commands=None, **attrs):
        super(OrderedGroup, self).__init__(name, commands, **attrs)
        self.commands = commands or OrderedDict()

    def group(self, *args, **kwargs):
        """Behaves the same as `click.Group.group()` except if passed
        a list of names, all after the first will be aliases for the first.
        """
        def decorator(f):
            aliases = kwargs.pop('aliases', [])
            aliased_group = []
            if aliases:
                max_width = _get_rich_console().width
                # we have a list so create group aliases
                aliases_str = ', '.join(f'[bold cyan]{alias}[/]' for alias in aliases)
                padding = max_width // 4
                f.__doc__ = f'{f.__doc__:<{padding}}[dim](aliases)[/] {aliases_str}'
                for alias in aliases:
                    grp = super(OrderedGroup, self).group(
                        alias, *args, hidden=True, **kwargs)(f)
                    aliased_group.append(grp)

            # create the main group
            grp = super(OrderedGroup, self).group(*args, **kwargs)(f)
            grp.aliases = aliases

            # for all of the aliased groups, share the main group commands
            for aliased in aliased_group:
                aliased.commands = grp.commands

            return grp
        return decorator

    def list_commands(self, ctx):
        return self.commands


def get_command_options(*tasks):
    """Get unified list of command options from a list of secator tasks classes.

    Args:
        tasks (list): List of secator command classes.

    Returns:
        list: List of deduplicated options.
    """
    opt_cache = []
    all_opts = OrderedDict({})

    for cls in tasks:
        opts = OrderedDict(RUNNER_GLOBAL_OPTS, **RUNNER_OPTS, **cls.meta_opts, **cls.opts)
        for opt, opt_conf in opts.items():

            # Get opt key map if any
            opt_key_map = getattr(cls, 'opt_key_map', {})

            # Opt is not supported by this task
            if opt not in opt_key_map\
                and opt not in cls.opts\
                and opt not in RUNNER_OPTS\
                and opt not in RUNNER_GLOBAL_OPTS:
                continue

            if opt_key_map.get(opt) == OPT_NOT_SUPPORTED:
                continue

            # Get opt prefix
            prefix = None
            if opt in cls.opts:
                prefix = cls.__name__
            elif opt in cls.meta_opts:
                # TODO: Add options categories
                # category = get_command_category(cls)
                # prefix = category
                prefix = 'Meta'
            elif opt in RUNNER_OPTS:
                prefix = 'Output'
            elif opt in RUNNER_GLOBAL_OPTS:
                prefix = 'Execution'

            # Check if opt already processed before
            opt = opt.replace('_', '-')
            if opt in opt_cache:
                continue

            # Build help
            conf = opt_conf.copy()
            conf['show_default'] = True
            conf['prefix'] = prefix
            all_opts[opt] = conf
            opt_cache.append(opt)

    return all_opts


def decorate_command_options(opts):
    """Add click.option decorator to decorate click command.

    Args:
        opts (dict): Dict of command options.

    Returns:
        function: Decorator.
    """
    def decorator(f):
        reversed_opts = OrderedDict(list(opts.items())[::-1])
        for opt_name, opt_conf in reversed_opts.items():
            conf = opt_conf.copy()
            short = conf.pop('short', None)
            conf.pop('prefix', None)
            long = f'--{opt_name}'
            short = f'-{short}' if short else f'-{opt_name}'
            f = click.option(long, short, **conf)(f)
        return f
    return decorator


def task():
    def decorator(cls):
        cls.__task__ = True
        return cls
    return decorator


def register_runner(cli_endpoint, config):
    fmt_opts = {
        'print_cmd': True,
    }
    short_help = ''
    input_type = 'targets'
    input_required = True
    runner_cls = None
    tasks = []
    no_args_is_help = True

    if cli_endpoint.name == 'scan':
        # TODO: this should be refactored to scan.get_tasks_from_conf() or scan.tasks
        from secator.cli import ALL_CONFIGS
        tasks = [
            get_command_cls(task)
            for workflow in ALL_CONFIGS.workflow
            for task in Task.get_tasks_from_conf(workflow.tasks)
            if workflow.name in list(config.workflows.keys())
        ]
        input_type = 'targets'
        name = config.name
        short_help = config.description or ''
        if config.alias:
            short_help += f' [dim]alias: {config.alias}'
        fmt_opts['print_start'] = True
        fmt_opts['print_run_summary'] = True
        runner_cls = Scan

    elif cli_endpoint.name == 'workflow':
        # TODO: this should be refactored to workflow.get_tasks_from_conf() or workflow.tasks
        tasks = [
            get_command_cls(task) for task in Task.get_tasks_from_conf(config.tasks)
        ]
        input_type = 'targets'
        name = config.name
        short_help = config.description or ''
        if config.alias:
            short_help = f'{short_help:<55} [dim](alias)[/][bold cyan] {config.alias}'
        fmt_opts['print_start'] = True
        fmt_opts['print_run_summary'] = True
        runner_cls = Workflow

    elif cli_endpoint.name == 'task':
        tasks = [
            get_command_cls(config.name)
        ]
        task_cls = Task.get_task_class(config.name)
        task_category = get_command_category(task_cls)
        input_type = task_cls.input_type or 'targets'
        name = config.name
        short_help = f'[magenta]{task_category:<15}[/]{task_cls.__doc__}'
        fmt_opts['print_item_count'] = True
        runner_cls = Task
        no_args_is_help = False
        input_required = False

    options = get_command_options(*tasks)

    # TODO: maybe allow this in the future
    # def get_unknown_opts(ctx):
    #     return {
    #         (ctx.args[i][2:]
    #         if str(ctx.args[i]).startswith("--") \
    #         else ctx.args[i][1:]): ctx.args[i+1]
    #         for i in range(0, len(ctx.args), 2)
    #     }

    @click.argument(input_type, required=input_required)
    @decorate_command_options(options)
    @click.pass_context
    def func(ctx, **opts):
        opts.update(fmt_opts)
        sync = opts['sync']
        worker = opts['worker']
        # debug = opts['debug']
        ws = opts.pop('workspace')
        driver = opts.pop('driver', '')
        show = opts['show']
        context = {'workspace_name': ws}
        # TODO: maybe allow this in the future
        # unknown_opts = get_unknown_opts(ctx)
        # opts.update(unknown_opts)
        targets = opts.pop(input_type)
        targets = expand_input(targets)
        if sync or show:
            sync = True
        elif worker:
            sync = False
        else:  # automatically run in worker if it's alive
            sync = not is_celery_worker_alive()
        opts['sync'] = sync
        opts.update({
            'print_item': not sync,
            'print_line': sync,
            'print_remote_status': not sync,
            'print_start': not sync
        })

        # Build hooks from driver name
        hooks = {}
        if driver == 'mongodb':
            from secator.hooks.mongodb import MONGODB_HOOKS
            hooks = MONGODB_HOOKS

        # Build exporters
        runner = runner_cls(config, targets, run_opts=opts, hooks=hooks, context=context)
        runner.run()

    settings = {'ignore_unknown_options': False, 'allow_extra_args': False}
    cli_endpoint.command(
        name=config.name,
        context_settings=settings,
        no_args_is_help=no_args_is_help,
        short_help=short_help)(func)

    generate_rich_click_opt_groups(cli_endpoint, name, input_type, options)


def generate_rich_click_opt_groups(cli_endpoint, name, input_type, options):
    sortorder = {
        'Execution': 0,
        'Output': 1,
        'Meta': 2,
    }
    prefixes = deduplicate([opt['prefix'] for opt in options.values()])
    prefixes = sorted(prefixes, key=lambda x: sortorder.get(x, 3))
    opt_group = [
        {
            'name': 'Targets',
            'options': [input_type],
        },
    ]
    for prefix in prefixes:
        prefix_opts = [
            opt for opt, conf in options.items()
            if conf['prefix'] == prefix
        ]
        opt_names = [f'--{opt_name}' for opt_name in prefix_opts]
        if prefix == 'Execution':
            opt_names.append('--help')
        opt_group.append({
            'name': prefix + ' options',
            'options': opt_names
        })
    aliases = [cli_endpoint.name, *cli_endpoint.aliases]
    for alias in aliases:
        endpoint_name = f'secator {alias} {name}'
        click.rich_click.OPTION_GROUPS[endpoint_name] = opt_group
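To make the option plumbing above concrete, here is a minimal, hypothetical sketch (not part of the package) of how get_command_options() merges RUNNER_OPTS and RUNNER_GLOBAL_OPTS with a task class's own opts, meta_opts and opt_key_map, and how decorate_command_options() turns the result into click options. FakeTask and the fake command are invented for illustration; the sketch assumes secator 0.0.1 and its dependencies are installed.

import rich_click as click

from secator.decorators import decorate_command_options, get_command_options


class FakeTask:
    """Hypothetical task class exposing the attributes read by get_command_options()."""
    opts = {'depth': {'type': int, 'default': 2, 'help': 'Crawl depth'}}
    meta_opts = {'rate_limit': {'type': int, 'help': 'Max requests per second'}}
    opt_key_map = {'rate_limit': 'rl'}


options = get_command_options(FakeTask)  # merged and deduplicated option dict


@click.command()
@click.argument('targets')
@decorate_command_options(options)  # adds --depth, --rate-limit, --output, --sync, ...
def fake(targets, **opts):
    """Echo the resolved CLI options."""
    click.echo(f'targets={targets} opts={opts}')


if __name__ == '__main__':
    fake()

This mirrors what register_runner() does for real task, workflow and scan configs before handing the parsed options to the corresponding runner class.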
secator/definitions.py
ADDED
@@ -0,0 +1,165 @@
#!/usr/bin/python

import os

from pkg_resources import get_distribution
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv(usecwd=True), override=False)

# Globals
VERSION = get_distribution('secator').version
ASCII = f"""
                         __
   ________ _________ _/ /_____  _____
  / ___/ _ \/ ___/ __ `/ __/ __ \/ ___/
 (__  ) __/ /__/ /_/ / /_/ /_/ / /
/____/\___/\___/\__,_/\__/\____/_/     v{VERSION}

                        freelabz.com
"""  # noqa: W605,W291

# Secator folders
ROOT_FOLDER = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
CONFIGS_FOLDER = ROOT_FOLDER + '/secator/configs'
EXTRA_CONFIGS_FOLDER = os.environ.get('SECATOR_EXTRA_CONFIGS_FOLDER')
DATA_FOLDER = os.environ.get('SECATOR_DATA_FOLDER', f'{os.path.expanduser("~")}/.secator')
TASKS_FOLDER = os.environ.get('SECATOR_TASKS_FOLDER', f'{DATA_FOLDER}/tasks')
REPORTS_FOLDER = os.environ.get('SECATOR_REPORTS_FOLDER', f'{DATA_FOLDER}/reports')
WORDLISTS_FOLDER = os.environ.get('SECATOR_WORDLISTS_FOLDER', '/usr/share/seclists')
SCRIPTS_FOLDER = f'{ROOT_FOLDER}/scripts'
CVES_FOLDER = f'{DATA_FOLDER}/cves'
PAYLOADS_FOLDER = f'{DATA_FOLDER}/payloads'
REVSHELLS_FOLDER = f'{DATA_FOLDER}/revshells'
os.makedirs(DATA_FOLDER, exist_ok=True)
os.makedirs(TASKS_FOLDER, exist_ok=True)
os.makedirs(REPORTS_FOLDER, exist_ok=True)
os.makedirs(WORDLISTS_FOLDER, exist_ok=True)
os.makedirs(SCRIPTS_FOLDER, exist_ok=True)
os.makedirs(CVES_FOLDER, exist_ok=True)
os.makedirs(PAYLOADS_FOLDER, exist_ok=True)
os.makedirs(REVSHELLS_FOLDER, exist_ok=True)

# Celery local fs folders
CELERY_DATA_FOLDER = f'{DATA_FOLDER}/celery/data'
CELERY_RESULTS_FOLDER = f'{DATA_FOLDER}/celery/results'
os.makedirs(CELERY_DATA_FOLDER, exist_ok=True)
os.makedirs(CELERY_RESULTS_FOLDER, exist_ok=True)

# Environment variables
DEBUG = int(os.environ.get('DEBUG', '0'))
DEBUG_COMPONENT = os.environ.get('DEBUG_COMPONENT', '').split(',')
RECORD = bool(int(os.environ.get('RECORD', 0)))
CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'filesystem://')
CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', f'file://{CELERY_RESULTS_FOLDER}')
CELERY_BROKER_POOL_LIMIT = int(os.environ.get('CELERY_BROKER_POOL_LIMIT', 10))
CELERY_BROKER_CONNECTION_TIMEOUT = float(os.environ.get('CELERY_BROKER_CONNECTION_TIMEOUT', 4.0))
CELERY_BROKER_VISIBILITY_TIMEOUT = int(os.environ.get('CELERY_BROKER_VISIBILITY_TIMEOUT', 3600))
CELERY_OVERRIDE_DEFAULT_LOGGING = bool(int(os.environ.get('CELERY_OVERRIDE_DEFAULT_LOGGING', 1)))
GOOGLE_DRIVE_PARENT_FOLDER_ID = os.environ.get('GOOGLE_DRIVE_PARENT_FOLDER_ID')
GOOGLE_CREDENTIALS_PATH = os.environ.get('GOOGLE_CREDENTIALS_PATH')

# Defaults HTTP and Proxy settings
DEFAULT_SOCKS5_PROXY = os.environ.get('SOCKS5_PROXY', "socks5://127.0.0.1:9050")
DEFAULT_HTTP_PROXY = os.environ.get('HTTP_PROXY', "https://127.0.0.1:9080")
DEFAULT_STORE_HTTP_RESPONSES = bool(int(os.environ.get('STORE_HTTP_RESPONSES', 1)))
DEFAULT_PROXYCHAINS_COMMAND = "proxychains"
DEFAULT_FREEPROXY_TIMEOUT = 1  # seconds

# Default worker settings
DEFAULT_INPUT_CHUNK_SIZE = int(os.environ.get('DEFAULT_INPUT_CHUNK_SIZE', 1000))
DEFAULT_STDIN_TIMEOUT = 1000  # seconds

# Default tasks settings
DEFAULT_HTTPX_FLAGS = os.environ.get('DEFAULT_HTTPX_FLAGS', '-td')
DEFAULT_KATANA_FLAGS = os.environ.get('DEFAULT_KATANA_FLAGS', '-jc -js-crawl -known-files all -or -ob')
DEFAULT_NUCLEI_FLAGS = os.environ.get('DEFAULT_NUCLEI_FLAGS', '-stats -sj -si 20 -hm -or')
DEFAULT_FEROXBUSTER_FLAGS = os.environ.get('DEFAULT_FEROXBUSTER_FLAGS', '--auto-bail --no-state')
DEFAULT_PROGRESS_UPDATE_FREQUENCY = 10

# Default wordlists
DEFAULT_HTTP_WORDLIST = os.environ.get('DEFAULT_HTTP_WORDLIST', f'{WORDLISTS_FOLDER}/Fuzzing/fuzz-Bo0oM.txt')
DEFAULT_DNS_WORDLIST = os.environ.get('DEFAULT_DNS_WORDLIST', f'{WORDLISTS_FOLDER}/Discovery/DNS/combined_subdomains.txt')  # noqa:E501

# Constants
OPT_NOT_SUPPORTED = -1
OPT_PIPE_INPUT = -1

# Vocab
ALIVE = 'alive'
AUTO_CALIBRATION = 'auto_calibration'
COOKIES = 'cookies'
CONTENT_TYPE = 'content_type'
CONTENT_LENGTH = 'content_length'
CIDR_RANGE = 'cidr_range'
CPES = 'cpes'
CVES = 'cves'
DELAY = 'delay'
DOMAIN = 'domain'
DEPTH = 'depth'
EXTRA_DATA = 'extra_data'
EMAIL = 'email'
FAILED_HTTP_STATUS = -1
FILTER_CODES = 'filter_codes'
FILTER_WORDS = 'filter_words'
FOLLOW_REDIRECT = 'follow_redirect'
FILTER_REGEX = 'filter_regex'
FILTER_SIZE = 'filter_size'
HEADER = 'header'
HOST = 'host'
INPUT = 'input'
IP = 'ip'
JSON = 'json'
LINES = 'lines'
METHOD = 'method'
MATCH_CODES = 'match_codes'
MATCH_REGEX = 'match_regex'
MATCH_SIZE = 'match_size'
MATCH_WORDS = 'match_words'
OUTPUT_PATH = 'output_path'
PATH = 'path'
PAYLOAD = 'payload'
PERCENT = 'percent'
PROBE = 'probe'
PORTS = 'ports'
PORT = 'port'
PROXY = 'proxy'
QUIET = 'quiet'
RATE_LIMIT = 'rate_limit'
RETRIES = 'retries'
TAGS = 'tags'
THREADS = 'threads'
TIME = 'time'
TIMEOUT = 'timeout'
TOP_PORTS = 'top_ports'
TYPE = 'type'
URL = 'url'
USER_AGENT = 'user_agent'
USERNAME = 'username'
SCREENSHOT_PATH = 'screenshot_path'
STORED_RESPONSE_PATH = 'stored_response_path'
SCRIPT = 'script'
SERVICE_NAME = 'service_name'
SOURCES = 'sources'
STATE = 'state'
STATUS_CODE = 'status_code'
SUBDOMAIN = 'subdomain'
TECH = 'tech'
TITLE = 'title'
SITE_NAME = 'site_name'
SERVICE_NAME = 'service_name'
VULN = 'vulnerability'
CONFIDENCE = 'confidence'
CVSS_SCORE = 'cvss_score'
DESCRIPTION = 'description'
ID = 'id'
MATCHED_AT = 'matched_at'
NAME = 'name'
PROVIDER = 'provider'
REFERENCE = 'reference'
REFERENCES = 'references'
SEVERITY = 'severity'
TAGS = 'tags'
VULN_TYPE = 'type'
WEBSERVER = 'webserver'
WORDLIST = 'wordlist'
WORDS = 'words'
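Since definitions.py resolves nearly all of its settings through os.environ.get() after load_dotenv() (which reads a .env file from the current working directory but does not override variables already set in the shell), the folders and defaults above can be changed per environment. A small, hypothetical sketch of that behavior, with example override values chosen for illustration:

import os

# Example overrides; set before importing secator so the module-level
# os.environ.get() and os.makedirs() calls pick them up.
os.environ.setdefault('SECATOR_DATA_FOLDER', '/tmp/secator-data')
os.environ.setdefault('SECATOR_WORDLISTS_FOLDER', '/tmp/secator-wordlists')

from secator.definitions import CELERY_BROKER_URL, DATA_FOLDER, REPORTS_FOLDER

print(DATA_FOLDER)        # /tmp/secator-data (if SECATOR_DATA_FOLDER was not already set)
print(REPORTS_FOLDER)     # <DATA_FOLDER>/reports unless SECATOR_REPORTS_FOLDER is set
print(CELERY_BROKER_URL)  # filesystem:// unless CELERY_BROKER_URL is set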
secator/exporters/__init__.py
ADDED
@@ -0,0 +1,12 @@
__all__ = [
    'CsvExporter',
    'GdriveExporter',
    'JsonExporter',
    'TableExporter',
    'TxtExporter'
]
from secator.exporters.csv import CsvExporter
from secator.exporters.gdrive import GdriveExporter
from secator.exporters.json import JsonExporter
from secator.exporters.table import TableExporter
from secator.exporters.txt import TxtExporter
secator/exporters/csv.py
ADDED
@@ -0,0 +1,30 @@
import csv as _csv

from secator.exporters._base import Exporter
from secator.rich import console


class CsvExporter(Exporter):
    def send(self):
        title = self.report.data['info']['title']
        results = self.report.data['results']
        csv_paths = []

        for output_type, items in results.items():
            items = [i.toDict() for i in items]
            if not items:
                continue
            keys = list(items[0].keys())
            csv_path = f'{self.report.output_folder}/{title}_{output_type}_{self.report.timestamp}.csv'
            csv_paths.append(csv_path)
            with open(csv_path, 'w', newline='') as output_file:
                dict_writer = _csv.DictWriter(output_file, keys)
                dict_writer.writeheader()
                dict_writer.writerows(items)

        if len(csv_paths) == 1:
            csv_paths_str = csv_paths[0]
        else:
            csv_paths_str = '\n • ' + '\n • '.join(csv_paths)

        console.print(f':file_cabinet: Saved CSV reports to {csv_paths_str}')
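A hypothetical usage sketch (not part of the package) for the exporter above. It assumes the 3-line Exporter base class in _base.py, which is not shown in this diff, simply stores the report it receives as self.report, and it fakes the report attributes that CsvExporter.send() reads (data, output_folder, timestamp). All names and values below are invented for illustration.

from secator.exporters import CsvExporter


class FakeItem:
    """Hypothetical result item; CsvExporter calls toDict() on each item."""
    def __init__(self, url, status_code):
        self.url = url
        self.status_code = status_code

    def toDict(self):
        return {'url': self.url, 'status_code': self.status_code}


class FakeReport:
    """Hypothetical report carrying the fields read by CsvExporter.send()."""
    data = {
        'info': {'title': 'url_crawl'},
        'results': {'url': [FakeItem('http://example.com', 200)]},
    }
    output_folder = '/tmp'
    timestamp = '2023_01_01-10_00_00'


CsvExporter(FakeReport()).send()
# -> writes /tmp/url_crawl_url_2023_01_01-10_00_00.csv and prints its path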
secator/exporters/gdrive.py
ADDED
@@ -0,0 +1,118 @@
import os
import csv
import yaml

from secator.definitions import GOOGLE_CREDENTIALS_PATH, GOOGLE_DRIVE_PARENT_FOLDER_ID
from secator.exporters._base import Exporter
from secator.rich import console
from secator.utils import pluralize


class GdriveExporter(Exporter):
    def send(self):
        import gspread
        ws = self.report.workspace_name
        info = self.report.data['info']
        title = self.report.data['info']['title']
        sheet_title = f'{self.report.data["info"]["title"]}_{self.report.timestamp}'
        results = self.report.data['results']
        if not GOOGLE_CREDENTIALS_PATH:
            console.print(':file_cabinet: Missing GOOGLE_CREDENTIALS_PATH to save to Google Sheets', style='red')
            return
        if not GOOGLE_DRIVE_PARENT_FOLDER_ID:
            console.print(':file_cabinet: Missing GOOGLE_DRIVE_PARENT_FOLDER_ID to save to Google Sheets.', style='red')
            return
        client = gspread.service_account(GOOGLE_CREDENTIALS_PATH)

        # Create workspace folder if it doesn't exist
        folder_id = self.get_folder_by_name(ws, parent_id=GOOGLE_DRIVE_PARENT_FOLDER_ID)
        if ws and not folder_id:
            folder_id = self.create_folder(
                folder_name=ws,
                parent_id=GOOGLE_DRIVE_PARENT_FOLDER_ID)

        # Create worksheet
        sheet = client.create(title, folder_id=folder_id)

        # Add options worksheet for input data
        info = self.report.data['info']
        info['targets'] = '\n'.join(info['targets'])
        info['opts'] = yaml.dump(info['opts'])
        keys = [k.replace('_', ' ').upper() for k in list(info.keys())]
        ws = sheet.add_worksheet('OPTIONS', rows=2, cols=len(keys))
        sheet.values_update(
            ws.title,
            params={'valueInputOption': 'USER_ENTERED'},
            body={'values': [keys, list(info.values())]}
        )

        # Add one worksheet per output type
        for output_type, items in results.items():
            items = [i.toDict() for i in items]
            if not items:
                continue
            keys = [
                k.replace('_', ' ').upper()
                for k in list(items[0].keys())
            ]
            csv_path = f'{self.report.output_folder}/{title}_{output_type}_{self.report.timestamp}.csv'
            if not os.path.exists(csv_path):
                console.print(
                    f'Unable to find CSV at {csv_path}. For Google sheets reports, please enable CSV reports as well.')
                return
            sheet_title = pluralize(output_type).upper()
            ws = sheet.add_worksheet(sheet_title, rows=len(items), cols=len(keys))
            with open(csv_path, 'r') as f:
                data = csv.reader(f)
                data = list(data)
                data[0] = [
                    k.replace('_', ' ').upper()
                    for k in data[0]
                ]
                sheet.values_update(
                    ws.title,
                    params={'valueInputOption': 'USER_ENTERED'},
                    body={'values': data}
                )

        # Delete 'default' worksheet
        ws = sheet.get_worksheet(0)
        sheet.del_worksheet(ws)

        console.print(f':file_cabinet: Saved Google Sheets reports to [u magenta]{sheet.url}[/]')

    def create_folder(self, folder_name, parent_id=None):
        from googleapiclient.discovery import build
        from google.oauth2 import service_account
        creds = service_account.Credentials.from_service_account_file(GOOGLE_CREDENTIALS_PATH)
        service = build('drive', 'v3', credentials=creds)
        body = {
            'name': folder_name,
            'mimeType': "application/vnd.google-apps.folder"
        }
        if parent_id:
            body['parents'] = [parent_id]
        folder = service.files().create(body=body, fields='id').execute()
        return folder['id']

    def list_folders(self, parent_id):
        from googleapiclient.discovery import build
        from google.oauth2 import service_account
        creds = service_account.Credentials.from_service_account_file(GOOGLE_CREDENTIALS_PATH)
        service = build('drive', 'v3', credentials=creds)
        driveid = service.files().get(fileId='root').execute()['id']
        response = service.files().list(
            q=f"'{parent_id}' in parents and mimeType='application/vnd.google-apps.folder'",
            driveId=driveid,
            corpora='drive',
            includeItemsFromAllDrives=True,
            supportsAllDrives=True
        ).execute()
        return response

    def get_folder_by_name(self, name, parent_id=None):
        response = self.list_folders(parent_id=parent_id)
        existing = [i for i in response['files'] if i['name'] == name]
        if existing:
            return existing[0]['id']
        return None
secator/exporters/json.py
ADDED
@@ -0,0 +1,15 @@
from secator.exporters._base import Exporter
from secator.rich import console
from secator.serializers.dataclass import dumps_dataclass


class JsonExporter(Exporter):
    def send(self):
        title = self.report.data['info']['title']
        json_path = f'{self.report.output_folder}/{title}_{self.report.timestamp}.json'

        # Save JSON report to file
        with open(json_path, 'w') as f:
            f.write(dumps_dataclass(self.report.data, indent=2))

        console.print(f':file_cabinet: Saved JSON report to {json_path}')
secator/exporters/txt.py
ADDED
@@ -0,0 +1,25 @@
from secator.exporters._base import Exporter
from secator.rich import console


class TxtExporter(Exporter):
    def send(self):
        title = self.report.data['info']['title']
        results = self.report.data['results']
        txt_paths = []

        for output_type, items in results.items():
            items = [str(i) for i in items]
            if not items:
                continue
            txt_path = f'{self.report.output_folder}/{title}_{output_type}_{self.report.timestamp}.txt'
            with open(txt_path, 'w') as f:
                f.write('\n'.join(items))
            txt_paths.append(txt_path)

        if len(txt_paths) == 1:
            txt_paths_str = txt_paths[0]
        else:
            txt_paths_str = '\n • ' + '\n • '.join(txt_paths)

        console.print(f':file_cabinet: Saved TXT reports to {txt_paths_str}')