secator-0.1.0-py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of secator might be problematic.
- secator/.gitignore +162 -0
- secator/__init__.py +0 -0
- secator/celery.py +421 -0
- secator/cli.py +927 -0
- secator/config.py +137 -0
- secator/configs/__init__.py +0 -0
- secator/configs/profiles/__init__.py +0 -0
- secator/configs/profiles/aggressive.yaml +7 -0
- secator/configs/profiles/default.yaml +9 -0
- secator/configs/profiles/stealth.yaml +7 -0
- secator/configs/scans/__init__.py +0 -0
- secator/configs/scans/domain.yaml +18 -0
- secator/configs/scans/host.yaml +14 -0
- secator/configs/scans/network.yaml +17 -0
- secator/configs/scans/subdomain.yaml +8 -0
- secator/configs/scans/url.yaml +12 -0
- secator/configs/workflows/__init__.py +0 -0
- secator/configs/workflows/cidr_recon.yaml +28 -0
- secator/configs/workflows/code_scan.yaml +11 -0
- secator/configs/workflows/host_recon.yaml +41 -0
- secator/configs/workflows/port_scan.yaml +34 -0
- secator/configs/workflows/subdomain_recon.yaml +33 -0
- secator/configs/workflows/url_crawl.yaml +29 -0
- secator/configs/workflows/url_dirsearch.yaml +29 -0
- secator/configs/workflows/url_fuzz.yaml +35 -0
- secator/configs/workflows/url_nuclei.yaml +11 -0
- secator/configs/workflows/url_vuln.yaml +55 -0
- secator/configs/workflows/user_hunt.yaml +10 -0
- secator/configs/workflows/wordpress.yaml +14 -0
- secator/decorators.py +346 -0
- secator/definitions.py +183 -0
- secator/exporters/__init__.py +12 -0
- secator/exporters/_base.py +3 -0
- secator/exporters/csv.py +29 -0
- secator/exporters/gdrive.py +118 -0
- secator/exporters/json.py +14 -0
- secator/exporters/table.py +7 -0
- secator/exporters/txt.py +24 -0
- secator/hooks/__init__.py +0 -0
- secator/hooks/mongodb.py +212 -0
- secator/output_types/__init__.py +24 -0
- secator/output_types/_base.py +95 -0
- secator/output_types/exploit.py +50 -0
- secator/output_types/ip.py +33 -0
- secator/output_types/port.py +45 -0
- secator/output_types/progress.py +35 -0
- secator/output_types/record.py +34 -0
- secator/output_types/subdomain.py +42 -0
- secator/output_types/tag.py +46 -0
- secator/output_types/target.py +30 -0
- secator/output_types/url.py +76 -0
- secator/output_types/user_account.py +41 -0
- secator/output_types/vulnerability.py +97 -0
- secator/report.py +95 -0
- secator/rich.py +123 -0
- secator/runners/__init__.py +12 -0
- secator/runners/_base.py +873 -0
- secator/runners/_helpers.py +154 -0
- secator/runners/command.py +674 -0
- secator/runners/scan.py +67 -0
- secator/runners/task.py +107 -0
- secator/runners/workflow.py +137 -0
- secator/serializers/__init__.py +8 -0
- secator/serializers/dataclass.py +33 -0
- secator/serializers/json.py +15 -0
- secator/serializers/regex.py +17 -0
- secator/tasks/__init__.py +10 -0
- secator/tasks/_categories.py +304 -0
- secator/tasks/cariddi.py +102 -0
- secator/tasks/dalfox.py +66 -0
- secator/tasks/dirsearch.py +88 -0
- secator/tasks/dnsx.py +56 -0
- secator/tasks/dnsxbrute.py +34 -0
- secator/tasks/feroxbuster.py +89 -0
- secator/tasks/ffuf.py +85 -0
- secator/tasks/fping.py +44 -0
- secator/tasks/gau.py +43 -0
- secator/tasks/gf.py +34 -0
- secator/tasks/gospider.py +71 -0
- secator/tasks/grype.py +78 -0
- secator/tasks/h8mail.py +80 -0
- secator/tasks/httpx.py +104 -0
- secator/tasks/katana.py +128 -0
- secator/tasks/maigret.py +78 -0
- secator/tasks/mapcidr.py +32 -0
- secator/tasks/msfconsole.py +176 -0
- secator/tasks/naabu.py +52 -0
- secator/tasks/nmap.py +341 -0
- secator/tasks/nuclei.py +97 -0
- secator/tasks/searchsploit.py +53 -0
- secator/tasks/subfinder.py +40 -0
- secator/tasks/wpscan.py +177 -0
- secator/utils.py +404 -0
- secator/utils_test.py +183 -0
- secator-0.1.0.dist-info/METADATA +379 -0
- secator-0.1.0.dist-info/RECORD +99 -0
- secator-0.1.0.dist-info/WHEEL +5 -0
- secator-0.1.0.dist-info/entry_points.txt +2 -0
- secator-0.1.0.dist-info/licenses/LICENSE +60 -0
secator/decorators.py
ADDED
@@ -0,0 +1,346 @@
import sys
from collections import OrderedDict

import rich_click as click
from rich_click.rich_click import _get_rich_console
from rich_click.rich_group import RichGroup

from secator.definitions import (MONGODB_ADDON_ENABLED, OPT_NOT_SUPPORTED,
                                 WORKER_ADDON_ENABLED)
from secator.runners import Scan, Task, Workflow
from secator.utils import (deduplicate, expand_input, get_command_category,
                           get_command_cls)

RUNNER_OPTS = {
    'output': {'type': str, 'default': '', 'help': 'Output options (-o table,json,csv,gdrive)', 'short': 'o'},
    'workspace': {'type': str, 'default': 'default', 'help': 'Workspace', 'short': 'ws'},
    'json': {'is_flag': True, 'default': False, 'help': 'Enable JSON mode'},
    'orig': {'is_flag': True, 'default': False, 'help': 'Enable original output (no schema conversion)'},
    'raw': {'is_flag': True, 'default': False, 'help': 'Enable text output for piping to other tools'},
    'show': {'is_flag': True, 'default': False, 'help': 'Show command that will be run (tasks only)'},
    'format': {'default': '', 'short': 'fmt', 'help': 'Output formatting string'},
    # 'filter': {'default': '', 'short': 'f', 'help': 'Results filter', 'short': 'of'}, # TODO add this
    'quiet': {'is_flag': True, 'default': False, 'help': 'Enable quiet mode'},
}

RUNNER_GLOBAL_OPTS = {
    'sync': {'is_flag': True, 'help': 'Run tasks synchronously (automatic if no worker is alive)'},
    'worker': {'is_flag': True, 'help': 'Run tasks in worker (automatic if worker is alive)'},
    'proxy': {'type': str, 'help': 'HTTP proxy'},
    'driver': {'type': str, 'help': 'Export real-time results. E.g: "mongodb"'}
    # 'debug': {'type': int, 'default': 0, 'help': 'Debug mode'},
}

DEFAULT_CLI_OPTIONS = list(RUNNER_OPTS.keys()) + list(RUNNER_GLOBAL_OPTS.keys())


class OrderedGroup(RichGroup):
    def __init__(self, name=None, commands=None, **attrs):
        super(OrderedGroup, self).__init__(name, commands, **attrs)
        self.commands = commands or OrderedDict()

    def command(self, *args, **kwargs):
        """Behaves the same as `click.Group.command()` but supports aliases.
        """
        def decorator(f):
            aliases = kwargs.pop("aliases", None)
            if aliases:
                max_width = _get_rich_console().width
                aliases_str = ', '.join(f'[bold cyan]{alias}[/]' for alias in aliases)
                padding = max_width // 4

                name = kwargs.pop("name", None)
                if not name:
                    raise click.UsageError("`name` command argument is required when using aliases.")

                f.__doc__ = f.__doc__ or 'N/A'
                f.__doc__ = f'{f.__doc__:<{padding}}[dim](aliases)[/] {aliases_str}'
                base_command = super(OrderedGroup, self).command(
                    name, *args, **kwargs
                )(f)
                for alias in aliases:
                    cmd = super(OrderedGroup, self).command(alias, *args, hidden=True, **kwargs)(f)
                    cmd.help = f"Alias for '{name}'.\n\n{cmd.help}"
                    cmd.params = base_command.params

            else:
                cmd = super(OrderedGroup, self).command(*args, **kwargs)(f)

            return cmd
        return decorator

    def group(self, *args, **kwargs):
        """Behaves the same as `click.Group.group()` but supports aliases.
        """
        def decorator(f):
            aliases = kwargs.pop('aliases', [])
            aliased_group = []
            if aliases:
                max_width = _get_rich_console().width
                aliases_str = ', '.join(f'[bold cyan]{alias}[/]' for alias in aliases)
                padding = max_width // 4
                f.__doc__ = f.__doc__ or 'N/A'
                f.__doc__ = f'{f.__doc__:<{padding}}[dim](aliases)[/] {aliases_str}'
                for alias in aliases:
                    grp = super(OrderedGroup, self).group(
                        alias, *args, hidden=True, **kwargs)(f)
                    aliased_group.append(grp)

            # create the main group
            grp = super(OrderedGroup, self).group(*args, **kwargs)(f)
            grp.aliases = aliases

            # for all of the aliased groups, share the main group commands
            for aliased in aliased_group:
                aliased.commands = grp.commands

            return grp
        return decorator

    def list_commands(self, ctx):
        return self.commands


def get_command_options(*tasks):
    """Get unified list of command options from a list of secator tasks classes.

    Args:
        tasks (list): List of secator command classes.

    Returns:
        list: List of deduplicated options.
    """
    opt_cache = []
    all_opts = OrderedDict({})

    for cls in tasks:
        opts = OrderedDict(RUNNER_GLOBAL_OPTS, **RUNNER_OPTS, **cls.meta_opts, **cls.opts)
        for opt, opt_conf in opts.items():

            # Get opt key map if any
            opt_key_map = getattr(cls, 'opt_key_map', {})

            # Opt is not supported by this task
            if opt not in opt_key_map\
                and opt not in cls.opts\
                and opt not in RUNNER_OPTS\
                and opt not in RUNNER_GLOBAL_OPTS:
                continue

            if opt_key_map.get(opt) == OPT_NOT_SUPPORTED:
                continue

            # Get opt prefix
            prefix = None
            if opt in cls.opts:
                prefix = cls.__name__
            elif opt in cls.meta_opts:
                # TODO: Add options categories
                # category = get_command_category(cls)
                # prefix = category
                prefix = 'Meta'
            elif opt in RUNNER_OPTS:
                prefix = 'Output'
            elif opt in RUNNER_GLOBAL_OPTS:
                prefix = 'Execution'

            # Check if opt already processed before
            opt = opt.replace('_', '-')
            if opt in opt_cache:
                continue

            # Build help
            conf = opt_conf.copy()
            conf['show_default'] = True
            conf['prefix'] = prefix
            all_opts[opt] = conf
            opt_cache.append(opt)

    return all_opts


def decorate_command_options(opts):
    """Add click.option decorator to decorate click command.

    Args:
        opts (dict): Dict of command options.

    Returns:
        function: Decorator.
    """
    def decorator(f):
        reversed_opts = OrderedDict(list(opts.items())[::-1])
        for opt_name, opt_conf in reversed_opts.items():
            conf = opt_conf.copy()
            short = conf.pop('short', None)
            conf.pop('internal', False)
            conf.pop('prefix', None)
            long = f'--{opt_name}'
            short = f'-{short}' if short else f'-{opt_name}'
            f = click.option(long, short, **conf)(f)
        return f
    return decorator


def task():
    def decorator(cls):
        cls.__task__ = True
        return cls
    return decorator


def register_runner(cli_endpoint, config):
    fmt_opts = {
        'print_cmd': True,
    }
    short_help = ''
    input_type = 'targets'
    input_required = True
    runner_cls = None
    tasks = []
    no_args_is_help = True

    if cli_endpoint.name == 'scan':
        # TODO: this should be refactored to scan.get_tasks_from_conf() or scan.tasks
        from secator.cli import ALL_CONFIGS
        tasks = [
            get_command_cls(task)
            for workflow in ALL_CONFIGS.workflow
            for task in Task.get_tasks_from_conf(workflow.tasks)
            if workflow.name in list(config.workflows.keys())
        ]
        input_type = 'targets'
        name = config.name
        short_help = config.description or ''
        if config.alias:
            short_help += f' [dim]alias: {config.alias}'
        fmt_opts['print_start'] = True
        fmt_opts['print_run_summary'] = True
        fmt_opts['print_progress'] = False
        runner_cls = Scan

    elif cli_endpoint.name == 'workflow':
        # TODO: this should be refactored to workflow.get_tasks_from_conf() or workflow.tasks
        tasks = [
            get_command_cls(task) for task in Task.get_tasks_from_conf(config.tasks)
        ]
        input_type = 'targets'
        name = config.name
        short_help = config.description or ''
        if config.alias:
            short_help = f'{short_help:<55} [dim](alias)[/][bold cyan] {config.alias}'
        fmt_opts['print_start'] = True
        fmt_opts['print_run_summary'] = True
        fmt_opts['print_progress'] = False
        runner_cls = Workflow

    elif cli_endpoint.name == 'task':
        tasks = [
            get_command_cls(config.name)
        ]
        task_cls = Task.get_task_class(config.name)
        task_category = get_command_category(task_cls)
        input_type = task_cls.input_type or 'targets'
        name = config.name
        short_help = f'[magenta]{task_category:<15}[/]{task_cls.__doc__}'
        fmt_opts['print_item_count'] = True
        runner_cls = Task
        no_args_is_help = False
        input_required = False

    options = get_command_options(*tasks)

    # TODO: maybe allow this in the future
    # def get_unknown_opts(ctx):
    #     return {
    #         (ctx.args[i][2:]
    #          if str(ctx.args[i]).startswith("--") \
    #          else ctx.args[i][1:]): ctx.args[i+1]
    #         for i in range(0, len(ctx.args), 2)
    #     }

    @click.argument(input_type, required=input_required)
    @decorate_command_options(options)
    @click.pass_context
    def func(ctx, **opts):
        opts.update(fmt_opts)
        sync = opts['sync']
        worker = opts['worker']
        # debug = opts['debug']
        ws = opts.pop('workspace')
        driver = opts.pop('driver', '')
        show = opts['show']
        context = {'workspace_name': ws}
        # TODO: maybe allow this in the future
        # unknown_opts = get_unknown_opts(ctx)
        # opts.update(unknown_opts)
        targets = opts.pop(input_type)
        targets = expand_input(targets)
        if sync or show or not WORKER_ADDON_ENABLED:
            sync = True
        elif worker:
            sync = False
        else:  # automatically run in worker if it's alive
            from secator.celery import is_celery_worker_alive
            sync = not is_celery_worker_alive()
        opts['sync'] = sync
        opts.update({
            'print_item': not sync,
            'print_line': sync,
            'print_remote_status': not sync,
            'print_start': not sync
        })

        # Build hooks from driver name
        hooks = {}
        if driver == 'mongodb':
            if not MONGODB_ADDON_ENABLED:
                _get_rich_console().print('[bold red]Missing MongoDB dependencies: please run `secator install addons mongodb`[/].')
                sys.exit(1)
            from secator.hooks.mongodb import MONGODB_HOOKS
            hooks = MONGODB_HOOKS

        # Build exporters
        runner = runner_cls(config, targets, run_opts=opts, hooks=hooks, context=context)
        runner.run()

    settings = {'ignore_unknown_options': False, 'allow_extra_args': False}
    cli_endpoint.command(
        name=config.name,
        context_settings=settings,
        no_args_is_help=no_args_is_help,
        short_help=short_help)(func)

    generate_rich_click_opt_groups(cli_endpoint, name, input_type, options)


def generate_rich_click_opt_groups(cli_endpoint, name, input_type, options):
    sortorder = {
        'Execution': 0,
        'Output': 1,
        'Meta': 2,
    }
    prefixes = deduplicate([opt['prefix'] for opt in options.values()])
    prefixes = sorted(prefixes, key=lambda x: sortorder.get(x, 3))
    opt_group = [
        {
            'name': 'Targets',
            'options': [input_type],
        },
    ]
    for prefix in prefixes:
        prefix_opts = [
            opt for opt, conf in options.items()
            if conf['prefix'] == prefix
        ]
        opt_names = [f'--{opt_name}' for opt_name in prefix_opts]
        if prefix == 'Execution':
            opt_names.append('--help')
        opt_group.append({
            'name': prefix + ' options',
            'options': opt_names
        })
    aliases = [cli_endpoint.name, *cli_endpoint.aliases]
    for alias in aliases:
        endpoint_name = f'secator {alias} {name}'
        click.rich_click.OPTION_GROUPS[endpoint_name] = opt_group

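Note: the sketch below is not part of the package; it only illustrates how get_command_options() and decorate_command_options() from the file above can turn a task class's option definitions into click options. DummyTask and the hello command are hypothetical stand-ins; only the handling of opts, meta_opts and opt_key_map mirrors the code above (a meta opt must appear in opt_key_map, and not map to OPT_NOT_SUPPORTED, to be kept).

import rich_click as click

from secator.decorators import decorate_command_options, get_command_options


class DummyTask:
    # Attributes read by get_command_options(); real secator task classes define these.
    opts = {'depth': {'type': int, 'default': 2, 'help': 'Crawl depth'}}
    meta_opts = {'timeout': {'type': int, 'default': 10, 'help': 'Request timeout (seconds)'}}
    opt_key_map = {'timeout': 'timeout'}  # expose the meta opt; unmapped meta opts are skipped


@click.command()
@decorate_command_options(get_command_options(DummyTask))
def hello(**opts):
    # Dashes in option names come back as underscores, e.g. --depth -> opts['depth'].
    click.echo(opts)


if __name__ == '__main__':
    hello()

Running the hypothetical command with --help would list the DummyTask, Meta, Output and Execution options built from RUNNER_OPTS and RUNNER_GLOBAL_OPTS above.
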
secator/definitions.py
ADDED
@@ -0,0 +1,183 @@
#!/usr/bin/python

import os

from dotenv import find_dotenv, load_dotenv
from pkg_resources import get_distribution

load_dotenv(find_dotenv(usecwd=True), override=False)

# Globals
VERSION = get_distribution('secator').version
ASCII = f"""
                         __
   ________  _________ _/ /_____  _____
  / ___/ _ \/ ___/ __ `/ __/ __ \/ ___/
 (__  )  __/ /__/ /_/ / /_/ /_/ / /
/____/\___/\___/\__,_/\__/\____/_/     v{VERSION}

                         freelabz.com
"""  # noqa: W605,W291

# Secator folders
ROOT_FOLDER = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
LIB_FOLDER = ROOT_FOLDER + '/secator'
CONFIGS_FOLDER = LIB_FOLDER + '/configs'
EXTRA_CONFIGS_FOLDER = os.environ.get('SECATOR_EXTRA_CONFIGS_FOLDER')
DATA_FOLDER = os.environ.get('SECATOR_DATA_FOLDER', f'{os.path.expanduser("~")}/.secator')
REPORTS_FOLDER = os.environ.get('SECATOR_REPORTS_FOLDER', f'{DATA_FOLDER}/reports')
WORDLISTS_FOLDER = os.environ.get('SECATOR_WORDLISTS_FOLDER', f'{DATA_FOLDER}/wordlists')
SCRIPTS_FOLDER = f'{ROOT_FOLDER}/scripts'
CVES_FOLDER = f'{DATA_FOLDER}/cves'
PAYLOADS_FOLDER = f'{DATA_FOLDER}/payloads'
REVSHELLS_FOLDER = f'{DATA_FOLDER}/revshells'
TESTS_FOLDER = f'{ROOT_FOLDER}/tests'
os.makedirs(DATA_FOLDER, exist_ok=True)
os.makedirs(REPORTS_FOLDER, exist_ok=True)
os.makedirs(WORDLISTS_FOLDER, exist_ok=True)
os.makedirs(SCRIPTS_FOLDER, exist_ok=True)
os.makedirs(CVES_FOLDER, exist_ok=True)
os.makedirs(PAYLOADS_FOLDER, exist_ok=True)
os.makedirs(REVSHELLS_FOLDER, exist_ok=True)

# Celery local fs folders
CELERY_DATA_FOLDER = f'{DATA_FOLDER}/celery/data'
CELERY_RESULTS_FOLDER = f'{DATA_FOLDER}/celery/results'
os.makedirs(CELERY_DATA_FOLDER, exist_ok=True)
os.makedirs(CELERY_RESULTS_FOLDER, exist_ok=True)

# Environment variables
DEBUG = int(os.environ.get('DEBUG', '0'))
DEBUG_COMPONENT = os.environ.get('DEBUG_COMPONENT', '').split(',')
RECORD = bool(int(os.environ.get('RECORD', 0)))
CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'filesystem://')
CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', f'file://{CELERY_RESULTS_FOLDER}')
CELERY_BROKER_POOL_LIMIT = int(os.environ.get('CELERY_BROKER_POOL_LIMIT', 10))
CELERY_BROKER_CONNECTION_TIMEOUT = float(os.environ.get('CELERY_BROKER_CONNECTION_TIMEOUT', 4.0))
CELERY_BROKER_VISIBILITY_TIMEOUT = int(os.environ.get('CELERY_BROKER_VISIBILITY_TIMEOUT', 3600))
CELERY_OVERRIDE_DEFAULT_LOGGING = bool(int(os.environ.get('CELERY_OVERRIDE_DEFAULT_LOGGING', 1)))
GOOGLE_DRIVE_PARENT_FOLDER_ID = os.environ.get('GOOGLE_DRIVE_PARENT_FOLDER_ID')
GOOGLE_CREDENTIALS_PATH = os.environ.get('GOOGLE_CREDENTIALS_PATH')

# Defaults HTTP and Proxy settings
DEFAULT_SOCKS5_PROXY = os.environ.get('SOCKS5_PROXY', "socks5://127.0.0.1:9050")
DEFAULT_HTTP_PROXY = os.environ.get('HTTP_PROXY', "https://127.0.0.1:9080")
DEFAULT_STORE_HTTP_RESPONSES = bool(int(os.environ.get('DEFAULT_STORE_HTTP_RESPONSES', 1)))
DEFAULT_PROXYCHAINS_COMMAND = "proxychains"
DEFAULT_FREEPROXY_TIMEOUT = 1  # seconds

# Default worker settings
DEFAULT_INPUT_CHUNK_SIZE = int(os.environ.get('DEFAULT_INPUT_CHUNK_SIZE', 1000))
DEFAULT_STDIN_TIMEOUT = 1000  # seconds

# Default tasks settings
DEFAULT_HTTPX_FLAGS = os.environ.get('DEFAULT_HTTPX_FLAGS', '-td')
DEFAULT_KATANA_FLAGS = os.environ.get('DEFAULT_KATANA_FLAGS', '-jc -js-crawl -known-files all -or -ob')
DEFAULT_NUCLEI_FLAGS = os.environ.get('DEFAULT_NUCLEI_FLAGS', '-stats -sj -si 20 -hm -or')
DEFAULT_FEROXBUSTER_FLAGS = os.environ.get('DEFAULT_FEROXBUSTER_FLAGS', '--auto-bail --no-state')
DEFAULT_PROGRESS_UPDATE_FREQUENCY = int(os.environ.get('DEFAULT_PROGRESS_UPDATE_FREQUENCY', 60))
DEFAULT_SKIP_CVE_SEARCH = bool(int(os.environ.get('DEFAULT_SKIP_CVE_SEARCH', 0)))

# Default wordlists
DEFAULT_HTTP_WORDLIST = os.environ.get('DEFAULT_HTTP_WORDLIST', f'{WORDLISTS_FOLDER}/Fuzzing/fuzz-Bo0oM.txt')
DEFAULT_DNS_WORDLIST = os.environ.get('DEFAULT_DNS_WORDLIST', f'{WORDLISTS_FOLDER}/Discovery/DNS/combined_subdomains.txt')  # noqa:E501

# Constants
OPT_NOT_SUPPORTED = -1
OPT_PIPE_INPUT = -1

# Vocab
ALIVE = 'alive'
AUTO_CALIBRATION = 'auto_calibration'
CONTENT_TYPE = 'content_type'
CONTENT_LENGTH = 'content_length'
CIDR_RANGE = 'cidr_range'
CPES = 'cpes'
CVES = 'cves'
DELAY = 'delay'
DOMAIN = 'domain'
DEPTH = 'depth'
EXTRA_DATA = 'extra_data'
EMAIL = 'email'
FILTER_CODES = 'filter_codes'
FILTER_WORDS = 'filter_words'
FOLLOW_REDIRECT = 'follow_redirect'
FILTER_REGEX = 'filter_regex'
FILTER_SIZE = 'filter_size'
HEADER = 'header'
HOST = 'host'
IP = 'ip'
LINES = 'lines'
METHOD = 'method'
MATCH_CODES = 'match_codes'
MATCH_REGEX = 'match_regex'
MATCH_SIZE = 'match_size'
MATCH_WORDS = 'match_words'
OUTPUT_PATH = 'output_path'
PATH = 'path'
PERCENT = 'percent'
PORTS = 'ports'
PORT = 'port'
PROXY = 'proxy'
RATE_LIMIT = 'rate_limit'
RETRIES = 'retries'
TAGS = 'tags'
THREADS = 'threads'
TIME = 'time'
TIMEOUT = 'timeout'
TOP_PORTS = 'top_ports'
TYPE = 'type'
URL = 'url'
USER_AGENT = 'user_agent'
USERNAME = 'username'
STORED_RESPONSE_PATH = 'stored_response_path'
SCRIPT = 'script'
SERVICE_NAME = 'service_name'
SOURCES = 'sources'
STATE = 'state'
STATUS_CODE = 'status_code'
TECH = 'tech'
TITLE = 'title'
SITE_NAME = 'site_name'
SERVICE_NAME = 'service_name'
CONFIDENCE = 'confidence'
CVSS_SCORE = 'cvss_score'
DESCRIPTION = 'description'
ID = 'id'
MATCHED_AT = 'matched_at'
NAME = 'name'
PROVIDER = 'provider'
REFERENCE = 'reference'
REFERENCES = 'references'
SEVERITY = 'severity'
TAGS = 'tags'
WEBSERVER = 'webserver'
WORDLIST = 'wordlist'
WORDS = 'words'

# Check worker addon
try:
    import eventlet  # noqa: F401
    WORKER_ADDON_ENABLED = 1
except ModuleNotFoundError:
    WORKER_ADDON_ENABLED = 0

# Check mongodb addon
try:
    import pymongo  # noqa: F401
    MONGODB_ADDON_ENABLED = 1
except ModuleNotFoundError:
    MONGODB_ADDON_ENABLED = 0

# Check dev addon
try:
    import flake8  # noqa: F401
    DEV_ADDON_ENABLED = 1
except ModuleNotFoundError:
    DEV_ADDON_ENABLED = 0

# Check dev package
if not os.path.exists(TESTS_FOLDER):
    DEV_PACKAGE = 0
else:
    DEV_PACKAGE = 1

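Note: a minimal sketch (not part of the package) of how the environment-driven defaults above behave. definitions.py loads a local .env via python-dotenv with override=False (so real environment variables win) and reads every setting with os.environ.get() at import time; overrides must therefore be in place before the first import, which also creates the data folders via os.makedirs. The path and flags below are hypothetical values.

import os

# Hypothetical overrides; must be set before secator.definitions is first imported.
os.environ['SECATOR_DATA_FOLDER'] = '/tmp/secator-data'
os.environ['DEFAULT_HTTPX_FLAGS'] = '-td -fr'

from secator.definitions import (DATA_FOLDER, DEFAULT_HTTPX_FLAGS,  # noqa: E402
                                 REPORTS_FOLDER)

print(DATA_FOLDER)          # /tmp/secator-data
print(REPORTS_FOLDER)       # /tmp/secator-data/reports (derived from DATA_FOLDER unless set explicitly)
print(DEFAULT_HTTPX_FLAGS)  # -td -fr
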
secator/exporters/__init__.py
ADDED
@@ -0,0 +1,12 @@
__all__ = [
    'CsvExporter',
    'GdriveExporter',
    'JsonExporter',
    'TableExporter',
    'TxtExporter'
]
from secator.exporters.csv import CsvExporter
from secator.exporters.gdrive import GdriveExporter
from secator.exporters.json import JsonExporter
from secator.exporters.table import TableExporter
from secator.exporters.txt import TxtExporter

secator/exporters/csv.py
ADDED
@@ -0,0 +1,29 @@
import csv as _csv

from secator.exporters._base import Exporter
from secator.rich import console


class CsvExporter(Exporter):
    def send(self):
        results = self.report.data['results']
        csv_paths = []

        for output_type, items in results.items():
            items = [i.toDict() for i in items]
            if not items:
                continue
            keys = list(items[0].keys())
            csv_path = f'{self.report.output_folder}/report_{output_type}.csv'
            csv_paths.append(csv_path)
            with open(csv_path, 'w', newline='') as output_file:
                dict_writer = _csv.DictWriter(output_file, keys)
                dict_writer.writeheader()
                dict_writer.writerows(items)

        if len(csv_paths) == 1:
            csv_paths_str = csv_paths[0]
        else:
            csv_paths_str = '\n • ' + '\n • '.join(csv_paths)

        console.print(f':file_cabinet: Saved CSV reports to {csv_paths_str}')

secator/exporters/gdrive.py
ADDED
@@ -0,0 +1,118 @@
import os
import csv
import yaml

from secator.definitions import GOOGLE_CREDENTIALS_PATH, GOOGLE_DRIVE_PARENT_FOLDER_ID
from secator.exporters._base import Exporter
from secator.rich import console
from secator.utils import pluralize


class GdriveExporter(Exporter):
    def send(self):
        import gspread
        ws = self.report.workspace_name
        info = self.report.data['info']
        title = self.report.data['info']['title']
        sheet_title = f'{self.report.data["info"]["title"]}_{self.report.timestamp}'
        results = self.report.data['results']
        if not GOOGLE_CREDENTIALS_PATH:
            console.print(':file_cabinet: Missing GOOGLE_CREDENTIALS_PATH to save to Google Sheets', style='red')
            return
        if not GOOGLE_DRIVE_PARENT_FOLDER_ID:
            console.print(':file_cabinet: Missing GOOGLE_DRIVE_PARENT_FOLDER_ID to save to Google Sheets.', style='red')
            return
        client = gspread.service_account(GOOGLE_CREDENTIALS_PATH)

        # Create workspace folder if it doesn't exist
        folder_id = self.get_folder_by_name(ws, parent_id=GOOGLE_DRIVE_PARENT_FOLDER_ID)
        if ws and not folder_id:
            folder_id = self.create_folder(
                folder_name=ws,
                parent_id=GOOGLE_DRIVE_PARENT_FOLDER_ID)

        # Create worksheet
        sheet = client.create(title, folder_id=folder_id)

        # Add options worksheet for input data
        info = self.report.data['info']
        info['targets'] = '\n'.join(info['targets'])
        info['opts'] = yaml.dump(info['opts'])
        keys = [k.replace('_', ' ').upper() for k in list(info.keys())]
        ws = sheet.add_worksheet('OPTIONS', rows=2, cols=len(keys))
        sheet.values_update(
            ws.title,
            params={'valueInputOption': 'USER_ENTERED'},
            body={'values': [keys, list(info.values())]}
        )

        # Add one worksheet per output type
        for output_type, items in results.items():
            items = [i.toDict() for i in items]
            if not items:
                continue
            keys = [
                k.replace('_', ' ').upper()
                for k in list(items[0].keys())
            ]
            csv_path = f'{self.report.output_folder}/report_{output_type}.csv'
            if not os.path.exists(csv_path):
                console.print(
                    f'Unable to find CSV at {csv_path}. For Google sheets reports, please enable CSV reports as well.')
                return
            sheet_title = pluralize(output_type).upper()
            ws = sheet.add_worksheet(sheet_title, rows=len(items), cols=len(keys))
            with open(csv_path, 'r') as f:
                data = csv.reader(f)
                data = list(data)
                data[0] = [
                    k.replace('_', ' ').upper()
                    for k in data[0]
                ]
                sheet.values_update(
                    ws.title,
                    params={'valueInputOption': 'USER_ENTERED'},
                    body={'values': data}
                )

        # Delete 'default' worksheet
        ws = sheet.get_worksheet(0)
        sheet.del_worksheet(ws)

        console.print(f':file_cabinet: Saved Google Sheets reports to [u magenta]{sheet.url}[/]')

    def create_folder(self, folder_name, parent_id=None):
        from googleapiclient.discovery import build
        from google.oauth2 import service_account
        creds = service_account.Credentials.from_service_account_file(GOOGLE_CREDENTIALS_PATH)
        service = build('drive', 'v3', credentials=creds)
        body = {
            'name': folder_name,
            'mimeType': "application/vnd.google-apps.folder"
        }
        if parent_id:
            body['parents'] = [parent_id]
        folder = service.files().create(body=body, fields='id').execute()
        return folder['id']

    def list_folders(self, parent_id):
        from googleapiclient.discovery import build
        from google.oauth2 import service_account
        creds = service_account.Credentials.from_service_account_file(GOOGLE_CREDENTIALS_PATH)
        service = build('drive', 'v3', credentials=creds)
        driveid = service.files().get(fileId='root').execute()['id']
        response = service.files().list(
            q=f"'{parent_id}' in parents and mimeType='application/vnd.google-apps.folder'",
            driveId=driveid,
            corpora='drive',
            includeItemsFromAllDrives=True,
            supportsAllDrives=True
        ).execute()
        return response

    def get_folder_by_name(self, name, parent_id=None):
        response = self.list_folders(parent_id=parent_id)
        existing = [i for i in response['files'] if i['name'] == name]
        if existing:
            return existing[0]['id']
        return None

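Note: a minimal pre-flight sketch (run by hand; not shipped with the package). GdriveExporter.send() returns early unless GOOGLE_CREDENTIALS_PATH and GOOGLE_DRIVE_PARENT_FOLDER_ID are set (both are read from the environment in secator/definitions.py), and it also expects the per-output-type CSV reports to exist, so the CSV exporter must be enabled alongside it.

import os

from secator.definitions import GOOGLE_CREDENTIALS_PATH, GOOGLE_DRIVE_PARENT_FOLDER_ID


def gdrive_export_ready():
    """Return True if the settings GdriveExporter checks before exporting are present."""
    if not GOOGLE_CREDENTIALS_PATH or not os.path.isfile(GOOGLE_CREDENTIALS_PATH):
        print('Set GOOGLE_CREDENTIALS_PATH to a readable service-account JSON file')
        return False
    if not GOOGLE_DRIVE_PARENT_FOLDER_ID:
        print('Set GOOGLE_DRIVE_PARENT_FOLDER_ID to the Drive folder that should receive reports')
        return False
    return True


if __name__ == '__main__':
    print('gdrive exporter prerequisites OK' if gdrive_export_ready() else 'gdrive exporter not configured')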