secator-0.0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of secator might be problematic.
- secator/__init__.py +0 -0
- secator/celery.py +482 -0
- secator/cli.py +617 -0
- secator/config.py +137 -0
- secator/configs/__init__.py +0 -0
- secator/configs/profiles/__init__.py +0 -0
- secator/configs/profiles/aggressive.yaml +7 -0
- secator/configs/profiles/default.yaml +9 -0
- secator/configs/profiles/stealth.yaml +7 -0
- secator/configs/scans/__init__.py +0 -0
- secator/configs/scans/domain.yaml +18 -0
- secator/configs/scans/host.yaml +14 -0
- secator/configs/scans/network.yaml +17 -0
- secator/configs/scans/subdomain.yaml +8 -0
- secator/configs/scans/url.yaml +12 -0
- secator/configs/workflows/__init__.py +0 -0
- secator/configs/workflows/cidr_recon.yaml +28 -0
- secator/configs/workflows/code_scan.yaml +11 -0
- secator/configs/workflows/host_recon.yaml +41 -0
- secator/configs/workflows/port_scan.yaml +34 -0
- secator/configs/workflows/subdomain_recon.yaml +33 -0
- secator/configs/workflows/url_crawl.yaml +29 -0
- secator/configs/workflows/url_dirsearch.yaml +29 -0
- secator/configs/workflows/url_fuzz.yaml +35 -0
- secator/configs/workflows/url_nuclei.yaml +11 -0
- secator/configs/workflows/url_vuln.yaml +55 -0
- secator/configs/workflows/user_hunt.yaml +10 -0
- secator/configs/workflows/wordpress.yaml +14 -0
- secator/decorators.py +309 -0
- secator/definitions.py +165 -0
- secator/exporters/__init__.py +12 -0
- secator/exporters/_base.py +3 -0
- secator/exporters/csv.py +30 -0
- secator/exporters/gdrive.py +118 -0
- secator/exporters/json.py +15 -0
- secator/exporters/table.py +7 -0
- secator/exporters/txt.py +25 -0
- secator/hooks/__init__.py +0 -0
- secator/hooks/mongodb.py +212 -0
- secator/output_types/__init__.py +24 -0
- secator/output_types/_base.py +95 -0
- secator/output_types/exploit.py +50 -0
- secator/output_types/ip.py +33 -0
- secator/output_types/port.py +45 -0
- secator/output_types/progress.py +35 -0
- secator/output_types/record.py +34 -0
- secator/output_types/subdomain.py +42 -0
- secator/output_types/tag.py +46 -0
- secator/output_types/target.py +30 -0
- secator/output_types/url.py +76 -0
- secator/output_types/user_account.py +41 -0
- secator/output_types/vulnerability.py +97 -0
- secator/report.py +107 -0
- secator/rich.py +124 -0
- secator/runners/__init__.py +12 -0
- secator/runners/_base.py +833 -0
- secator/runners/_helpers.py +153 -0
- secator/runners/command.py +638 -0
- secator/runners/scan.py +65 -0
- secator/runners/task.py +106 -0
- secator/runners/workflow.py +135 -0
- secator/serializers/__init__.py +8 -0
- secator/serializers/dataclass.py +33 -0
- secator/serializers/json.py +15 -0
- secator/serializers/regex.py +17 -0
- secator/tasks/__init__.py +10 -0
- secator/tasks/_categories.py +304 -0
- secator/tasks/cariddi.py +102 -0
- secator/tasks/dalfox.py +65 -0
- secator/tasks/dirsearch.py +90 -0
- secator/tasks/dnsx.py +56 -0
- secator/tasks/dnsxbrute.py +34 -0
- secator/tasks/feroxbuster.py +91 -0
- secator/tasks/ffuf.py +86 -0
- secator/tasks/fping.py +44 -0
- secator/tasks/gau.py +47 -0
- secator/tasks/gf.py +33 -0
- secator/tasks/gospider.py +71 -0
- secator/tasks/grype.py +79 -0
- secator/tasks/h8mail.py +81 -0
- secator/tasks/httpx.py +99 -0
- secator/tasks/katana.py +133 -0
- secator/tasks/maigret.py +78 -0
- secator/tasks/mapcidr.py +32 -0
- secator/tasks/msfconsole.py +174 -0
- secator/tasks/naabu.py +52 -0
- secator/tasks/nmap.py +344 -0
- secator/tasks/nuclei.py +97 -0
- secator/tasks/searchsploit.py +52 -0
- secator/tasks/subfinder.py +40 -0
- secator/tasks/wpscan.py +179 -0
- secator/utils.py +445 -0
- secator/utils_test.py +183 -0
- secator-0.0.1.dist-info/LICENSE +60 -0
- secator-0.0.1.dist-info/METADATA +199 -0
- secator-0.0.1.dist-info/RECORD +114 -0
- secator-0.0.1.dist-info/WHEEL +5 -0
- secator-0.0.1.dist-info/entry_points.txt +2 -0
- secator-0.0.1.dist-info/top_level.txt +2 -0
- tests/__init__.py +0 -0
- tests/integration/__init__.py +0 -0
- tests/integration/inputs.py +42 -0
- tests/integration/outputs.py +392 -0
- tests/integration/test_scans.py +82 -0
- tests/integration/test_tasks.py +103 -0
- tests/integration/test_workflows.py +163 -0
- tests/performance/__init__.py +0 -0
- tests/performance/loadtester.py +56 -0
- tests/unit/__init__.py +0 -0
- tests/unit/test_celery.py +39 -0
- tests/unit/test_scans.py +0 -0
- tests/unit/test_serializers.py +51 -0
- tests/unit/test_tasks.py +348 -0
- tests/unit/test_workflows.py +96 -0
secator/runners/command.py
ADDED
@@ -0,0 +1,638 @@
import logging
import os
import re
import shlex
import subprocess
import sys

from time import sleep

from celery.result import AsyncResult
from fp.fp import FreeProxy

from secator.config import ConfigLoader
from secator.definitions import (DEBUG, DEFAULT_HTTP_PROXY,
								 DEFAULT_FREEPROXY_TIMEOUT,
								 DEFAULT_PROXYCHAINS_COMMAND,
								 DEFAULT_SOCKS5_PROXY, OPT_NOT_SUPPORTED,
								 OPT_PIPE_INPUT, DATA_FOLDER, DEFAULT_INPUT_CHUNK_SIZE)
from secator.rich import console
from secator.runners import Runner
from secator.serializers import JSONSerializer
from secator.utils import get_file_timestamp, debug

# from rich.markup import escape
# from rich.text import Text


logger = logging.getLogger(__name__)


class Command(Runner):
	"""Base class to execute an external command."""
	# Base cmd
	cmd = None

	# Meta options
	meta_opts = {}

	# Additional command options
	opts = {}

	# Option prefix char
	opt_prefix = '-'

	# Option key map to transform option names
	opt_key_map = {}

	# Option value map to transform option values
	opt_value_map = {}

	# Output map to transform JSON output keys
	output_map = {}

	# Run in shell if True (not recommended)
	shell = False

	# Current working directory
	cwd = None

	# Output encoding
	encoding = 'utf-8'

	# Environment variables
	env = {}

	# Flag to take the input
	input_flag = None

	# Input path (if a file is constructed)
	input_path = None

	# Input chunk size (default None)
	input_chunk_size = DEFAULT_INPUT_CHUNK_SIZE

	# Flag to take a file as input
	file_flag = None

	# Flag to enable output JSON
	json_flag = None

	# Install command
	install_cmd = None

	# Serializer
	item_loader = None
	item_loaders = [JSONSerializer(),]

	# Ignore return code
	ignore_return_code = False

	# Return code
	return_code = -1

	# Error
	error = ''

	# Output
	output = ''

	# Default run opts
	default_run_opts = {}

	# Proxy options
	proxychains = False
	proxy_socks5 = False
	proxy_http = False

	# Profile
	profile = 'cpu'

	def __init__(self, input=None, **run_opts):
		# Build runner config on-the-fly
		config = ConfigLoader(input={
			'name': self.__class__.__name__,
			'type': 'task',
			'description': run_opts.get('description', None)
		})

		# Run parent init
		hooks = run_opts.pop('hooks', {})
		results = run_opts.pop('results', [])
		context = run_opts.pop('context', {})
		super().__init__(
			config=config,
			targets=input,
			results=results,
			run_opts=run_opts,
			hooks=hooks,
			context=context)

		# Current working directory for cmd
		self.cwd = self.run_opts.get('cwd', None)

		# No capturing of stdout / stderr.
		self.no_capture = self.run_opts.get('no_capture', False)

		# Proxy config (global)
		self.proxy = self.run_opts.pop('proxy', False)
		self.configure_proxy()

		# Build command input
		self._build_cmd_input()

		# Build command
		self._build_cmd()

		# Build item loaders
		instance_func = getattr(self, 'item_loader', None)
		item_loaders = self.item_loaders.copy()
		if instance_func:
			item_loaders.append(instance_func)
		self.item_loaders = item_loaders

		# Print built cmd
		if self.print_cmd and not self.has_children:
			if self.sync and self.description:
				self._print(f'\n:wrench: {self.description} ...', color='bold gold3', rich=True)
			self._print(self.cmd, color='bold cyan', rich=True)

		# Print built input
		if self.print_input_file and self.input_path:
			input_str = '\n '.join(self.input).strip()
			debug(f'[dim magenta]File input:[/]\n [italic medium_turquoise]{input_str}[/]')

		# Print run options
		if self.print_run_opts:
			input_str = '\n '.join([
				f'[dim blue]{k}[/] -> [dim green]{v}[/]' for k, v in self.run_opts.items() if v is not None]).strip()
			debug(f'[dim magenta]Run opts:[/]\n {input_str}')

		# Print format options
		if self.print_fmt_opts:
			input_str = '\n '.join([
				f'[dim blue]{k}[/] -> [dim green]{v}[/]' for k, v in self.opts_to_print.items() if v is not None]).strip()
			debug(f'[dim magenta]Print opts:[/]\n {input_str}')

		# Print hooks
		if self.print_hooks:
			input_str = ''
			for hook_name, hook_funcs in self.hooks.items():
				hook_funcs_str = ', '.join([f'[dim green]{h.__module__}.{h.__qualname__}[/]' for h in hook_funcs])
				if hook_funcs:
					input_str += f'[dim blue]{hook_name}[/] -> {hook_funcs_str}\n '
			input_str = input_str.strip()
			if input_str:
				debug(f'[dim magenta]Hooks:[/]\n {input_str}')

	def toDict(self):
		res = super().toDict()
		res.update({
			'cmd': self.cmd,
			'cwd': self.cwd,
			'return_code': self.return_code
		})
		return res

	@classmethod
	def delay(cls, *args, **kwargs):
		# TODO: Move this to TaskBase
		from secator.celery import run_command
		results = kwargs.get('results', [])
		name = cls.__name__
		return run_command.apply_async(args=[results, name] + list(args), kwargs={'opts': kwargs}, queue=cls.profile)

	@classmethod
	def s(cls, *args, **kwargs):
		# TODO: Move this to TaskBase
		from secator.celery import run_command
		return run_command.s(cls.__name__, *args, opts=kwargs).set(queue=cls.profile)

	@classmethod
	def si(cls, results, *args, **kwargs):
		# TODO: Move this to TaskBase
		from secator.celery import run_command
		return run_command.si(results, cls.__name__, *args, opts=kwargs).set(queue=cls.profile)

	@classmethod
	def poll(cls, result):
		# TODO: Move this to TaskBase
		while not result.ready():
			data = AsyncResult(result.id).info
			if DEBUG > 1 and isinstance(data, dict):
				print(data)
			sleep(1)
		return result.get()

	def get_opt_value(self, opt_name):
		return Command._get_opt_value(
			self.run_opts,
			opt_name,
			dict(self.opts, **self.meta_opts),
			opt_prefix=self.config.name)

	@classmethod
	def get_supported_opts(cls):
		def convert(d):
			for k, v in d.items():
				if hasattr(v, '__name__') and v.__name__ in ['str', 'int', 'float']:
					d[k] = v.__name__
			return d

		opts = {k: convert(v) for k, v in cls.opts.items()}
		for k, v in opts.items():
			v['meta'] = cls.__name__
			v['supported'] = True

		meta_opts = {k: convert(v) for k, v in cls.meta_opts.items() if cls.opt_key_map.get(k) is not OPT_NOT_SUPPORTED}
		for k, v in meta_opts.items():
			v['meta'] = 'meta'
			if cls.opt_key_map.get(k) is OPT_NOT_SUPPORTED:
				v['supported'] = False
			else:
				v['supported'] = True
		opts = dict(opts)
		opts.update(meta_opts)
		return opts

	#---------------#
	# Class methods #
	#---------------#

	@classmethod
	def install(cls):
		"""Install command by running the content of cls.install_cmd."""
		console.log(f':pill: Installing {cls.__name__}...', style='bold yellow')
		if not cls.install_cmd:
			console.log(f'{cls.__name__} install is not supported yet. Please install it manually.', style='bold red')
			return
		ret = cls.run_command(
			cls.install_cmd,
			name=cls.__name__,
			print_cmd=True,
			print_line=True,
			cls_attributes={'shell': True}
		)
		if ret.return_code != 0:
			console.log(f'Failed to install {cls.__name__}.', style='bold red')
		else:
			console.log(f'{cls.__name__} installed successfully !', style='bold green')
		return ret

	@classmethod
	def run_command(cls, cmd, name='helperClass', cls_attributes={}, **kwargs):
		"""Run adhoc command. Can be used without defining an inherited class to run a command, while still enjoying
		all the good stuff in this class.
		"""
		cmd_instance = type(name, (Command,), {'cmd': cmd})(**kwargs)
		for k, v in cls_attributes.items():
			setattr(cmd_instance, k, v)
		cmd_instance.print_line = not kwargs.get('quiet', False)
		cmd_instance.print_item = not kwargs.get('quiet', False)
		cmd_instance.run()
		return cmd_instance

	def configure_proxy(self):
		"""Configure proxy. Start with global settings like 'proxychains' or 'random', or fallback to tool-specific
		proxy settings.

		TODO: Move this to a subclass of Command, or to a configurable attribute to pass to derived classes as it's not
		related to core functionality.
		"""
		opt_key_map = self.opt_key_map
		proxy_opt = opt_key_map.get('proxy', False)
		support_proxy_opt = proxy_opt and proxy_opt != OPT_NOT_SUPPORTED
		proxychains_flavor = getattr(self, 'proxychains_flavor', DEFAULT_PROXYCHAINS_COMMAND)
		proxy = False

		if self.proxy in ['auto', 'proxychains'] and self.proxychains:
			self.cmd = f'{proxychains_flavor} {self.cmd}'
			proxy = 'proxychains'

		elif self.proxy and support_proxy_opt:
			if self.proxy in ['auto', 'socks5'] and self.proxy_socks5 and DEFAULT_SOCKS5_PROXY:
				proxy = DEFAULT_SOCKS5_PROXY
			elif self.proxy in ['auto', 'http'] and self.proxy_http and DEFAULT_HTTP_PROXY:
				proxy = DEFAULT_HTTP_PROXY
			elif self.proxy == 'random':
				proxy = FreeProxy(timeout=DEFAULT_FREEPROXY_TIMEOUT, rand=True, anonym=True).get()
			elif self.proxy.startswith(('http://', 'socks5://')):
				proxy = self.proxy

		if proxy != 'proxychains':
			self.run_opts['proxy'] = proxy

		if proxy != 'proxychains' and self.proxy and not proxy:
			self._print(
				f'[bold red]Ignoring proxy "{self.proxy}" for {self.__class__.__name__} (not supported).[/]', rich=True)

	#----------#
	# Internal #
	#----------#
	def yielder(self):
		"""Run command and yields its output in real-time. Also saves the command line, return code and output to the
		database.

		Args:
			cmd (str): Command to run.
			cwd (str, Optional): Working directory to run from.
			shell (bool, Optional): Run command in a shell.
			history_file (str): History file path.
			mapper_func (Callable, Optional): Function to map output before yielding.
			encoding (str, Optional): Output encoding.
			ctx (dict, Optional): Scan context.

		Yields:
			str: Command stdout / stderr.
			dict: Parsed JSONLine object.
		"""
		# Set status to 'RUNNING'
		self.status = 'RUNNING'

		# Callback before running command
		self.run_hooks('on_start')

		# Prepare cmds
		command = self.cmd if self.shell else shlex.split(self.cmd)

		# Output and results
		self.return_code = 0
		self.killed = False

		# Run the command using subprocess
		try:
			env = os.environ
			env.update(self.env)
			process = subprocess.Popen(
				command,
				stdout=sys.stdout if self.no_capture else subprocess.PIPE,
				stderr=sys.stderr if self.no_capture else subprocess.STDOUT,
				universal_newlines=True,
				shell=self.shell,
				env=env,
				cwd=self.cwd)

		except FileNotFoundError as e:
			if self.config.name in str(e):
				error = 'Executable not found.'
				if self.install_cmd:
					error += f' Install it with `secator utils install {self.config.name}`.'
			else:
				error = str(e)
			celery_id = self.context.get('celery_id', '')
			if celery_id:
				error += f' [{celery_id}]'
			self.errors.append(error)
			self.return_code = 1
			if error:
				self._print(error, color='bold red')
			return

		try:
			# No capture mode, wait for command to finish and return
			if self.no_capture:
				self._wait_for_end(process)
				return

			# Process the output in real-time
			for line in iter(lambda: process.stdout.readline(), b''):
				sleep(0)  # for async to give up control
				if not line:
					break

				# Strip line
				line = line.strip()

				# Some commands output ANSI text, so we need to remove those ANSI chars
				if self.encoding == 'ansi':
					# ansi_regex = r'\x1b\[([0-9,A-Z]{1,2}(;[0-9]{1,2})?(;[0-9]{3})?)?[K]?'
					# line = re.sub(ansi_regex, '', line.strip())
					ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
					line = ansi_escape.sub('', line)
					line = line.replace('\\x0d\\x0a', '\n')

				# Run on_line hooks
				line = self.run_hooks('on_line', line)

				# Run item_loader to try parsing as dict
				items = None
				if self.output_json:
					items = self.run_item_loaders(line)

				# Yield line if no items parsed
				if not items and not self.output_quiet:
					yield line

				# Turn results into list if not already a list
				elif not isinstance(items, list):
					items = [items]

				# Yield items
				if items:
					yield from items

		except KeyboardInterrupt:
			process.kill()
			self.killed = True

		# Retrieve the return code and output
		self._wait_for_end(process)

	def run_item_loaders(self, line):
		"""Run item loaders on a string."""
		items = []
		for item_loader in self.item_loaders:
			result = None
			if (callable(item_loader)):
				result = item_loader(self, line)
			elif item_loader:
				result = item_loader.run(line)
			if isinstance(result, dict):
				result = [result]
			if result:
				items.extend(result)
		return items

	def _wait_for_end(self, process):
		"""Wait for process to finish and process output and return code."""
		process.wait()
		self.return_code = process.returncode

		if self.no_capture:
			self.output = ''
		else:
			self.output = self.output.strip()
			process.stdout.close()

		if self.ignore_return_code:
			self.return_code = 0

		if self.return_code == -2 or self.killed:
			error = 'Process was killed manually (CTRL+C / CTRL+X)'
			self._print(error, color='bold red')
			self.errors.append(error)
		elif self.return_code != 0:
			error = f'Command failed with return code {self.return_code}.'
			self._print(error, color='bold red')
			self.errors.append(error)

	@staticmethod
	def _process_opts(
			opts,
			opts_conf,
			opt_key_map={},
			opt_value_map={},
			opt_prefix='-',
			command_name=None):
		"""Process a dict of options using a config, option key map / value map
		and option character like '-' or '--'.

		Args:
			opts (dict): Command options as input on the CLI.
			opts_conf (dict): Options config (Click options definition).
		"""
		opts_str = ''
		for opt_name, opt_conf in opts_conf.items():

			# Get opt value
			default_val = opt_conf.get('default')
			opt_val = Command._get_opt_value(
				opts,
				opt_name,
				opts_conf,
				opt_prefix=command_name,
				default=default_val)

			# Skip option if value is falsy
			if opt_val in [None, False, []]:
				# logger.debug(f'Option {opt_name} was passed but is falsy. Skipping.')
				continue

			# Convert opt value to expected command opt value
			mapped_opt_val = opt_value_map.get(opt_name)
			if callable(mapped_opt_val):
				opt_val = mapped_opt_val(opt_val)
			elif mapped_opt_val:
				opt_val = mapped_opt_val

			# Convert opt name to expected command opt name
			mapped_opt_name = opt_key_map.get(opt_name)
			if mapped_opt_name == OPT_NOT_SUPPORTED:
				# logger.debug(f'Option {opt_name} was passed but is unsupported. Skipping.')
				continue
			elif mapped_opt_name is not None:
				opt_name = mapped_opt_name

			# Avoid shell injections and detect opt prefix
			opt_name = str(opt_name).split(' ')[0]  # avoid cmd injection

			# Replace '_' with '-'
			opt_name = opt_name.replace('_', '-')

			# Add opt prefix if not already there
			if len(opt_name) > 0 and opt_name[0] not in ['-', '--']:
				opt_name = f'{opt_prefix}{opt_name}'

			# Append opt name + opt value to option string.
			# Note: does not append opt value if value is True (flag)
			opts_str += f' {opt_name}'
			if opt_val is not True:
				opt_val = shlex.quote(str(opt_val))
				opts_str += f' {opt_val}'

		return opts_str.strip()

	@staticmethod
	def _get_opt_value(opts, opt_name, opts_conf={}, opt_prefix='', default=None):
		aliases = [
			opts.get(f'{opt_prefix}_{opt_name}'),
			opts.get(f'{opt_prefix}.{opt_name}'),
			opts.get(opt_name),
		]
		alias = [conf.get('short') for _, conf in opts_conf.items() if conf.get('short') in opts]
		if alias:
			aliases.append(opts.get(alias[0]))
		if OPT_NOT_SUPPORTED in aliases:
			return None
		return next((v for v in aliases if v is not None), default)

	def _build_cmd(self):
		"""Build command string."""

		# Add JSON flag to cmd
		if self.output_json and self.json_flag:
			self.cmd += f' {self.json_flag}'

		# Add options to cmd
		opts_str = Command._process_opts(
			self.run_opts,
			self.opts,
			self.opt_key_map,
			self.opt_value_map,
			self.opt_prefix,
			command_name=self.config.name)
		if opts_str:
			self.cmd += f' {opts_str}'

		# Add meta options to cmd
		meta_opts_str = Command._process_opts(
			self.run_opts,
			self.meta_opts,
			self.opt_key_map,
			self.opt_value_map,
			self.opt_prefix,
			command_name=self.config.name)
		if meta_opts_str:
			self.cmd += f' {meta_opts_str}'

	def _build_cmd_input(self):
		"""Many commands take as input a string or a list. This function facilitates this based on whether we pass a
		string or a list to the cmd.
		"""
		cmd = self.cmd
		input = self.input

		# If input is None, return the previous command
		if not input:
			return

		# If input is a list but has one element, use the standard string input
		if isinstance(input, list) and len(input) == 1:
			input = input[0]

		# If input is a list and the tool has input_flag set to OPT_PIPE_INPUT, use cat-piped input.
		# Otherwise pass the file path to the tool.
		if isinstance(input, list):
			timestr = get_file_timestamp()
			cmd_name = cmd.split(' ')[0].split('/')[-1]
			fpath = f'{DATA_FOLDER}/{cmd_name}_{timestr}.txt'

			# Write the input to a file
			with open(fpath, 'w') as f:
				f.write('\n'.join(input))

			if self.file_flag == OPT_PIPE_INPUT:
				cmd = f'cat {fpath} | {cmd}'
			elif self.file_flag:
				cmd += f' {self.file_flag} {fpath}'
			else:
				self._print(f'{self.__class__.__name__} does not support multiple inputs.', color='bold red')
				self.input_valid = False

			self.input_path = fpath

		# If input is a string but the tool does not support an input flag, use echo-piped input.
		# If the tool's input flag is set to None, assume it is a positional argument at the end of the command.
		# Otherwise use the input flag to pass the input.
		else:
			input = shlex.quote(input)
			if self.input_flag == OPT_PIPE_INPUT:
				cmd = f'echo {input} | {cmd}'
			elif not self.input_flag:
				cmd += f' {input}'
			else:
				cmd += f' {self.input_flag} {input}'

		self.cmd = cmd
		self.shell = ' | ' in self.cmd
		self.input = input
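The Command base class above is driven almost entirely by class attributes: a concrete task sets cmd, the input/JSON flags and the option maps, and _build_cmd() / _build_cmd_input() assemble the final command line while yielder() streams and parses the output. As a rough, hypothetical sketch of how such a subclass could look (the mytool wrapper below is an illustration under assumed option schemas, not a task shipped in this wheel):

# Hypothetical example (not part of the package): a minimal Command subclass.
from secator.definitions import OPT_NOT_SUPPORTED
from secator.runners import Command


class mytool(Command):
	"""Illustrative wrapper around an imaginary 'mytool' binary."""
	cmd = 'mytool'            # base binary to execute
	input_flag = '-u'         # single input passed as '-u <target>'
	file_flag = '-l'          # list input written to a file and passed as '-l <file>'
	json_flag = '-json'       # flag that switches the tool to JSON lines output
	opts = {
		'depth': {'type': int, 'default': 2, 'help': 'Crawl depth'},  # assumed option schema
	}
	opt_key_map = {
		'rate_limit': OPT_NOT_SUPPORTED,  # meta option this tool cannot honor
	}
	install_cmd = 'go install github.com/example/mytool@latest'  # placeholder URL


# Synchronous use: Runner subclasses are generators that yield parsed items.
# for item in mytool('http://example.com', depth=3):
# 	print(item)
#
# Distributed use via the Celery helpers defined above:
# result = mytool.delay('http://example.com', depth=3)
# mytool.poll(result)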
secator/runners/scan.py
ADDED
@@ -0,0 +1,65 @@
import logging

from secator.config import ConfigLoader
from secator.exporters import CsvExporter, JsonExporter
from secator.runners._base import Runner
from secator.runners._helpers import run_extractors
from secator.runners.workflow import Workflow
from secator.rich import console
from secator.output_types import Target

logger = logging.getLogger(__name__)


class Scan(Runner):

	default_exporters = [
		JsonExporter,
		CsvExporter
	]

	@classmethod
	def delay(cls, *args, **kwargs):
		from secator.celery import run_scan
		return run_scan.delay(args=args, kwargs=kwargs)

	def yielder(self):
		"""Run scan.

		Yields:
			dict: Item yielded from individual workflow tasks.
		"""
		# Yield targets
		for target in self.targets:
			yield Target(name=target, _source=self.config.name, _type='target', _context=self.context)

		# Run workflows
		for name, workflow_opts in self.config.workflows.items():

			# Extract opts and expand targets from previous workflows results
			targets, workflow_opts = run_extractors(self.results, workflow_opts or {}, self.targets)
			if not targets:
				console.log(f'No targets were specified for workflow {name}. Skipping.')
				continue

			# Workflow fmt options
			run_opts = self.run_opts.copy()
			fmt_opts = {
				'json': run_opts.get('json', False),
				'print_item': False,
				'print_start': True,
				'print_run_summary': True,
			}
			run_opts.update(fmt_opts)

			# Run workflow
			workflow = Workflow(
				ConfigLoader(name=f'workflows/{name}'),
				targets,
				results=[],
				run_opts=run_opts,
				hooks=self._hooks,
				context=self.context.copy())

			# Get results
			yield from workflow
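Scan.yielder() first emits its raw targets, then chains the workflows declared in the scan's YAML config, re-extracting targets from previous results through run_extractors before each one. Assuming Scan is constructed the same way Workflow is above (config first, then targets; the constructor itself lives in the Runner base class not shown in this diff, so this is an assumption), a scan could be driven programmatically along these lines:

# Hypothetical usage sketch (config lookup key and constructor signature assumed, not taken from this diff).
from secator.config import ConfigLoader
from secator.runners.scan import Scan

# Load one of the bundled scan configs, e.g. secator/configs/scans/host.yaml.
config = ConfigLoader(name='scans/host')  # assumed lookup key, mirroring 'workflows/<name>' above

scan = Scan(
	config,
	['example.com'],            # targets
	results=[],
	run_opts={'json': True},
	context={})

# Iterating the runner executes the scan and yields Target and other output items as they are produced.
for item in scan:
	print(item)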