secator 0.15.1__py3-none-any.whl → 0.16.0__py3-none-any.whl
This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of secator has been flagged by the registry; see the registry's package page for details.
- secator/celery.py +40 -24
- secator/celery_signals.py +71 -68
- secator/celery_utils.py +43 -27
- secator/cli.py +520 -280
- secator/cli_helper.py +394 -0
- secator/click.py +87 -0
- secator/config.py +67 -39
- secator/configs/profiles/http_headless.yaml +6 -0
- secator/configs/profiles/http_record.yaml +6 -0
- secator/configs/profiles/tor.yaml +1 -1
- secator/configs/scans/domain.yaml +4 -2
- secator/configs/scans/host.yaml +1 -1
- secator/configs/scans/network.yaml +1 -4
- secator/configs/scans/subdomain.yaml +13 -1
- secator/configs/scans/url.yaml +1 -2
- secator/configs/workflows/cidr_recon.yaml +6 -4
- secator/configs/workflows/code_scan.yaml +1 -1
- secator/configs/workflows/host_recon.yaml +29 -3
- secator/configs/workflows/subdomain_recon.yaml +67 -16
- secator/configs/workflows/url_crawl.yaml +44 -15
- secator/configs/workflows/url_dirsearch.yaml +4 -4
- secator/configs/workflows/url_fuzz.yaml +25 -17
- secator/configs/workflows/url_params_fuzz.yaml +7 -0
- secator/configs/workflows/url_vuln.yaml +33 -8
- secator/configs/workflows/user_hunt.yaml +2 -1
- secator/configs/workflows/wordpress.yaml +5 -3
- secator/cve.py +718 -0
- secator/decorators.py +0 -454
- secator/definitions.py +49 -30
- secator/exporters/_base.py +2 -2
- secator/exporters/console.py +2 -2
- secator/exporters/table.py +4 -3
- secator/exporters/txt.py +1 -1
- secator/hooks/mongodb.py +2 -4
- secator/installer.py +77 -49
- secator/loader.py +116 -0
- secator/output_types/_base.py +3 -0
- secator/output_types/certificate.py +63 -63
- secator/output_types/error.py +4 -5
- secator/output_types/info.py +2 -2
- secator/output_types/ip.py +3 -1
- secator/output_types/progress.py +5 -9
- secator/output_types/state.py +17 -17
- secator/output_types/tag.py +3 -0
- secator/output_types/target.py +10 -2
- secator/output_types/url.py +19 -7
- secator/output_types/vulnerability.py +11 -7
- secator/output_types/warning.py +2 -2
- secator/report.py +27 -15
- secator/rich.py +18 -10
- secator/runners/_base.py +446 -233
- secator/runners/_helpers.py +133 -24
- secator/runners/command.py +182 -102
- secator/runners/scan.py +33 -5
- secator/runners/task.py +13 -7
- secator/runners/workflow.py +105 -72
- secator/scans/__init__.py +2 -2
- secator/serializers/dataclass.py +20 -20
- secator/tasks/__init__.py +4 -4
- secator/tasks/_categories.py +39 -27
- secator/tasks/arjun.py +9 -5
- secator/tasks/bbot.py +53 -21
- secator/tasks/bup.py +19 -5
- secator/tasks/cariddi.py +24 -3
- secator/tasks/dalfox.py +26 -7
- secator/tasks/dirsearch.py +10 -4
- secator/tasks/dnsx.py +70 -25
- secator/tasks/feroxbuster.py +11 -3
- secator/tasks/ffuf.py +42 -6
- secator/tasks/fping.py +20 -8
- secator/tasks/gau.py +3 -1
- secator/tasks/gf.py +3 -3
- secator/tasks/gitleaks.py +2 -2
- secator/tasks/gospider.py +7 -1
- secator/tasks/grype.py +5 -4
- secator/tasks/h8mail.py +2 -1
- secator/tasks/httpx.py +18 -5
- secator/tasks/katana.py +35 -15
- secator/tasks/maigret.py +4 -4
- secator/tasks/mapcidr.py +3 -3
- secator/tasks/msfconsole.py +4 -4
- secator/tasks/naabu.py +2 -2
- secator/tasks/nmap.py +12 -14
- secator/tasks/nuclei.py +3 -3
- secator/tasks/searchsploit.py +4 -5
- secator/tasks/subfinder.py +2 -2
- secator/tasks/testssl.py +264 -263
- secator/tasks/trivy.py +5 -5
- secator/tasks/wafw00f.py +21 -3
- secator/tasks/wpprobe.py +90 -83
- secator/tasks/wpscan.py +6 -5
- secator/template.py +218 -104
- secator/thread.py +15 -15
- secator/tree.py +196 -0
- secator/utils.py +131 -123
- secator/utils_test.py +60 -19
- secator/workflows/__init__.py +2 -2
- {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/METADATA +36 -36
- secator-0.16.0.dist-info/RECORD +132 -0
- secator/configs/profiles/default.yaml +0 -8
- secator/configs/workflows/url_nuclei.yaml +0 -11
- secator/tasks/dnsxbrute.py +0 -42
- secator-0.15.1.dist-info/RECORD +0 -128
- {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/WHEEL +0 -0
- {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/entry_points.txt +0 -0
- {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/licenses/LICENSE +0 -0
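A per-file change summary like the list above can be reproduced locally by downloading both wheels (for example with "pip download secator==0.15.1 --no-deps", and likewise for 0.16.0) and diffing their contents with the Python standard library. The sketch below is illustrative and assumes both wheel files sit in the working directory:

import difflib
import zipfile

OLD = 'secator-0.15.1-py3-none-any.whl'  # assumed local path
NEW = 'secator-0.16.0-py3-none-any.whl'  # assumed local path

def wheel_texts(path):
    """Map archive member name -> decoded text content."""
    with zipfile.ZipFile(path) as zf:
        return {n: zf.read(n).decode('utf-8', errors='replace') for n in zf.namelist()}

old, new = wheel_texts(OLD), wheel_texts(NEW)
for name in sorted(set(old) | set(new)):
    a = old.get(name, '').splitlines()
    b = new.get(name, '').splitlines()
    diff = list(difflib.unified_diff(a, b, lineterm=''))
    added = sum(1 for line in diff if line.startswith('+') and not line.startswith('+++'))
    removed = sum(1 for line in diff if line.startswith('-') and not line.startswith('---'))
    if added or removed:
        print(f'{name} +{added} -{removed}')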
secator/tasks/ffuf.py
CHANGED
@@ -1,6 +1,6 @@
 from secator.decorators import task
 from secator.definitions import (AUTO_CALIBRATION, CONTENT_LENGTH,
-                                 CONTENT_TYPE, DELAY, DEPTH, EXTRA_DATA,
+                                 CONTENT_TYPE, DATA, DELAY, DEPTH, EXTRA_DATA,
                                  FILTER_CODES, FILTER_REGEX, FILTER_SIZE,
                                  FILTER_WORDS, FOLLOW_REDIRECT, HEADER,
                                  MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
@@ -8,10 +8,11 @@ from secator.definitions import (AUTO_CALIBRATION, CONTENT_LENGTH,
                                  PERCENT, PROXY, RATE_LIMIT, RETRIES,
                                  STATUS_CODE, THREADS, TIME, TIMEOUT,
                                  USER_AGENT, WORDLIST, URL)
-from secator.output_types import Progress, Url
+from secator.output_types import Progress, Url, Info, Warning
 from secator.serializers import JSONSerializer, RegexSerializer
 from secator.tasks._categories import HttpFuzzer
 
+
 FFUF_PROGRESS_REGEX = r':: Progress: \[(?P<count>\d+)/(?P<total>\d+)\] :: Job \[\d/\d\] :: (?P<rps>\d+) req/sec :: Duration: \[(?P<duration>[\d:]+)\] :: Errors: (?P<errors>\d+) ::'  # noqa: E501
 
 
@@ -19,8 +20,9 @@ FFUF_PROGRESS_REGEX = r':: Progress: \[(?P<count>\d+)/(?P<total>\d+)\] :: Job \[
 class ffuf(HttpFuzzer):
     """Fast web fuzzer written in Go."""
     cmd = 'ffuf -noninteractive'
-    tags = ['url', 'fuzz']
     input_types = [URL]
+    output_types = [Url, Progress]
+    tags = ['url', 'fuzz']
     input_flag = '-u'
     input_chunk_size = 1
     file_flag = None
@@ -32,10 +34,13 @@ class ffuf(HttpFuzzer):
     ]
     opts = {
         AUTO_CALIBRATION: {'is_flag': True, 'short': 'ac', 'help': 'Auto-calibration'},
-        'recursion': {'is_flag': True, 'default':
+        'recursion': {'is_flag': True, 'default': False, 'short': 'recursion', 'help': 'Recursion'},
+        'stop_on_error': {'is_flag': True, 'default': False, 'short': 'soe', 'help': 'Stop on error'},
+        'fuzz_host_header': {'is_flag': True, 'default': False, 'internal': True, 'short': 'fhh', 'help': 'Fuzz host header'},
     }
     opt_key_map = {
         HEADER: 'H',
+        DATA: 'd',
         DELAY: 'p',
         DEPTH: 'recursion-depth',
         FILTER_CODES: 'fc',
@@ -58,8 +63,8 @@ class ffuf(HttpFuzzer):
         # ffuf opts
         WORDLIST: 'w',
         AUTO_CALIBRATION: 'ac',
+        'stop_on_error': 'sa',
     }
-    output_types = [Url, Progress]
     output_map = {
         Url: {
             STATUS_CODE: 'status',
@@ -69,7 +74,7 @@ class ffuf(HttpFuzzer):
         },
         Progress: {
             PERCENT: lambda x: int(int(x['count']) * 100 / int(x['total'])),
-            EXTRA_DATA: lambda x:
+            EXTRA_DATA: lambda x: x
         },
     }
     encoding = 'ansi'
@@ -81,8 +86,39 @@ class ffuf(HttpFuzzer):
     proxy_http = True
     profile = 'io'
 
+    @staticmethod
+    def before_init(self):
+        # Add /FUZZ to URL if recursion is enabled
+        if self.get_opt_value('recursion') and not len(self.inputs) > 1 and not self.inputs[0].endswith('FUZZ'):
+            self._print(Info(message='Adding /FUZZ to URL as it is needed when recursion is enabled'), rich=True)
+            self.inputs[0] = self.inputs[0].rstrip('/') + '/FUZZ'
+
+    @staticmethod
+    def on_cmd_opts(self, opts):
+        # Fuzz host header
+        if self.get_opt_value('fuzz_host_header') and len(self.inputs) > 0:
+            host = self.inputs[0].split('://')[1].split('/')[0]
+            opts['header']['value']['Host'] = f'FUZZ.{host}'
+            self.headers = opts['header']['value'].copy()
+
+        # Check FUZZ keyword
+        data = self.get_opt_value('data') or ''
+        fuzz_in_headers = any('FUZZ' in v for v in self.headers.values())
+        if len(self.inputs) > 0 and 'FUZZ' not in self.inputs[0] and not fuzz_in_headers and 'FUZZ' not in data:
+            self.add_result(Warning(message='Keyword FUZZ is not present in the URL, header or body'))
+        return opts
+
+    @staticmethod
+    def on_item_pre_convert(self, item):
+        if 'host' in item:
+            self.current_host = item['host']
+        return item
+
     @staticmethod
     def on_item(self, item):
         if isinstance(item, Url):
             item.method = self.get_opt_value(METHOD) or 'GET'
+            item.request_headers = self.headers.copy()
+            if 'FUZZ' in self.headers.get('Host', ''):
+                item.request_headers['Host'] = self.current_host
         return item
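The new on_cmd_opts hook above warns when the FUZZ placeholder is missing because ffuf only substitutes wordlist entries where FUZZ appears, so a missing keyword would make every request identical. A standalone sketch of that check (the function name and signature are illustrative, not secator's hook API):

def fuzz_keyword_present(url: str, headers: dict, data: str = '') -> bool:
    """True if the FUZZ placeholder appears in the URL, a header value, or the body."""
    return (
        'FUZZ' in url
        or any('FUZZ' in v for v in headers.values())
        or 'FUZZ' in (data or '')
    )

assert fuzz_keyword_present('https://example.com/FUZZ', {})
assert fuzz_keyword_present('https://example.com/', {'Host': 'FUZZ.example.com'})
assert not fuzz_keyword_present('https://example.com/', {'Accept': '*/*'})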
secator/tasks/fping.py
CHANGED
@@ -1,7 +1,7 @@
 import validators
 
 from secator.decorators import task
-from secator.definitions import (DELAY, IP, OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT,
+from secator.definitions import (DELAY, IP, HOST, OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT,
                                  RETRIES, THREADS, TIMEOUT)
 from secator.output_types import Ip
 from secator.tasks._categories import ReconIp
@@ -10,11 +10,15 @@ from secator.tasks._categories import ReconIp
 @task()
 class fping(ReconIp):
     """Send ICMP echo probes to network hosts, similar to ping, but much better."""
-    cmd = 'fping -a'
+    cmd = 'fping -a -A'
+    input_types = [IP, HOST]
+    output_types = [Ip]
     tags = ['ip', 'recon']
     file_flag = '-f'
     input_flag = None
-
+    opts = {
+        'reverse_dns': {'is_flag': True, 'default': False, 'short': 'r', 'help': 'Reverse DNS lookup (slower)'}
+    }
     opt_prefix = '--'
     opt_key_map = {
         DELAY: 'period',
@@ -22,22 +26,30 @@ class fping(ReconIp):
         RATE_LIMIT: OPT_NOT_SUPPORTED,
         RETRIES: 'retry',
         TIMEOUT: 'timeout',
-        THREADS: OPT_NOT_SUPPORTED
+        THREADS: OPT_NOT_SUPPORTED,
+        'reverse_dns': 'r'
     }
     opt_value_map = {
         DELAY: lambda x: x * 1000,  # convert s to ms
         TIMEOUT: lambda x: x * 1000  # convert s to ms
     }
-
-
+    install_github_handle = 'schweikert/fping'
+    install_version = 'v5.1'
     install_pre = {'*': ['fping']}
     ignore_return_code = True
 
     @staticmethod
     def item_loader(self, line):
-        if
+        if '(' in line:
+            host, ip = tuple(t.strip() for t in line.rstrip(')').split('('))
+            if (validators.ipv4(host) or validators.ipv6(host)):
+                host = ''
+        else:
+            ip = line.strip()
+            host = ''
+        if not (validators.ipv4(ip) or validators.ipv6(ip)):
             return
-        yield {'ip':
+        yield {'ip': ip, 'alive': True, 'host': host}
 
     @staticmethod
     def on_line(self, line):
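With -A added to the command and the new reverse_dns option, fping can print lines of the form "host (ip)" instead of a bare IP, which is what the new item_loader above parses. A standalone sketch of that parsing, using the same validators package the task imports:

import validators

def parse_fping_line(line: str):
    if '(' in line:
        host, ip = (t.strip() for t in line.rstrip(')').split('('))
        if validators.ipv4(host) or validators.ipv6(host):
            host = ''  # fping echoed the IP twice; no real hostname
    else:
        ip, host = line.strip(), ''
    if not (validators.ipv4(ip) or validators.ipv6(ip)):
        return None
    return {'ip': ip, 'alive': True, 'host': host}

print(parse_fping_line('one.one.one.one (1.1.1.1)'))
# {'ip': '1.1.1.1', 'alive': True, 'host': 'one.one.one.one'}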
secator/tasks/gau.py
CHANGED
@@ -5,6 +5,7 @@ from secator.definitions import (DELAY, DEPTH, FILTER_CODES, FILTER_REGEX,
                                  MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED,
                                  OPT_PIPE_INPUT, PROXY, RATE_LIMIT, RETRIES,
                                  THREADS, TIMEOUT, USER_AGENT, URL)
+from secator.output_types.url import Url
 from secator.serializers import JSONSerializer
 from secator.tasks._categories import HttpCrawler
 
@@ -13,8 +14,9 @@ from secator.tasks._categories import HttpCrawler
 class gau(HttpCrawler):
     """Fetch known URLs from AlienVault's Open Threat Exchange, the Wayback Machine, Common Crawl, and URLScan."""
     cmd = 'gau'
-    tags = ['pattern', 'scan']
     input_types = [URL]
+    output_types = [Url]
+    tags = ['pattern', 'scan']
     file_flag = OPT_PIPE_INPUT
     json_flag = '--json'
     opt_prefix = '--'
secator/tasks/gf.py
CHANGED
@@ -1,5 +1,5 @@
 from secator.decorators import task
-from secator.definitions import OPT_PIPE_INPUT, OPT_NOT_SUPPORTED
+from secator.definitions import OPT_PIPE_INPUT, OPT_NOT_SUPPORTED
 from secator.output_types import Tag
 from secator.tasks._categories import Tagger
 
@@ -8,6 +8,8 @@ from secator.tasks._categories import Tagger
 class gf(Tagger):
     """Wrapper around grep, to help you grep for things."""
     cmd = 'gf'
+    input_types = None  # anything
+    output_types = [Tag]
     tags = ['pattern', 'scan']
     file_flag = OPT_PIPE_INPUT
     input_flag = OPT_PIPE_INPUT
@@ -18,12 +20,10 @@ class gf(Tagger):
     opt_key_map = {
         'pattern': ''
     }
-    input_types = [URL]
     install_cmd = (
         'go install -v github.com/tomnomnom/gf@latest && '
         'git clone https://github.com/1ndianl33t/Gf-Patterns $HOME/.gf || true'
     )
-    output_types = [Tag]
 
     @staticmethod
     def item_loader(self, line):
secator/tasks/gitleaks.py
CHANGED
@@ -5,7 +5,7 @@ import yaml
 from secator.config import CONFIG
 from secator.decorators import task
 from secator.runners import Command
-from secator.definitions import (OUTPUT_PATH, PATH
+from secator.definitions import (OUTPUT_PATH, PATH)
 from secator.utils import caml_to_snake
 from secator.output_types import Tag, Info, Error
 
@@ -15,7 +15,7 @@ class gitleaks(Command):
     """Tool for detecting secrets like passwords, API keys, and tokens in git repos, files, and stdin."""
     cmd = 'gitleaks'
     tags = ['secret', 'scan']
-    input_types = [PATH
+    input_types = [PATH]
     input_flag = None
     json_flag = '-f json'
     opt_prefix = '--'
secator/tasks/gospider.py
CHANGED
@@ -16,10 +16,11 @@ from secator.tasks._categories import HttpCrawler
 class gospider(HttpCrawler):
     """Fast web spider written in Go."""
     cmd = 'gospider'
+    input_types = [URL]
+    output_types = [Url]
     tags = ['url', 'crawl']
     file_flag = '-S'
     input_flag = '-s'
-    input_types = [URL]
     json_flag = '--json'
     opt_prefix = '--'
     opt_key_map = {
@@ -76,3 +77,8 @@
         except ValueError:  # gospider returns invalid URLs for output sometimes
             return False
         return True
+
+    @staticmethod
+    def on_json_loaded(self, item):
+        item['request_headers'] = self.get_opt_value('header', preprocess=True)
+        yield item
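The on_json_loaded hook added above is a generator: each parsed JSON item flows through it and can be enriched, dropped, or fanned out into several items before conversion to output types. A minimal sketch of that pattern (the pipeline function is illustrative, not secator's code):

def on_json_loaded(item, request_headers):
    item['request_headers'] = request_headers  # enrich every crawled URL
    yield item

def pipeline(raw_items, request_headers):
    for raw in raw_items:
        yield from on_json_loaded(raw, request_headers)

items = [{'output': 'https://example.com/login'}]
print(list(pipeline(items, {'User-Agent': 'secator'})))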
secator/tasks/grype.py
CHANGED
@@ -2,7 +2,7 @@ from secator.config import CONFIG
 from secator.decorators import task
 from secator.definitions import (DELAY, FOLLOW_REDIRECT, HEADER,
                                  OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT, RETRIES,
-                                 THREADS, TIMEOUT, USER_AGENT, PATH,
+                                 THREADS, TIMEOUT, USER_AGENT, PATH, STRING, URL)
 from secator.output_types import Vulnerability
 from secator.tasks._categories import VulnCode
 
@@ -11,10 +11,12 @@ from secator.tasks._categories import VulnCode
 class grype(VulnCode):
     """Vulnerability scanner for container images and filesystems."""
     cmd = 'grype --quiet'
+    input_types = [PATH, URL, STRING]
+    output_types = [Vulnerability]
     tags = ['vuln', 'scan']
-    input_types = [PATH, DOCKER_IMAGE]
     input_flag = ''
-
+    input_chunk_size = 1
+    file_flag = None
     json_flag = None
     opt_prefix = '--'
     opt_key_map = {
@@ -28,7 +30,6 @@ class grype(VulnCode):
         TIMEOUT: OPT_NOT_SUPPORTED,
         USER_AGENT: OPT_NOT_SUPPORTED
     }
-    output_types = [Vulnerability]
     install_pre = {
         '*': ['curl']
     }
secator/tasks/h8mail.py
CHANGED
@@ -11,10 +11,11 @@ from secator.output_types import UserAccount, Info, Error
 class h8mail(OSInt):
     """Email information and password lookup tool."""
     cmd = 'h8mail'
+    input_types = [EMAIL]
+    output_types = [UserAccount]
     tags = ['user', 'recon', 'email']
     json_flag = '--json '
     input_flag = '--targets'
-    input_types = [EMAIL]
     file_flag = '-domain'
     version_flag = '--help'
     opt_prefix = '--'
secator/tasks/httpx.py
CHANGED
@@ -3,7 +3,7 @@ import os
 from secator.decorators import task
 from secator.definitions import (DELAY, DEPTH, FILTER_CODES, FILTER_REGEX, FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT,
                                  HEADER, MATCH_CODES, MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED,
-                                 PROXY, RATE_LIMIT, RETRIES, THREADS, TIMEOUT, URL, USER_AGENT, HOST, IP)
+                                 PROXY, RATE_LIMIT, RETRIES, THREADS, TIMEOUT, URL, USER_AGENT, HOST, IP, HOST_PORT)
 from secator.config import CONFIG
 from secator.output_types import Url, Subdomain
 from secator.serializers import JSONSerializer
@@ -14,11 +14,12 @@ from secator.utils import (sanitize_url, extract_domain_info, extract_subdomains
 @task()
 class httpx(Http):
     """Fast and multi-purpose HTTP toolkit."""
-    cmd = 'httpx'
+    cmd = 'httpx -irh'
+    input_types = [HOST, HOST_PORT, IP, URL]
+    output_types = [Url, Subdomain]
     tags = ['url', 'probe']
     file_flag = '-l'
     input_flag = '-u'
-    input_types = [HOST, IP, URL]
     json_flag = '-json'
     opts = {
         # 'silent': {'is_flag': True, 'default': False, 'help': 'Silent mode'},
@@ -66,7 +67,6 @@ class httpx(Http):
         DELAY: lambda x: str(x) + 's' if x else None,
     }
     item_loaders = [JSONSerializer()]
-    output_types = [Url, Subdomain]
     install_pre = {
         'apk': ['chromium']
     }
@@ -77,6 +77,17 @@
     proxy_socks5 = True
     proxy_http = True
     profile = 'io'
+    profile = lambda opts: httpx.dynamic_profile(opts)  # noqa: E731
+
+    @staticmethod
+    def dynamic_profile(opts):
+        screenshot = httpx._get_opt_value(
+            opts,
+            'screenshot',
+            opts_conf=dict(httpx.opts, **httpx.meta_opts),
+            opt_aliases=opts.get('aliases', [])
+        )
+        return 'cpu' if screenshot is True else 'io'
 
     @staticmethod
     def on_init(self):
@@ -109,7 +120,7 @@
 
     @staticmethod
     def on_end(self):
-        store_responses = self.get_opt_value('store_responses')
+        store_responses = self.get_opt_value('store_responses') or CONFIG.http.store_responses
        response_dir = f'{self.reports_folder}/.outputs'
         if store_responses:
             index_rpath = f'{response_dir}/response/index.txt'
@@ -133,6 +144,8 @@ class httpx(Http):
         elif k == URL:
             item[k] = sanitize_url(v)
             item[URL] = item.get('final_url') or item[URL]
+            item['request_headers'] = self.get_opt_value('header', preprocess=True)
+            item['response_headers'] = item.get('header', {})
         return item
 
     def _create_subdomain_from_tls_cert(self, domain, url):
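In this release, a task's profile can be a callable evaluated against the resolved options instead of a fixed string, so screenshot runs (which spawn a headless Chromium and are CPU-bound) land on the 'cpu' profile while plain probes stay on 'io'. A simplified sketch of that idea (secator's real _get_opt_value also resolves aliases and option metadata):

def dynamic_profile(opts: dict) -> str:
    # Screenshots spawn a headless Chromium, which is CPU-bound.
    return 'cpu' if opts.get('screenshot') is True else 'io'

profile = dynamic_profile  # replaces the static `profile = 'io'`

print(profile({'screenshot': True}))    # -> 'cpu'
print(profile({'match_codes': '200'}))  # -> 'io'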
secator/tasks/katana.py
CHANGED
@@ -17,10 +17,11 @@ from secator.tasks._categories import HttpCrawler
 class katana(HttpCrawler):
     """Next-generation crawling and spidering framework."""
     cmd = 'katana'
+    input_types = [URL]
+    output_types = [Url]
     tags = ['url', 'crawl']
     file_flag = '-list'
     input_flag = '-u'
-    input_types = [URL]
     json_flag = '-jsonl'
     opts = {
         'headless': {'is_flag': True, 'short': 'hl', 'help': 'Headless mode'},
@@ -73,7 +74,8 @@ class katana(HttpCrawler):
             CONTENT_LENGTH: lambda x: x['response'].get('headers', {}).get('content_length', 0),
             WEBSERVER: lambda x: x['response'].get('headers', {}).get('server', ''),
             TECH: lambda x: x['response'].get('technologies', []),
-            STORED_RESPONSE_PATH: lambda x: x['response'].get('stored_response_path', '')
+            STORED_RESPONSE_PATH: lambda x: x['response'].get('stored_response_path', ''),
+            'response_headers': lambda x: x['response'].get('headers', {}),
             # TAGS: lambda x: x['response'].get('server')
         }
     }
@@ -86,39 +88,57 @@
     proxychains = False
     proxy_socks5 = True
     proxy_http = True
-    profile =
+    profile = lambda opts: katana.dynamic_profile(opts)  # noqa: E731
+
+    @staticmethod
+    def dynamic_profile(opts):
+        headless = katana._get_opt_value(
+            opts,
+            'headless',
+            opts_conf=dict(katana.opts, **katana.meta_opts),
+            opt_aliases=opts.get('aliases', [])
+        )
+        return 'cpu' if headless is True else 'io'
+
+    @staticmethod
+    def on_init(self):
+        form_fill = self.get_opt_value('form_fill')
+        form_extraction = self.get_opt_value('form_extraction')
+        store_responses = self.get_opt_value('store_responses')
+        if form_fill or form_extraction or store_responses:
+            self.cmd += f' -srd {self.reports_folder}/.outputs'
 
     @staticmethod
     def on_json_loaded(self, item):
         # form detection
-
+        response = item.get('response', {})
+        forms = response.get('forms', [])
         if forms:
             for form in forms:
                 method = form['method']
-                yield Url(
+                yield Url(
+                    form['action'],
+                    host=urlparse(item['request']['endpoint']).netloc,
+                    method=method,
+                    stored_response_path=response["stored_response_path"],
+                    request_headers=self.get_opt_value('header', preprocess=True)
+                )
                 yield Tag(
                     name='form',
                     match=form['action'],
+                    stored_response_path=response["stored_response_path"],
                     extra_data={
                         'method': form['method'],
                         'enctype': form.get('enctype', ''),
                         'parameters': ','.join(form.get('parameters', []))
                     }
                 )
+        item['request_headers'] = self.get_opt_value('header', preprocess=True)
         yield item
 
-    @staticmethod
-    def on_init(self):
-        debug_resp = self.get_opt_value('debug_resp')
-        if debug_resp:
-            self.cmd = self.cmd.replace('-silent', '')
-        store_responses = self.get_opt_value('store_responses')
-        if store_responses:
-            self.cmd += f' -srd {self.reports_folder}/.outputs'
-
     @staticmethod
     def on_item(self, item):
-        if not isinstance(item, Url):
+        if not isinstance(item, (Url, Tag)):
             return item
         store_responses = self.get_opt_value('store_responses')
         if store_responses and os.path.exists(item.stored_response_path):
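The reworked on_json_loaded above emits both a Url and a Tag result for every HTML form katana detects. A standalone sketch over a sample JSONL record shaped like the fields the diff reads (request.endpoint, response.forms, response.stored_response_path), using plain dicts instead of secator's output types:

from urllib.parse import urlparse

record = {
    'request': {'endpoint': 'https://example.com/login'},
    'response': {
        'stored_response_path': '/tmp/.outputs/response/1.txt',
        'forms': [{'method': 'POST', 'action': 'https://example.com/session',
                   'parameters': ['user', 'pass']}],
    },
}

def extract_forms(item):
    response = item.get('response', {})
    for form in response.get('forms', []):
        # one result for the form's target URL ...
        yield {'type': 'url', 'url': form['action'],
               'host': urlparse(item['request']['endpoint']).netloc,
               'method': form['method'],
               'stored_response_path': response['stored_response_path']}
        # ... and one tag describing the form itself
        yield {'type': 'tag', 'name': 'form', 'match': form['action'],
               'extra_data': {'method': form['method'],
                              'parameters': ','.join(form.get('parameters', []))}}

for result in extract_forms(record):
    print(result)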
secator/tasks/maigret.py
CHANGED
@@ -6,7 +6,7 @@ import re
 from secator.decorators import task
 from secator.definitions import (DELAY, EXTRA_DATA, OPT_NOT_SUPPORTED, OUTPUT_PATH, PROXY,
                                  RATE_LIMIT, RETRIES, SITE_NAME, THREADS,
-                                 TIMEOUT, URL,
+                                 TIMEOUT, URL, STRING)
 from secator.output_types import UserAccount, Info, Error
 from secator.tasks._categories import ReconUser
 
@@ -17,10 +17,11 @@ logger = logging.getLogger(__name__)
 class maigret(ReconUser):
     """Collect a dossier on a person by username."""
     cmd = 'maigret'
+    input_types = [STRING]
+    output_types = [UserAccount]
     tags = ['user', 'recon', 'username']
     file_flag = None
     input_flag = None
-    input_types = [USERNAME]
     json_flag = '--json ndjson'
     opt_prefix = '--'
     opts = {
@@ -34,8 +35,6 @@ class maigret(ReconUser):
         TIMEOUT: 'timeout',
         THREADS: OPT_NOT_SUPPORTED
     }
-    input_types = [USERNAME]
-    output_types = [UserAccount]
     output_map = {
         UserAccount: {
             SITE_NAME: 'sitename',
@@ -43,6 +42,7 @@ class maigret(ReconUser):
             EXTRA_DATA: lambda x: x['status'].get('ids', {})
         }
     }
+    install_version = '0.5.0a'
     install_cmd = 'pipx install git+https://github.com/soxoj/maigret --force'
     socks5_proxy = True
     profile = 'io'
secator/tasks/mapcidr.py
CHANGED
@@ -1,7 +1,7 @@
 import validators
 
 from secator.decorators import task
-from secator.definitions import (CIDR_RANGE, OPT_NOT_SUPPORTED, PROXY,
+from secator.definitions import (CIDR_RANGE, IP, OPT_NOT_SUPPORTED, PROXY,
                                  RATE_LIMIT, RETRIES, THREADS, TIMEOUT)
 from secator.output_types import Ip
 from secator.tasks._categories import ReconIp
@@ -11,6 +11,8 @@ from secator.tasks._categories import ReconIp
 class mapcidr(ReconIp):
     """Utility program to perform multiple operations for a given subnet/cidr ranges."""
     cmd = 'mapcidr'
+    input_types = [CIDR_RANGE, IP]
+    output_types = [Ip]
     tags = ['ip', 'recon']
     input_flag = '-cidr'
     file_flag = '-cl'
@@ -20,8 +22,6 @@ class mapcidr(ReconIp):
     install_version = 'v1.1.34'
     install_cmd = 'go install -v github.com/projectdiscovery/mapcidr/cmd/mapcidr@[install_version]'
     install_github_handle = 'projectdiscovery/mapcidr'
-    input_types = [CIDR_RANGE]
-    output_types = [Ip]
     opt_key_map = {
         THREADS: OPT_NOT_SUPPORTED,
         PROXY: OPT_NOT_SUPPORTED,
secator/tasks/msfconsole.py
CHANGED
@@ -6,8 +6,8 @@ from rich.panel import Panel
 
 from secator.config import CONFIG
 from secator.decorators import task
-from secator.definitions import (DELAY, FOLLOW_REDIRECT, HEADER, HOST,
-                                 RETRIES, THREADS, TIMEOUT, USER_AGENT
+from secator.definitions import (DELAY, FOLLOW_REDIRECT, HEADER, HOST, OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT,
+                                 RETRIES, THREADS, TIMEOUT, USER_AGENT)
 from secator.tasks._categories import VulnMulti
 from secator.utils import get_file_timestamp
 
@@ -18,11 +18,11 @@ logger = logging.getLogger(__name__)
 class msfconsole(VulnMulti):
     """CLI to access and work with the Metasploit Framework."""
     cmd = 'msfconsole --quiet'
+    input_types = [HOST]
+    output_types = []
     tags = ['exploit', 'attack']
     version_flag = OPT_NOT_SUPPORTED
-    input_types = [HOST, IP, URL]
     input_chunk_size = 1
-    output_types = []
     opt_prefix = '--'
     opts = {
         'resource': {'type': str, 'help': 'Metasploit resource script.', 'short': 'r'},
secator/tasks/naabu.py
CHANGED
@@ -11,9 +11,10 @@ from secator.tasks._categories import ReconPort
 class naabu(ReconPort):
     """Port scanning tool written in Go."""
     cmd = 'naabu'
+    input_types = [HOST, IP]
+    output_types = [Port]
     tags = ['port', 'scan']
     input_flag = '-host'
-    input_types = [HOST, IP]
     file_flag = '-list'
     json_flag = '-json'
     opts = {
@@ -49,7 +50,6 @@ class naabu(ReconPort):
             STATE: lambda x: 'open'
         }
     }
-    output_types = [Port]
     install_version = 'v2.3.3'
     install_cmd = 'go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@[install_version]'
     install_github_handle = 'projectdiscovery/naabu'
secator/tasks/nmap.py
CHANGED
@@ -10,9 +10,8 @@ from secator.definitions import (CONFIDENCE, CVSS_SCORE, DELAY,
                                  DESCRIPTION, EXTRA_DATA, FOLLOW_REDIRECT,
                                  HEADER, HOST, ID, IP, PROTOCOL, MATCHED_AT, NAME,
                                  OPT_NOT_SUPPORTED, OUTPUT_PATH, PORT, PORTS, PROVIDER,
-                                 PROXY, RATE_LIMIT, REFERENCE, REFERENCES,
-
-                                 THREADS, TIMEOUT, TOP_PORTS, USER_AGENT)
+                                 PROXY, RATE_LIMIT, REFERENCE, REFERENCES, RETRIES, SCRIPT, SERVICE_NAME,
+                                 SEVERITY, STATE, TAGS, THREADS, TIMEOUT, TOP_PORTS, USER_AGENT)
 from secator.output_types import Exploit, Port, Vulnerability, Info, Error
 from secator.tasks._categories import VulnMulti
 from secator.utils import debug, traceback_as_string
@@ -24,13 +23,12 @@ logger = logging.getLogger(__name__)
 class nmap(VulnMulti):
     """Network Mapper is a free and open source utility for network discovery and security auditing."""
     cmd = 'nmap'
-    tags = ['port', 'scan']
-    input_flag = None
     input_types = [HOST, IP]
+    output_types = [Port, Vulnerability, Exploit]
+    tags = ['port', 'scan']
     input_chunk_size = 1
     file_flag = '-iL'
     opt_prefix = '--'
-    output_types = [Port, Vulnerability, Exploit]
     opts = {
         # Port specification and scan order
         PORTS: {'type': str, 'short': 'p', 'help': 'Ports to scan (- to scan all)'},
@@ -248,7 +246,7 @@
             EXTRA_DATA: extra_data,
         }
         if not func:
-            debug(f'Script output parser for "{script_id}" is not supported YET.', sub='cve')
+            debug(f'Script output parser for "{script_id}" is not supported YET.', sub='cve.nmap')
             continue
         for data in func(output, cpes=cpes):
             data.update(metadata)
@@ -257,7 +255,7 @@
             confidence = 'high' if version_exact else 'medium'
             data[CONFIDENCE] = confidence
             if (CONFIG.runners.skip_cve_low_confidence and data[CONFIDENCE] == 'low'):
-                debug(f'{data[ID]}: ignored (low confidence).', sub='cve')
+                debug(f'{data[ID]}: ignored (low confidence).', sub='cve.nmap')
                 continue
             if data in datas:
                 continue
@@ -349,7 +347,7 @@
             if not isinstance(cpes, list):
                 cpes = [cpes]
             extra_data['cpe'] = cpes
-            debug(f'Found CPEs: {",".join(cpes)}', sub='cve')
+            debug(f'Found CPEs: {",".join(cpes)}', sub='cve.nmap')
 
         # Grab confidence
         conf = int(extra_data.get('conf', 0))
@@ -368,7 +366,7 @@
             cpe = VulnMulti.create_cpe_string(product, version_cpe)
             if cpe not in cpes:
                 cpes.append(cpe)
-                debug(f'Added new CPE from identified product and version: {cpe}', sub='cve')
+                debug(f'Added new CPE from identified product and version: {cpe}', sub='cve.nmap')
 
         return extra_data
 
@@ -412,7 +410,7 @@
                 NAME: vuln_id,
                 DESCRIPTION: vuln_title,
                 PROVIDER: provider_name,
-                TAGS: [
+                TAGS: [provider_name]
             }
             if provider_name == 'MITRE CVE':
                 data = VulnMulti.lookup_cve(vuln['id'], *cpes)
@@ -436,7 +434,6 @@
             elems = tuple(line.split('\t'))
 
             if len(elems) == 4:  # exploit
-                # TODO: Implement exploit processing
                 exploit_id, cvss_score, reference_url, _ = elems
                 name = exploit_id
                 # edb_id = name.split(':')[-1] if 'EDB-ID' in name else None
@@ -460,6 +457,7 @@
                 exploit[TAGS].extend(vuln[TAGS])
                 exploit[CONFIDENCE] = vuln[CONFIDENCE]
                 yield exploit
+                continue
 
             elif len(elems) == 3:  # vuln
                 vuln = {}
@@ -483,6 +481,6 @@
                     vuln.update(data)
                     yield vuln
                 else:
-                    debug(f'Vulners parser for "{vuln_type}" is not implemented YET.', sub='cve')
+                    debug(f'Vulners parser for "{vuln_type}" is not implemented YET.', sub='cve.nmap')
             else:
-                debug(f'Unrecognized vulners output: {elems}', sub='cve')
+                debug(f'Unrecognized vulners output: {elems}', sub='cve.nmap')
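The debug calls above move from the broad 'cve' channel to a dotted 'cve.nmap' sub-channel, letting nmap-specific CVE messages be filtered independently. A sketch of how dotted channels can be matched so that enabling a parent also matches its children; this illustrates the idea only and is not secator's actual debug() implementation:

ENABLED = {'cve'}  # e.g. parsed from a DEBUG config or environment variable

def debug(msg: str, sub: str = ''):
    parts = sub.split('.')
    prefixes = {'.'.join(parts[:i + 1]) for i in range(len(parts))}
    if prefixes & ENABLED:
        print(f'[{sub}] {msg}')

debug('Found CPEs: cpe:2.3:a:openbsd:openssh', sub='cve.nmap')  # printed
debug('unrelated message', sub='install')                       # filtered out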