secator 0.6.0__py3-none-any.whl → 0.8.0__py3-none-any.whl
This diff shows the content of publicly available package versions as released to their public registries. It is provided for informational purposes only and reflects the changes between the two published versions.
Potentially problematic release: this version of secator might be problematic.
- secator/celery.py +160 -185
- secator/celery_utils.py +268 -0
- secator/cli.py +427 -176
- secator/config.py +114 -68
- secator/configs/workflows/host_recon.yaml +5 -3
- secator/configs/workflows/port_scan.yaml +7 -3
- secator/configs/workflows/subdomain_recon.yaml +2 -2
- secator/configs/workflows/url_bypass.yaml +10 -0
- secator/configs/workflows/url_dirsearch.yaml +1 -1
- secator/configs/workflows/url_vuln.yaml +1 -1
- secator/decorators.py +170 -92
- secator/definitions.py +11 -4
- secator/exporters/__init__.py +7 -5
- secator/exporters/console.py +10 -0
- secator/exporters/csv.py +27 -19
- secator/exporters/gdrive.py +16 -11
- secator/exporters/json.py +3 -1
- secator/exporters/table.py +30 -2
- secator/exporters/txt.py +20 -16
- secator/hooks/gcs.py +53 -0
- secator/hooks/mongodb.py +53 -27
- secator/installer.py +277 -60
- secator/output_types/__init__.py +29 -11
- secator/output_types/_base.py +11 -1
- secator/output_types/error.py +36 -0
- secator/output_types/exploit.py +12 -8
- secator/output_types/info.py +24 -0
- secator/output_types/ip.py +8 -1
- secator/output_types/port.py +9 -2
- secator/output_types/progress.py +5 -0
- secator/output_types/record.py +5 -3
- secator/output_types/stat.py +33 -0
- secator/output_types/subdomain.py +1 -1
- secator/output_types/tag.py +8 -6
- secator/output_types/target.py +2 -2
- secator/output_types/url.py +14 -11
- secator/output_types/user_account.py +6 -6
- secator/output_types/vulnerability.py +8 -6
- secator/output_types/warning.py +24 -0
- secator/report.py +56 -23
- secator/rich.py +44 -39
- secator/runners/_base.py +629 -638
- secator/runners/_helpers.py +5 -91
- secator/runners/celery.py +18 -0
- secator/runners/command.py +404 -214
- secator/runners/scan.py +8 -24
- secator/runners/task.py +21 -55
- secator/runners/workflow.py +41 -40
- secator/scans/__init__.py +28 -0
- secator/serializers/dataclass.py +6 -0
- secator/serializers/json.py +10 -5
- secator/serializers/regex.py +12 -4
- secator/tasks/_categories.py +147 -42
- secator/tasks/bbot.py +295 -0
- secator/tasks/bup.py +99 -0
- secator/tasks/cariddi.py +38 -49
- secator/tasks/dalfox.py +3 -0
- secator/tasks/dirsearch.py +14 -25
- secator/tasks/dnsx.py +49 -30
- secator/tasks/dnsxbrute.py +4 -1
- secator/tasks/feroxbuster.py +10 -20
- secator/tasks/ffuf.py +3 -2
- secator/tasks/fping.py +4 -4
- secator/tasks/gau.py +5 -0
- secator/tasks/gf.py +2 -2
- secator/tasks/gospider.py +4 -0
- secator/tasks/grype.py +11 -13
- secator/tasks/h8mail.py +32 -42
- secator/tasks/httpx.py +58 -21
- secator/tasks/katana.py +19 -23
- secator/tasks/maigret.py +27 -25
- secator/tasks/mapcidr.py +2 -3
- secator/tasks/msfconsole.py +22 -19
- secator/tasks/naabu.py +18 -2
- secator/tasks/nmap.py +82 -55
- secator/tasks/nuclei.py +13 -3
- secator/tasks/searchsploit.py +26 -11
- secator/tasks/subfinder.py +5 -1
- secator/tasks/wpscan.py +91 -94
- secator/template.py +61 -45
- secator/thread.py +24 -0
- secator/utils.py +417 -78
- secator/utils_test.py +48 -23
- secator/workflows/__init__.py +28 -0
- {secator-0.6.0.dist-info → secator-0.8.0.dist-info}/METADATA +59 -48
- secator-0.8.0.dist-info/RECORD +115 -0
- {secator-0.6.0.dist-info → secator-0.8.0.dist-info}/WHEEL +1 -1
- secator-0.6.0.dist-info/RECORD +0 -101
- {secator-0.6.0.dist-info → secator-0.8.0.dist-info}/entry_points.txt +0 -0
- {secator-0.6.0.dist-info → secator-0.8.0.dist-info}/licenses/LICENSE +0 -0
secator/tasks/cariddi.py
CHANGED

@@ -1,5 +1,3 @@
-import json
-
 from secator.decorators import task
 from secator.definitions import (DELAY, DEPTH, FILTER_CODES, FILTER_REGEX,
                                  FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT,
@@ -8,6 +6,7 @@ from secator.definitions import (DELAY, DEPTH, FILTER_CODES, FILTER_REGEX,
                                  OPT_PIPE_INPUT, PROXY, RATE_LIMIT, RETRIES,
                                  THREADS, TIMEOUT, URL, USER_AGENT)
 from secator.output_types import Tag, Url
+from secator.serializers import JSONSerializer
 from secator.tasks._categories import HttpCrawler
 
 
@@ -41,7 +40,7 @@ class cariddi(HttpCrawler):
 		TIMEOUT: 't',
 		USER_AGENT: 'ua'
 	}
-	item_loaders = []
+	item_loaders = [JSONSerializer()]
 	install_cmd = 'go install -v github.com/edoardottt/cariddi/cmd/cariddi@latest'
 	install_github_handle = 'edoardottt/cariddi'
 	encoding = 'ansi'
@@ -51,53 +50,43 @@ class cariddi(HttpCrawler):
 	profile = 'cpu'
 
 	@staticmethod
-	def …
-…
-		errors = matches.get('errors', [])
-		secrets = matches.get('secrets', [])
-		infos = matches.get('infos', [])
-
-		for param in params:
-			param_name = param['name']
-			for attack in param['attacks']:
-				extra_data = {'param': param_name, 'source': 'url'}
-				item = {
-					'name': attack + ' param',
-					'match': url,
-					'extra_data': extra_data
-				}
-				items.append(item)
-
-		for error in errors:
-			match = error['match']
-			match = (match[:1000] + '...TRUNCATED') if len(match) > 1000 else match  # truncate as this can be a very long match
-			error['extra_data'] = {'error': match, 'source': 'body'}
-			error['match'] = url
-			items.append(error)
+	def on_json_loaded(self, item):
+		url_item = {k: v for k, v in item.items() if k != 'matches'}
+		yield Url(**url_item)
+		url = url_item[URL]
+		matches = item.get('matches', {})
+		params = matches.get('parameters', [])
+		errors = matches.get('errors', [])
+		secrets = matches.get('secrets', [])
+		infos = matches.get('infos', [])
 
-…
+		for param in params:
+			param_name = param['name']
+			for attack in param['attacks']:
+				extra_data = {'param': param_name, 'source': 'url'}
+				yield Tag(
+					name=f'{attack} param',
+					match=url,
+					extra_data=extra_data
+				)
 
-…
-			info['extra_data'] = {'info': match, 'source': 'body'}
-			info['match'] = url
-			items.append(info)
+		for error in errors:
+			match = error['match']
+			error['extra_data'] = {'error': match, 'source': 'body'}
+			error['match'] = url
+			yield Tag(**error)
 
-…
+		for secret in secrets:
+			match = secret['match']
+			secret['extra_data'] = {'secret': match, 'source': 'body'}
+			secret['match'] = url
+			yield Tag(**secret)
 
-…
+		for info in infos:
+			CARIDDI_IGNORE_LIST = ['BTC address']  # TODO: make this a config option
+			if info['name'] in CARIDDI_IGNORE_LIST:
+				continue
+			match = info['match']
+			info['extra_data'] = {'info': match, 'source': 'body'}
+			info['match'] = url
+			yield Tag(**info)
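
The new on_json_loaded hook receives dicts that the JSONSerializer item loader has already parsed from each output line, replacing the removed manual JSON handling. A minimal standalone sketch of the same flow, with plain dicts standing in for secator's Url and Tag output types and a hypothetical sample line shaped like the fields visible in the diff:

import json

line = ('{"url": "http://example.com/?q=1", "matches": {"parameters": '
        '[{"name": "q", "attacks": ["SQLI"]}], "errors": [], "secrets": [], "infos": []}}')

item = json.loads(line)  # what the JSONSerializer loader does for each output line
url = item['url']
for param in item.get('matches', {}).get('parameters', []):
    for attack in param['attacks']:
        # mirrors the Tag yielded by on_json_loaded
        print({'name': f'{attack} param', 'match': url,
               'extra_data': {'param': param['name'], 'source': 'url'}})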
secator/tasks/dalfox.py
CHANGED

@@ -7,6 +7,7 @@ from secator.definitions import (CONFIDENCE, DELAY, EXTRA_DATA, FOLLOW_REDIRECT,
                                  SEVERITY, TAGS, THREADS, TIMEOUT, URL,
                                  USER_AGENT)
 from secator.output_types import Vulnerability
+from secator.serializers import JSONSerializer
 from secator.tasks._categories import VulnHttp
 
 DALFOX_TYPE_MAP = {
@@ -23,6 +24,7 @@ class dalfox(VulnHttp):
 	input_type = URL
 	input_flag = 'url'
 	file_flag = 'file'
+	# input_chunk_size = 1
 	json_flag = '--format json'
 	version_flag = 'version'
 	opt_prefix = '--'
@@ -37,6 +39,7 @@ class dalfox(VulnHttp):
 		TIMEOUT: 'timeout',
 		USER_AGENT: 'user-agent'
 	}
+	item_loaders = [JSONSerializer()]
 	output_map = {
 		Vulnerability: {
 			ID: lambda x: None,
secator/tasks/dirsearch.py
CHANGED

@@ -10,7 +10,7 @@ from secator.definitions import (CONTENT_LENGTH, CONTENT_TYPE, DELAY, DEPTH,
                                  MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED, OUTPUT_PATH, PROXY,
                                  RATE_LIMIT, RETRIES, STATUS_CODE,
                                  THREADS, TIMEOUT, USER_AGENT, WORDLIST)
-from secator.output_types import Url
+from secator.output_types import Url, Info, Error
 from secator.tasks._categories import HttpFuzzer
 
 
@@ -20,7 +20,7 @@ class dirsearch(HttpFuzzer):
 	cmd = 'dirsearch'
 	input_flag = '-u'
 	file_flag = '-l'
-	json_flag = '…
+	json_flag = '-O json'
 	opt_prefix = '--'
 	encoding = 'ansi'
 	opt_key_map = {
@@ -52,37 +52,26 @@ class dirsearch(HttpFuzzer):
 			STATUS_CODE: 'status'
 		}
 	}
-	install_cmd = 'pipx install dirsearch'
+	install_cmd = 'pipx install --force git+https://github.com/maurosoria/dirsearch'
 	proxychains = True
 	proxy_socks5 = True
 	proxy_http = True
 	profile = 'io'
 
-	def yielder(self):
-		prev = self.print_item_count
-		self.print_item_count = False
-		list(super().yielder())
-		if self.return_code != 0:
-			return
-		self.results = []
-		if not self.output_json:
-			return
-		note = f'dirsearch JSON results saved to {self.output_path}'
-		if self.print_line:
-			self._print(note)
-		if os.path.exists(self.output_path):
-			with open(self.output_path, 'r') as f:
-				results = yaml.safe_load(f.read()).get('results', [])
-			for item in results:
-				item = self._process_item(item)
-				if not item:
-					continue
-				yield item
-		self.print_item_count = prev
-
 	@staticmethod
 	def on_init(self):
 		self.output_path = self.get_opt_value(OUTPUT_PATH)
 		if not self.output_path:
 			self.output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json'
 		self.cmd += f' -o {self.output_path}'
+
+	@staticmethod
+	def on_cmd_done(self):
+		if not os.path.exists(self.output_path):
+			yield Error(message=f'Could not find JSON results in {self.output_path}')
+			return
+
+		yield Info(message=f'JSON results saved to {self.output_path}')
+		with open(self.output_path, 'r') as f:
+			results = yaml.safe_load(f.read()).get('results', [])
+		yield from results
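
The removed yielder override is replaced by an on_cmd_done hook that runs after the process exits and reads the report file. A rough standalone sketch of that flow (the report path and contents here are hypothetical; yaml.safe_load in the diff also accepts the plain JSON that dirsearch writes, since YAML is in practice a superset of JSON):

import json
import os

output_path = '/tmp/dirsearch_example.json'  # hypothetical report written via -o
with open(output_path, 'w') as f:
    json.dump({'results': [{'url': 'http://example.com/admin', 'status': 200}]}, f)

if not os.path.exists(output_path):
    print(f'Could not find JSON results in {output_path}')  # would be yielded as an Error
else:
    print(f'JSON results saved to {output_path}')           # would be yielded as an Info
    with open(output_path) as f:
        for result in json.load(f).get('results', []):
            print(result)                                   # raw dicts yielded downstream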
secator/tasks/dnsx.py
CHANGED

@@ -1,18 +1,20 @@
 from secator.decorators import task
 from secator.definitions import (OPT_PIPE_INPUT, RATE_LIMIT, RETRIES, THREADS)
-from secator.output_types import Record
+from secator.output_types import Record, Ip, Subdomain
+from secator.output_types.ip import IpProtocol
 from secator.tasks._categories import ReconDns
-import …
+from secator.serializers import JSONSerializer
+from secator.utils import extract_domain_info
 
 
 @task()
 class dnsx(ReconDns):
 	"""dnsx is a fast and multi-purpose DNS toolkit designed for running various retryabledns library."""
-	cmd = 'dnsx -resp -…
+	cmd = 'dnsx -resp -recon'
 	json_flag = '-json'
 	input_flag = OPT_PIPE_INPUT
 	file_flag = OPT_PIPE_INPUT
-	output_types = [Record]
+	output_types = [Record, Ip, Subdomain]
 	opt_key_map = {
 		RATE_LIMIT: 'rate-limit',
 		RETRIES: 'retry',
@@ -23,35 +25,52 @@ class dnsx(ReconDns):
 		'resolver': {'type': str, 'short': 'r', 'help': 'List of resolvers to use (file or comma separated)'},
 		'wildcard_domain': {'type': str, 'short': 'wd', 'help': 'Domain name for wildcard filtering'},
 	}
-
+	item_loaders = [JSONSerializer()]
 	install_cmd = 'go install -v github.com/projectdiscovery/dnsx/cmd/dnsx@latest'
 	install_github_handle = 'projectdiscovery/dnsx'
 	profile = 'io'
 
 	@staticmethod
-	def …
-…
-		for _type in record_types:
-			values = item.get(_type, [])
-			for value in values:
-				name = value
-				extra_data = {}
-				if isinstance(value, dict):
-					name = value['name']
-					extra_data = {k: v for k, v in value.items() if k != 'name'}
-				items.append({
-					'host': host,
-					'name': name,
-					'type': _type.upper(),
-					'extra_data': extra_data
-				})
-		except json.decoder.JSONDecodeError:
-			pass
+	def on_json_loaded(self, item):
+		# Show full DNS response
+		quiet = self.get_opt_value('quiet')
+		if not quiet:
+			all = item['all']
+			for line in all:
+				yield line
+			yield '\n'
 
-…
+		# Loop through record types and yield records
+		record_types = ['a', 'aaaa', 'cname', 'mx', 'ns', 'txt', 'srv', 'ptr', 'soa', 'axfr', 'caa']
+		host = item['host']
+		for _type in record_types:
+			values = item.get(_type, [])
+			for value in values:
+				name = value
+				extra_data = {}
+				if isinstance(value, dict):
+					name = value['name']
+					extra_data = {k: v for k, v in value.items() if k != 'name'}
+				if _type == 'a':
+					yield Ip(
+						host=host,
+						ip=name,
+						protocol=IpProtocol.IPv4
+					)
+				elif _type == 'aaaa':
+					yield Ip(
+						host=host,
+						ip=name,
+						protocol=IpProtocol.IPv6
+					)
+				elif _type == 'ptr':
+					yield Subdomain(
+						host=name,
+						domain=extract_domain_info(name, domain_only=True)
+					)
+				yield Record(
+					host=host,
+					name=name,
+					type=_type.upper(),
+					extra_data=extra_data
+				)
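
on_json_loaded now fans each dnsx JSON object out into typed results: A/AAAA values additionally produce Ip items, PTR values additionally produce Subdomain items, and every value still produces a Record. A sketch with plain dicts in place of the output types (the input shape is inferred from the diff; real dnsx -json output carries more fields):

# Hypothetical dnsx JSON object limited to the fields the hook reads.
item = {'host': 'example.com', 'a': ['93.184.216.34'],
        'mx': [{'name': 'mail.example.com', 'pref': 10}]}

record_types = ['a', 'aaaa', 'cname', 'mx', 'ns', 'txt', 'srv', 'ptr', 'soa', 'axfr', 'caa']
for _type in record_types:
    for value in item.get(_type, []):
        if isinstance(value, dict):
            name = value['name']
            extra_data = {k: v for k, v in value.items() if k != 'name'}
        else:
            name, extra_data = value, {}
        if _type in ('a', 'aaaa'):
            print('Ip', {'host': item['host'], 'ip': name})  # IPv4/IPv6 protocol in the real hook
        elif _type == 'ptr':
            print('Subdomain', {'host': name})
        print('Record', {'host': item['host'], 'name': name,
                         'type': _type.upper(), 'extra_data': extra_data})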
secator/tasks/dnsxbrute.py
CHANGED

@@ -2,7 +2,9 @@ from secator.decorators import task
 from secator.definitions import (DOMAIN, HOST, RATE_LIMIT, RETRIES, THREADS, WORDLIST, EXTRA_DATA)
 from secator.config import CONFIG
 from secator.output_types import Subdomain
+from secator.serializers import JSONSerializer
 from secator.tasks._categories import ReconDns
+from secator.utils import process_wordlist
 
 
 @task()
@@ -18,9 +20,10 @@ class dnsxbrute(ReconDns):
 		THREADS: 'threads',
 	}
 	opts = {
-		WORDLIST: {'type': str, 'short': 'w', 'default': CONFIG.wordlists.defaults.dns, 'help': 'Wordlist'},
+		WORDLIST: {'type': str, 'short': 'w', 'default': CONFIG.wordlists.defaults.dns, 'process': process_wordlist, 'help': 'Wordlist to use'},  # noqa: E501
 		'trace': {'is_flag': True, 'default': False, 'help': 'Perform dns tracing'},
 	}
+	item_loaders = [JSONSerializer()]
 	output_map = {
 		Subdomain: {
 			HOST: 'host',
secator/tasks/feroxbuster.py
CHANGED

@@ -1,15 +1,14 @@
-import …
-from pathlib import Path
-
+from secator.config import CONFIG
 from secator.decorators import task
 from secator.definitions import (CONTENT_TYPE, DELAY, DEPTH, FILTER_CODES,
                                  FILTER_REGEX, FILTER_SIZE, FILTER_WORDS,
                                  FOLLOW_REDIRECT, HEADER, LINES, MATCH_CODES,
                                  MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, METHOD,
-                                 OPT_NOT_SUPPORTED, OPT_PIPE_INPUT,
+                                 OPT_NOT_SUPPORTED, OPT_PIPE_INPUT, PROXY,
                                  RATE_LIMIT, RETRIES, STATUS_CODE,
                                  THREADS, TIMEOUT, USER_AGENT, WORDLIST, WORDS, DEFAULT_FEROXBUSTER_FLAGS)
 from secator.output_types import Url
+from secator.serializers import JSONSerializer
 from secator.tasks._categories import HttpFuzzer
 
 
@@ -20,7 +19,7 @@ class feroxbuster(HttpFuzzer):
 	input_flag = '--url'
 	input_chunk_size = 1
 	file_flag = OPT_PIPE_INPUT
-	json_flag = '--json'
+	json_flag = '--silent --json'
 	opt_prefix = '--'
 	opts = {
 		# 'auto_tune': {'is_flag': True, 'default': False, 'help': 'Automatically lower scan rate when too many errors'},
@@ -51,6 +50,7 @@ class feroxbuster(HttpFuzzer):
 		USER_AGENT: 'user-agent',
 		WORDLIST: 'wordlist'
 	}
+	item_loaders = [JSONSerializer()]
 	output_map = {
 		Url: {
 			STATUS_CODE: 'status',
@@ -60,9 +60,7 @@ class feroxbuster(HttpFuzzer):
 		}
 	}
 	install_cmd = (
-		'…
-		'curl -sL https://raw.githubusercontent.com/epi052/feroxbuster/master/install-nix.sh | '
-		'bash && sudo mv feroxbuster /usr/local/bin'
+		f'cd /tmp && curl -sL https://raw.githubusercontent.com/epi052/feroxbuster/master/install-nix.sh | bash -s {CONFIG.dirs.bin}'  # noqa: E501
 	)
 	install_github_handle = 'epi052/feroxbuster'
 	proxychains = False
@@ -70,21 +68,13 @@ class feroxbuster(HttpFuzzer):
 	proxy_http = True
 	profile = 'cpu'
 
-	@staticmethod
-	def on_init(self):
-		self.output_path = self.get_opt_value(OUTPUT_PATH)
-		if not self.output_path:
-			self.output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json'
-		Path(self.output_path).touch()
-		self.cmd += f' --output {self.output_path}'
-
 	@staticmethod
 	def on_start(self):
-		if self.…
+		if self.inputs_path:
 			self.cmd += ' --stdin'
-		self.cmd += f' & tail --pid=$! -f {shlex.quote(self.output_path)}'
-		self.shell = True
 
 	@staticmethod
 	def validate_item(self, item):
-…
+		if isinstance(item, dict):
+			return item['type'] == 'response'
+		return True
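
feroxbuster's JSON mode writes one object per line with a "type" discriminator, and the new validate_item keeps only "response" entries. A line-level sketch (the sample lines are abbreviated, not verbatim feroxbuster output):

import json

lines = [
    '{"type": "statistics", "requests": 10}',
    '{"type": "response", "url": "http://example.com/x", "status": 200}',
]
for line in lines:
    item = json.loads(line)
    if isinstance(item, dict) and item['type'] == 'response':
        print(item)  # only response entries survive the filter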
secator/tasks/ffuf.py
CHANGED

@@ -25,7 +25,7 @@ class ffuf(HttpFuzzer):
 	json_flag = '-json'
 	version_flag = '-V'
 	item_loaders = [
-		JSONSerializer(),
+		JSONSerializer(strict=True),
 		RegexSerializer(FFUF_PROGRESS_REGEX, fields=['count', 'total', 'rps', 'duration', 'errors'])
 	]
 	opts = {
@@ -79,5 +79,6 @@ class ffuf(HttpFuzzer):
 
 	@staticmethod
 	def on_item(self, item):
-		item…
+		if isinstance(item, Url):
+			item.method = self.get_opt_value(METHOD) or 'GET'
 		return item
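
Presumably, strict=True makes the JSON loader reject lines that are not complete JSON documents, so ffuf's non-JSON progress lines fall through to the RegexSerializer listed after it. A rough approximation of that chaining (the pattern below is a simplified stand-in, not secator's actual FFUF_PROGRESS_REGEX):

import json
import re

PROGRESS = re.compile(r':: Progress: \[(?P<count>\d+)/(?P<total>\d+)\]')  # simplified stand-in

def load(line):
    try:
        return json.loads(line)  # strict: the whole line must be a JSON document
    except json.JSONDecodeError:
        match = PROGRESS.search(line)
        return match.groupdict() if match else None

print(load('{"url": "http://example.com/FUZZ", "status": 200}'))
print(load(':: Progress: [40/4613] :: Job [1/1]'))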
secator/tasks/fping.py
CHANGED

@@ -29,13 +29,13 @@ class fping(ReconIp):
 	}
 	input_type = IP
 	output_types = [Ip]
-…
+	install_pre = {'*': ['fping']}
 
 	@staticmethod
 	def item_loader(self, line):
-		if validators.ipv4(line) or validators.ipv6(line):
-			return …
-…
+		if not (validators.ipv4(line) or validators.ipv6(line)):
+			return
+		yield {'ip': line, 'alive': True}
 
 	@staticmethod
 	def on_line(self, line):
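
The old guard returned early on valid IPs, so nothing useful was ever yielded; the fix inverts it to skip only invalid lines. The equivalent check with the validators package the task already uses:

import validators

for line in ['192.168.1.1', 'not-an-ip', '::1']:
    if not (validators.ipv4(line) or validators.ipv6(line)):
        continue  # skip non-IP lines such as fping's error output
    print({'ip': line, 'alive': True})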
secator/tasks/gau.py
CHANGED

@@ -5,6 +5,7 @@ from secator.definitions import (DELAY, DEPTH, FILTER_CODES, FILTER_REGEX,
                                  MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED,
                                  OPT_PIPE_INPUT, PROXY, RATE_LIMIT, RETRIES,
                                  THREADS, TIMEOUT, USER_AGENT)
+from secator.serializers import JSONSerializer
 from secator.tasks._categories import HttpCrawler
 
 
@@ -15,6 +16,9 @@ class gau(HttpCrawler):
 	file_flag = OPT_PIPE_INPUT
 	json_flag = '--json'
 	opt_prefix = '--'
+	opts = {
+		'providers': {'type': str, 'default': None, 'help': 'List of providers to use (wayback,commoncrawl,otx,urlscan)'}
+	}
 	opt_key_map = {
 		HEADER: OPT_NOT_SUPPORTED,
 		DELAY: OPT_NOT_SUPPORTED,
@@ -36,6 +40,7 @@ class gau(HttpCrawler):
 		TIMEOUT: 'timeout',
 		USER_AGENT: OPT_NOT_SUPPORTED,
 	}
+	item_loaders = [JSONSerializer()]
 	install_cmd = 'go install -v github.com/lc/gau/v2/cmd/gau@latest'
 	install_github_handle = 'lc/gau'
 	proxychains = False
secator/tasks/gf.py
CHANGED

@@ -12,7 +12,7 @@ class gf(Tagger):
 	input_flag = OPT_PIPE_INPUT
 	version_flag = OPT_NOT_SUPPORTED
 	opts = {
-		'pattern': {'type': str, 'help': 'Pattern names to match against (comma-delimited)'}
+		'pattern': {'type': str, 'help': 'Pattern names to match against (comma-delimited)', 'required': True}
 	}
 	opt_key_map = {
 		'pattern': ''
@@ -26,7 +26,7 @@ class gf(Tagger):
 
 	@staticmethod
 	def item_loader(self, line):
-…
+		yield {'match': line, 'name': self.get_opt_value('pattern').rstrip() + ' pattern'}  # noqa: E731,E501
 
 	@staticmethod
 	def on_item(self, item):
secator/tasks/gospider.py
CHANGED

@@ -8,6 +8,7 @@ from secator.definitions import (CONTENT_LENGTH, DELAY, DEPTH, FILTER_CODES,
                                  OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT, RETRIES,
                                  STATUS_CODE, THREADS, TIMEOUT, URL, USER_AGENT)
 from secator.output_types import Url
+from secator.serializers import JSONSerializer
 from secator.tasks._categories import HttpCrawler
 
 
@@ -44,6 +45,7 @@ class gospider(HttpCrawler):
 		FOLLOW_REDIRECT: lambda x: not x,
 		DELAY: lambda x: round(x) if isinstance(x, float) else x
 	}
+	item_loaders = [JSONSerializer()]
 	output_map = {
 		Url: {
 			URL: 'output',
@@ -62,6 +64,8 @@ class gospider(HttpCrawler):
 	@staticmethod
 	def validate_item(self, item):
 		"""Keep only items that match the same host."""
+		if not isinstance(item, dict):
+			return False
 		try:
 			netloc_in = furl(item['input']).netloc
 			netloc_out = furl(item['output']).netloc
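
The added isinstance guard protects validate_item from non-dict items coming out of the JSON loader before the same-host comparison runs. A sketch of the filter; the exception type caught by the original try block is not visible in the diff, so a broad except stands in here:

from furl import furl

item = {'input': 'http://example.com', 'output': 'http://example.com/login'}
keep = False
if isinstance(item, dict):
    try:
        keep = furl(item['input']).netloc == furl(item['output']).netloc
    except Exception:  # exact exception type not visible in the diff
        keep = False
print(keep)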
secator/tasks/grype.py
CHANGED

@@ -1,4 +1,4 @@
-…
+from secator.config import CONFIG
 from secator.decorators import task
 from secator.definitions import (DELAY, FOLLOW_REDIRECT, HEADER,
                                  OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT, RETRIES,
@@ -28,32 +28,32 @@ class grype(VulnCode):
 	}
 	output_types = [Vulnerability]
 	install_cmd = (
-		'curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh |…
+		f'curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b {CONFIG.dirs.bin}'
 	)
 	install_github_handle = 'anchore/grype'
 
 	@staticmethod
 	def item_loader(self, line):
 		"""Load vulnerabilty dicts from grype line output."""
-		split = [i for i in line.split('…
-		if …
-			return …
-…
+		split = [i for i in line.split(' ') if i]
+		if len(split) not in [5, 6] or split[0] == 'NAME':
+			return
+		versions_fixed = None
 		if len(split) == 5:  # no version fixed
 			product, version, product_type, vuln_id, severity = tuple(split)
 		elif len(split) == 6:
-			product, version, …
+			product, version, versions_fixed, product_type, vuln_id, severity = tuple(split)
 		extra_data = {
 			'lang': product_type,
 			'product': product,
 			'version': version,
 		}
-		if …
-			extra_data['…
+		if versions_fixed:
+			extra_data['versions_fixed'] = [c.strip() for c in versions_fixed.split(', ')]
 		data = {
 			'id': vuln_id,
 			'name': vuln_id,
-			'matched_at': self.…
+			'matched_at': self.inputs[0],
 			'confidence': 'medium',
 			'severity': severity.lower(),
 			'provider': 'grype',
@@ -63,7 +63,6 @@ class grype(VulnCode):
 		if vuln_id.startswith('GHSA'):
 			data['provider'] = 'github.com'
 			data['references'] = [f'https://github.com/advisories/{vuln_id}']
-			data['tags'].extend(['cve', 'ghsa'])
 			vuln = VulnCode.lookup_ghsa(vuln_id)
 			if vuln:
 				data.update(vuln)
@@ -72,8 +71,7 @@ class grype(VulnCode):
 		elif vuln_id.startswith('CVE'):
 			vuln = VulnCode.lookup_cve(vuln_id)
 			if vuln:
-				vuln['tags'].append('cve')
 				data.update(vuln)
 		data['severity'] = data['severity'] or severity.lower()
 		data['extra_data'] = extra_data
-…
+		yield data
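
item_loader now splits grype's aligned table output on runs of spaces and skips the header row. A standalone sketch with a fabricated sample line in grype's column order (NAME, INSTALLED, FIXED-IN, TYPE, VULNERABILITY, SEVERITY):

line = 'urllib3  1.26.4  1.26.5  python  GHSA-xxxx-xxxx-xxxx  Medium'  # fabricated sample
split = [i for i in line.split(' ') if i]
if len(split) in (5, 6) and split[0] != 'NAME':
    versions_fixed = None
    if len(split) == 5:  # no FIXED-IN column on this row
        product, version, product_type, vuln_id, severity = split
    else:
        product, version, versions_fixed, product_type, vuln_id, severity = split
    print({'id': vuln_id, 'severity': severity.lower(),
           'extra_data': {'product': product, 'version': version,
                          'versions_fixed': versions_fixed}})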
secator/tasks/h8mail.py
CHANGED

@@ -4,7 +4,7 @@ import json
 from secator.decorators import task
 from secator.definitions import EMAIL, OUTPUT_PATH
 from secator.tasks._categories import OSInt
-from secator.output_types import UserAccount
+from secator.output_types import UserAccount, Info, Error
 
 
 @task()
@@ -17,17 +17,11 @@ class h8mail(OSInt):
 	file_flag = '-domain'
 	version_flag = '--help'
 	opt_prefix = '--'
-	opt_key_map = {
-…
-	}
 	opts = {
 		'config': {'type': str, 'help': 'Configuration file for API keys'},
 		'local_breach': {'type': str, 'short': 'lb', 'help': 'Local breach file'}
 	}
-…
-	}
-…
-	install_cmd = 'pipx install h8mail'
+	install_cmd = 'pipx install h8mail && pipx upgrade h8mail'
 
 	@staticmethod
 	def on_start(self):
@@ -37,44 +31,40 @@ class h8mail(OSInt):
 		self.output_path = output_path
 		self.cmd = self.cmd.replace('--json', f'--json {self.output_path}')
 
-…
-		self.…
-…
-		if self.return_code != 0:
+	@staticmethod
+	def on_cmd_done(self):
+		if not os.path.exists(self.output_path):
+			yield Error(message=f'Could not find JSON results in {self.output_path}')
 			return
-…
-			for entry in entries:
-				source, site_name = tuple(entry.split(':'))
-				yield UserAccount(**{
-					"site_name": site_name,
-					"username": email.split('@')[0],
-					"email": email,
-					"extra_data": {
-						'source': source
-					},
-				})
-		else:
+
+		yield Info(message=f'JSON results saved to {self.output_path}')
+		with open(self.output_path, 'r') as f:
+			data = json.load(f)
+
+		targets = data['targets']
+		for target in targets:
+			email = target['target']
+			target_data = target.get('data', [])
+			pwn_num = target['pwn_num']
+			if not pwn_num > 0:
+				continue
+			if len(target_data) > 0:
+				entries = target_data[0]
+				for entry in entries:
+					source, site_name = tuple(entry.split(':'))
 					yield UserAccount(**{
+						"site_name": site_name,
 						"username": email.split('@')[0],
 						"email": email,
 						"extra_data": {
-							'source': …
+							'source': source
 						},
 					})
-…
+			else:
+				yield UserAccount(**{
+					"username": email.split('@')[0],
+					"email": email,
+					"extra_data": {
+						'source': self.get_opt_value('local_breach')
+					},
+				})