secator 0.22.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- secator/.gitignore +162 -0
- secator/__init__.py +0 -0
- secator/celery.py +453 -0
- secator/celery_signals.py +138 -0
- secator/celery_utils.py +320 -0
- secator/cli.py +2035 -0
- secator/cli_helper.py +395 -0
- secator/click.py +87 -0
- secator/config.py +670 -0
- secator/configs/__init__.py +0 -0
- secator/configs/profiles/__init__.py +0 -0
- secator/configs/profiles/aggressive.yaml +8 -0
- secator/configs/profiles/all_ports.yaml +7 -0
- secator/configs/profiles/full.yaml +31 -0
- secator/configs/profiles/http_headless.yaml +7 -0
- secator/configs/profiles/http_record.yaml +8 -0
- secator/configs/profiles/insane.yaml +8 -0
- secator/configs/profiles/paranoid.yaml +8 -0
- secator/configs/profiles/passive.yaml +11 -0
- secator/configs/profiles/polite.yaml +8 -0
- secator/configs/profiles/sneaky.yaml +8 -0
- secator/configs/profiles/tor.yaml +5 -0
- secator/configs/scans/__init__.py +0 -0
- secator/configs/scans/domain.yaml +31 -0
- secator/configs/scans/host.yaml +23 -0
- secator/configs/scans/network.yaml +30 -0
- secator/configs/scans/subdomain.yaml +27 -0
- secator/configs/scans/url.yaml +19 -0
- secator/configs/workflows/__init__.py +0 -0
- secator/configs/workflows/cidr_recon.yaml +48 -0
- secator/configs/workflows/code_scan.yaml +29 -0
- secator/configs/workflows/domain_recon.yaml +46 -0
- secator/configs/workflows/host_recon.yaml +95 -0
- secator/configs/workflows/subdomain_recon.yaml +120 -0
- secator/configs/workflows/url_bypass.yaml +15 -0
- secator/configs/workflows/url_crawl.yaml +98 -0
- secator/configs/workflows/url_dirsearch.yaml +62 -0
- secator/configs/workflows/url_fuzz.yaml +68 -0
- secator/configs/workflows/url_params_fuzz.yaml +66 -0
- secator/configs/workflows/url_secrets_hunt.yaml +23 -0
- secator/configs/workflows/url_vuln.yaml +91 -0
- secator/configs/workflows/user_hunt.yaml +29 -0
- secator/configs/workflows/wordpress.yaml +38 -0
- secator/cve.py +718 -0
- secator/decorators.py +7 -0
- secator/definitions.py +168 -0
- secator/exporters/__init__.py +14 -0
- secator/exporters/_base.py +3 -0
- secator/exporters/console.py +10 -0
- secator/exporters/csv.py +37 -0
- secator/exporters/gdrive.py +123 -0
- secator/exporters/json.py +16 -0
- secator/exporters/table.py +36 -0
- secator/exporters/txt.py +28 -0
- secator/hooks/__init__.py +0 -0
- secator/hooks/gcs.py +80 -0
- secator/hooks/mongodb.py +281 -0
- secator/installer.py +694 -0
- secator/loader.py +128 -0
- secator/output_types/__init__.py +49 -0
- secator/output_types/_base.py +108 -0
- secator/output_types/certificate.py +78 -0
- secator/output_types/domain.py +50 -0
- secator/output_types/error.py +42 -0
- secator/output_types/exploit.py +58 -0
- secator/output_types/info.py +24 -0
- secator/output_types/ip.py +47 -0
- secator/output_types/port.py +55 -0
- secator/output_types/progress.py +36 -0
- secator/output_types/record.py +36 -0
- secator/output_types/stat.py +41 -0
- secator/output_types/state.py +29 -0
- secator/output_types/subdomain.py +45 -0
- secator/output_types/tag.py +69 -0
- secator/output_types/target.py +38 -0
- secator/output_types/url.py +112 -0
- secator/output_types/user_account.py +41 -0
- secator/output_types/vulnerability.py +101 -0
- secator/output_types/warning.py +30 -0
- secator/report.py +140 -0
- secator/rich.py +130 -0
- secator/runners/__init__.py +14 -0
- secator/runners/_base.py +1240 -0
- secator/runners/_helpers.py +218 -0
- secator/runners/celery.py +18 -0
- secator/runners/command.py +1178 -0
- secator/runners/python.py +126 -0
- secator/runners/scan.py +87 -0
- secator/runners/task.py +81 -0
- secator/runners/workflow.py +168 -0
- secator/scans/__init__.py +29 -0
- secator/serializers/__init__.py +8 -0
- secator/serializers/dataclass.py +39 -0
- secator/serializers/json.py +45 -0
- secator/serializers/regex.py +25 -0
- secator/tasks/__init__.py +8 -0
- secator/tasks/_categories.py +487 -0
- secator/tasks/arjun.py +113 -0
- secator/tasks/arp.py +53 -0
- secator/tasks/arpscan.py +70 -0
- secator/tasks/bbot.py +372 -0
- secator/tasks/bup.py +118 -0
- secator/tasks/cariddi.py +193 -0
- secator/tasks/dalfox.py +87 -0
- secator/tasks/dirsearch.py +84 -0
- secator/tasks/dnsx.py +186 -0
- secator/tasks/feroxbuster.py +93 -0
- secator/tasks/ffuf.py +135 -0
- secator/tasks/fping.py +85 -0
- secator/tasks/gau.py +102 -0
- secator/tasks/getasn.py +60 -0
- secator/tasks/gf.py +36 -0
- secator/tasks/gitleaks.py +96 -0
- secator/tasks/gospider.py +84 -0
- secator/tasks/grype.py +109 -0
- secator/tasks/h8mail.py +75 -0
- secator/tasks/httpx.py +167 -0
- secator/tasks/jswhois.py +36 -0
- secator/tasks/katana.py +203 -0
- secator/tasks/maigret.py +87 -0
- secator/tasks/mapcidr.py +42 -0
- secator/tasks/msfconsole.py +179 -0
- secator/tasks/naabu.py +85 -0
- secator/tasks/nmap.py +487 -0
- secator/tasks/nuclei.py +151 -0
- secator/tasks/search_vulns.py +225 -0
- secator/tasks/searchsploit.py +109 -0
- secator/tasks/sshaudit.py +299 -0
- secator/tasks/subfinder.py +48 -0
- secator/tasks/testssl.py +283 -0
- secator/tasks/trivy.py +130 -0
- secator/tasks/trufflehog.py +240 -0
- secator/tasks/urlfinder.py +100 -0
- secator/tasks/wafw00f.py +106 -0
- secator/tasks/whois.py +34 -0
- secator/tasks/wpprobe.py +116 -0
- secator/tasks/wpscan.py +202 -0
- secator/tasks/x8.py +94 -0
- secator/tasks/xurlfind3r.py +83 -0
- secator/template.py +294 -0
- secator/thread.py +24 -0
- secator/tree.py +196 -0
- secator/utils.py +922 -0
- secator/utils_test.py +297 -0
- secator/workflows/__init__.py +29 -0
- secator-0.22.0.dist-info/METADATA +447 -0
- secator-0.22.0.dist-info/RECORD +150 -0
- secator-0.22.0.dist-info/WHEEL +4 -0
- secator-0.22.0.dist-info/entry_points.txt +2 -0
- secator-0.22.0.dist-info/licenses/LICENSE +60 -0
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import validators
|
|
2
|
+
from collections import defaultdict
|
|
3
|
+
from urllib.parse import urlparse, urlunparse, parse_qs
|
|
4
|
+
|
|
5
|
+
from secator.definitions import HOST, URL, DELAY, DEPTH, FILTER_CODES, FILTER_REGEX, FILTER_SIZE, FILTER_WORDS, MATCH_CODES, MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, FOLLOW_REDIRECT, METHOD, PROXY, RATE_LIMIT, RETRIES, THREADS, TIMEOUT, USER_AGENT, HEADER, OPT_NOT_SUPPORTED # noqa: E501
|
|
6
|
+
from secator.output_types import Url
|
|
7
|
+
from secator.decorators import task
|
|
8
|
+
from secator.serializers import JSONSerializer
|
|
9
|
+
from secator.tasks._categories import HttpCrawler
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
# Passive URL sources supported by the urlfinder binary; interpolated into the
# help text of the task's 'sources' option below.
URLFINDER_SOURCES = [
	'alienvault',
	'commoncrawl',
	'urlscan',
	'waybackarchive',
	'virustotal'
]
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
@task()
class urlfinder(HttpCrawler):
	"""Find URLs in text.

	Wraps projectdiscovery's ``urlfinder`` binary: passively collects URLs for
	the input hosts from public sources (see ``URLFINDER_SOURCES``).
	Full-URL inputs are reduced to their netloc before the scan starts, and
	results are throttled per base URL by capping how many times the same query
	parameter may re-appear (``max_param_occurrences``).
	"""
	cmd = 'urlfinder'
	input_types = [HOST, URL]
	output_types = [Url]
	item_loaders = [JSONSerializer()]
	json_flag = '-j'
	tags = ['pattern', 'scan']
	file_flag = '-list'
	input_flag = '-d'
	version_flag = '-version'
	opts = {
		'sources': {'type': str, 'help': f'Sources to use (comma-delimited) ({", ".join(URLFINDER_SOURCES)})', 'required': False},  # noqa: E501
		'exclude_sources': {'type': str, 'help': 'Sources to exclude (comma-delimited)', 'required': False},
		'version': {'type': bool, 'help': 'Version', 'required': False},
		'stats': {'type': bool, 'help': 'Display source statistics', 'required': False},
		'max_param_occurrences': {'type': int, 'help': 'Max occurrences for the same parameter in the same URL before discarding next results', 'required': False, 'default': 10, 'internal': True},  # noqa: E501
	}
	opt_key_map = {
		'sources': 's',
		'exclude_sources': 'es',
		'url_scope': 'us',
		'url_out_scope': 'uos',
		'field_scope': 'fs',
		'no_scope': 'ns',
		'display_out_scope': 'do',
		'match': 'm',
		'filter': 'f',
		HEADER: OPT_NOT_SUPPORTED,
		DELAY: OPT_NOT_SUPPORTED,
		DEPTH: OPT_NOT_SUPPORTED,
		FILTER_CODES: OPT_NOT_SUPPORTED,
		FILTER_REGEX: OPT_NOT_SUPPORTED,
		FILTER_SIZE: OPT_NOT_SUPPORTED,
		FILTER_WORDS: OPT_NOT_SUPPORTED,
		MATCH_CODES: OPT_NOT_SUPPORTED,
		MATCH_REGEX: OPT_NOT_SUPPORTED,
		MATCH_SIZE: OPT_NOT_SUPPORTED,
		MATCH_WORDS: OPT_NOT_SUPPORTED,
		FOLLOW_REDIRECT: OPT_NOT_SUPPORTED,
		METHOD: OPT_NOT_SUPPORTED,
		PROXY: 'proxy',
		RATE_LIMIT: 'rl',
		RETRIES: OPT_NOT_SUPPORTED,
		THREADS: OPT_NOT_SUPPORTED,
		TIMEOUT: 'timeout',
		USER_AGENT: OPT_NOT_SUPPORTED,
	}
	encoding = 'ansi'
	install_version = 'v0.0.3'
	install_cmd = 'go install -v github.com/projectdiscovery/urlfinder/cmd/urlfinder@[install_version]'
	github_handle = 'projectdiscovery/urlfinder'
	proxychains = False
	proxy_socks5 = True
	proxy_http = True
	profile = 'io'

	@staticmethod
	def before_init(self):
		"""Reduce full-URL inputs to their network location (host[:port]).

		urlfinder expects domains (``-d``), so ``https://example.com/path``
		becomes ``example.com`` in-place in ``self.inputs``.
		"""
		# 'target' instead of 'input' to avoid shadowing the builtin.
		for idx, target in enumerate(self.inputs):
			if validators.url(target):
				self.inputs[idx] = urlparse(target).netloc

	@staticmethod
	def on_init(self):
		"""Prepare per-run query-parameter occurrence tracking."""
		self.max_param_occurrences = self.get_opt_value('max_param_occurrences')
		# seen_params[base_url][param] -> how many result URLs carried that param.
		self.seen_params = defaultdict(lambda: defaultdict(int))

	@staticmethod
	def on_json_loaded(self, item):
		"""Yield a Url for each JSON result, dropping near-duplicate URLs.

		A URL is discarded when any of its query parameters has already been
		seen more than ``max_param_occurrences`` times for the same base URL
		(query and fragment stripped), which caps noise from e.g. paginated
		archive sources emitting the same endpoint with many parameter values.
		"""
		parsed_url = urlparse(item['url'])
		base_url = urlunparse(parsed_url._replace(query="", fragment=""))  # strip query & fragment
		query_params = parse_qs(parsed_url.query)
		for param in set(query_params.keys()):
			self.seen_params[base_url][param] += 1
			if self.seen_params[base_url][param] > int(self.max_param_occurrences):
				return  # too many repeats of this param for this base URL: drop the result
		yield Url(url=item['url'], host=parsed_url.hostname, extra_data={'source': item['source']})
|
secator/tasks/wafw00f.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import tempfile
|
|
3
|
+
import shlex
|
|
4
|
+
import yaml
|
|
5
|
+
|
|
6
|
+
from secator.decorators import task
|
|
7
|
+
from secator.runners import Command
|
|
8
|
+
from secator.definitions import (OUTPUT_PATH, HEADER, PROXY, URL, TIMEOUT)
|
|
9
|
+
from secator.output_types import Tag, Info, Error
|
|
10
|
+
from secator.tasks._categories import OPTS
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@task()
class wafw00f(Command):
	"""Web Application Firewall Fingerprinting tool."""
	cmd = 'wafw00f'
	input_types = [URL]
	output_types = [Tag]
	tags = ['waf', 'scan']
	# Targets are passed as positional args; files via -i.
	input_flag = None
	file_flag = '-i'
	json_flag = '-f json'
	encoding = 'ansi'
	opt_prefix = '--'
	meta_opts = {
		PROXY: OPTS[PROXY],
		HEADER: OPTS[HEADER],
		TIMEOUT: OPTS[TIMEOUT]
	}
	opts = {
		'list': {'is_flag': True, 'default': False, 'help': 'List all WAFs that WAFW00F is able to detect'},
		'waf_type': {'type': str, 'short': 'wt', 'help': 'Test for one specific WAF'},
		'find_all': {'is_flag': True, 'short': 'ta', 'default': False, 'help': 'Find all WAFs which match the signatures, do not stop testing on the first one'},  # noqa: E501
		'no_follow_redirects': {'is_flag': True, 'short': 'nfr', 'default': False, 'help': 'Do not follow redirections given by 3xx responses'},  # noqa: E501
	}
	# wafw00f's --headers flag takes a file, not a string: convert the ';;'-joined
	# header string into a temp file path before it hits the command line.
	opt_value_map = {
		HEADER: lambda x: wafw00f.headers_to_file(x)
	}
	opt_key_map = {
		HEADER: 'headers',
		PROXY: 'proxy',
		'waf_type': 'test',
		'find_all': 'findall',
		'no_follow_redirects': 'noredirect',
	}
	install_version = 'v2.3.1'
	install_cmd = 'pipx install git+https://github.com/EnableSecurity/wafw00f.git@[install_version] --force'
	install_github_bin = False
	github_handle = 'EnableSecurity/wafw00f'
	proxy_http = True

	@staticmethod
	def on_cmd(self):
		# Force JSON output to a file under the report folder unless the caller
		# supplied an explicit output path.
		self.output_path = self.get_opt_value(OUTPUT_PATH)
		if not self.output_path:
			self.output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json'
		self.cmd += f' -o {shlex.quote(self.output_path)}'

		# Rewrite the raw ';;'-delimited header string in the command into a
		# headers file kept inside the report folder.
		# NOTE(review): opt_value_map[HEADER] already maps headers to a temp file
		# via headers_to_file(); presumably get_opt_value(HEADER) here still
		# returns the raw string so this replace finds it — TODO confirm ordering.
		self.headers = self.get_opt_value(HEADER)
		if self.headers:
			header_file = f'{self.reports_folder}/.inputs/headers.txt'
			with open(header_file, 'w') as f:
				for header in self.headers.split(';;'):
					f.write(f'{header}\n')
			self.cmd = self.cmd.replace(self.headers, header_file)

	@staticmethod
	def on_cmd_done(self):
		# Skip parsing if list mode
		list_mode = self.get_opt_value('list')
		if list_mode:
			return

		if not os.path.exists(self.output_path):
			yield Error(message=f'Could not find JSON results in {self.output_path}')
			return

		yield Info(message=f'JSON results saved to {self.output_path}')
		# yaml.safe_load parses the JSON output file (JSON is a YAML subset).
		with open(self.output_path, 'r') as f:
			results = yaml.safe_load(f.read())

		# Only the first entry is inspected; a Tag is emitted when a WAF was
		# detected on it.
		if len(results) > 0 and results[0]['detected']:
			waf_name = results[0]['firewall']
			url = results[0]['url']
			match = results[0]['trigger_url']
			manufacter = results[0]['manufacturer']
			yield Tag(
				category='info',
				name='waf',
				match=url,
				value=waf_name,
				extra_data={
					# 'manufacter' [sic] is an emitted data key — consumers may
					# depend on the spelling, so it is kept as-is.
					'manufacter': manufacter,
					'trigger_url': match,
					'headers': self.get_opt_value('header', preprocess=True)
				}
			)

	@staticmethod
	def headers_to_file(headers):
		# Write the ';;'-delimited header string to a file, one header per line,
		# and return its path (used by opt_value_map above).
		# NOTE(review): fixed path in the shared temp dir — concurrent runs on the
		# same machine would overwrite each other's file; confirm acceptable.
		temp_dir = tempfile.gettempdir()
		header_file = f'{temp_dir}/headers.txt'
		with open(header_file, 'w') as f:
			for header in headers.split(';;'):
				f.write(f'{header}\n')
		return header_file
|
secator/tasks/whois.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
from secator.decorators import task
|
|
2
|
+
from secator.definitions import HOST
|
|
3
|
+
from secator.output_types import Domain
|
|
4
|
+
from secator.runners import Command
|
|
5
|
+
from secator.serializers import JSONSerializer
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
@task()
class whois(Command):
	"""The whois tool retrieves registration information about domain names and IP addresses."""
	cmd = 'whoisdomain'
	input_flag = '-d'
	json_flag = '--json'
	input_chunk_size = 1
	input_types = [HOST]
	output_types = [Domain]
	item_loaders = [JSONSerializer()]
	version_flag = '-V'
	install_version = '1.20230906.1'
	# The system 'whois' binary is a runtime dependency of whoisdomain.
	install_cmd_pre = {'*': ['whois']}
	install_cmd = 'pipx install whoisdomain==[install_version] --force'
	install_github_bin = False
	github_handle = 'mboot-github/WhoisDomain'

	@staticmethod
	def on_json_loaded(self, item):
		"""Map one whoisdomain JSON record to a Domain output item."""
		# Pull the fields out in order before building the output item.
		domain_name = item['name']
		registrar = item['registrar']
		created = item['creation_date']
		expires = item['expiration_date']
		registrant = item['registrant']
		emails = item['emails']
		yield Domain(
			domain=domain_name,
			registrar=registrar,
			creation_date=created,
			expiration_date=expires,
			registrant=registrant,
			extra_data={'emails': emails},
		)
|
secator/tasks/wpprobe.py
ADDED
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import re
|
|
3
|
+
import click
|
|
4
|
+
import yaml
|
|
5
|
+
import shlex
|
|
6
|
+
|
|
7
|
+
from secator.decorators import task
|
|
8
|
+
from secator.runners import Command
|
|
9
|
+
from secator.definitions import OUTPUT_PATH, THREADS, URL
|
|
10
|
+
from secator.output_types import Vulnerability, Tag, Info, Warning, Error
|
|
11
|
+
from secator.tasks._categories import OPTS
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
@task()
class wpprobe(Command):
	"""Fast wordpress plugin enumeration tool."""
	cmd = 'wpprobe'
	input_types = [URL]
	output_types = [Vulnerability, Tag]
	tags = ['vuln', 'scan', 'wordpress']
	file_flag = '-f'
	input_flag = '-u'
	opt_prefix = '-'
	opts = {
		'mode': {'type': click.Choice(['scan', 'update', 'update-db']), 'default': 'scan', 'help': 'WPProbe mode', 'required': True, 'internal': True},  # noqa: E501
		'output_path': {'type': str, 'default': None, 'help': 'Output JSON file path', 'internal': True, 'display': False},  # noqa: E501
	}
	meta_opts = {
		THREADS: OPTS[THREADS]
	}
	opt_key_map = {
		THREADS: 't'
	}
	install_version = 'v0.5.6'
	install_cmd = 'go install github.com/Chocapikk/wpprobe@[install_version]'
	github_handle = 'Chocapikk/wpprobe'
	# wpprobe needs its vulnerability DB after install.
	install_post = {
		'*': 'wpprobe update-db'
	}

	@staticmethod
	def on_cmd(self):
		# 'update' / 'update-db' run bare, without targets or output flags.
		mode = self.get_opt_value('mode')
		if mode == 'update' or mode == 'update-db':
			self.cmd = f'{wpprobe.cmd} {mode}'
			return
		# Insert the subcommand right after the binary name ('wpprobe' contains
		# no regex metacharacters, so re.sub on the literal is safe here).
		self.cmd = re.sub(wpprobe.cmd, f'{wpprobe.cmd} {mode}', self.cmd, 1)
		# Default the JSON output file into the report folder.
		output_path = self.get_opt_value(OUTPUT_PATH)
		if not output_path:
			output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json'
		self.output_path = output_path
		self.cmd += f' -o {shlex.quote(self.output_path)}'

	@staticmethod
	def on_cmd_done(self):
		# Only scan mode produces a parseable JSON report.
		if not self.get_opt_value('mode') == 'scan':
			return

		if not os.path.exists(self.output_path):
			yield Error(message=f'Could not find JSON results in {self.output_path}')
			return

		yield Info(message=f'JSON results saved to {self.output_path}')
		# yaml.safe_load parses the JSON report (JSON is a YAML subset).
		with open(self.output_path, 'r') as f:
			results = yaml.safe_load(f.read())
		if not results or 'url' not in results:
			yield Warning(message='No results found !')
			return
		url = results['url']
		# One Tag per detected plugin version, then one Vulnerability per known
		# CVE grouped under each severity level.
		for plugin_name, plugin_data in results['plugins'].items():
			for plugin_data_version in plugin_data:
				plugin_version = plugin_data_version['version']
				yield Tag(
					category='info',
					name='wordpress_plugin',
					match=url,
					value=plugin_name + ':' + plugin_version,
					extra_data={
						'name': plugin_name,
						'version': plugin_version
					}
				)
				severities = plugin_data_version.get('severities', {})

				# Fix for https://github.com/Chocapikk/wpprobe/issues/17
				# (severities may be emitted as a list of single-key dicts
				# instead of one dict; flatten it, dropping 'n/a' buckets).
				if isinstance(severities, list):
					tmp_severities = {}
					for severity in severities:
						for k, v in severity.items():
							if k != 'n/a':
								tmp_severities[k] = v
					severities = tmp_severities

				for severity, severity_data in severities.items():
					# Normalize the literal string 'None' severity.
					if severity == 'None':
						severity = 'unknown'
					for item in severity_data:
						for vuln in item['vulnerabilities']:
							auth_type = item.get('auth_type')
							extra_data = {
								'plugin_name': plugin_name,
								'plugin_version': plugin_version,
							}
							if auth_type:
								extra_data['auth_type'] = auth_type
							yield Vulnerability(
								name=vuln['title'],
								id=vuln['cve'],
								severity=severity,
								cvss_score=vuln['cvss_score'],
								tags=['wordpress', 'wordpress_plugin', plugin_name],
								reference=vuln['cve_link'],
								extra_data=extra_data,
								matched_at=url,
								confidence='high'
							)
|
secator/tasks/wpscan.py
ADDED
|
@@ -0,0 +1,202 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import os
|
|
3
|
+
import shlex
|
|
4
|
+
|
|
5
|
+
from secator.config import CONFIG
|
|
6
|
+
from secator.decorators import task
|
|
7
|
+
from secator.definitions import (CONFIDENCE, CVSS_SCORE, DELAY, DESCRIPTION,
|
|
8
|
+
EXTRA_DATA, FOLLOW_REDIRECT, HEADER, ID,
|
|
9
|
+
MATCHED_AT, NAME, OPT_NOT_SUPPORTED, OUTPUT_PATH, PROVIDER,
|
|
10
|
+
PROXY, RATE_LIMIT, REFERENCES, RETRIES,
|
|
11
|
+
SEVERITY, TAGS, THREADS, TIMEOUT,
|
|
12
|
+
URL, USER_AGENT)
|
|
13
|
+
from secator.output_types import Tag, Vulnerability, Info, Error
|
|
14
|
+
from secator.tasks._categories import VulnHttp
|
|
15
|
+
from secator.installer import parse_version
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@task()
class wpscan(VulnHttp):
	"""Wordpress security scanner.

	Runs the wpscan ruby gem against each target URL, writes its JSON report
	into the run's report folder, then parses it into output items:
	the Wordpress core version (flagged when outdated), the main theme and each
	plugin (Tag per detected version, Vulnerability when outdated), plus
	wpscan's "interesting findings" mapped through ``output_map``.
	"""
	cmd = 'wpscan --force --verbose'
	input_types = [URL]
	output_types = [Vulnerability, Tag]
	tags = ['vuln', 'scan', 'wordpress']
	input_flag = '--url'
	input_chunk_size = 1
	json_flag = '-f json'
	opt_prefix = '--'
	opts = {
		'cookie_string': {'type': str, 'short': 'cookie', 'help': 'Cookie string, format: cookie1=value1;...'},
		'api_token': {'type': str, 'short': 'token', 'help': 'WPScan API Token to display vulnerability data'},
		'wp_content_dir': {'type': str, 'short': 'wcd', 'help': 'wp-content directory if custom or not detected'},
		'wp_plugins_dir': {'type': str, 'short': 'wpd', 'help': 'wp-plugins directory if custom or not detected'},
		'passwords': {'type': str, 'help': 'List of passwords to use during the password attack.'},
		'usernames': {'type': str, 'help': 'List of usernames to use during the password attack.'},
		'login_uri': {'type': str, 'short': 'lu', 'help': 'URI of the login page if different from /wp-login.php'},
		'detection_mode': {'type': str, 'short': 'dm', 'help': 'Detection mode between mixed, passive, and aggressive'},
		'random_user_agent': {'is_flag': True, 'short': 'rua', 'help': 'Random user agent'},
		'disable_tls_checks': {'is_flag': True, 'short': 'dtc', 'help': 'Disable TLS checks'}
	}
	opt_key_map = {
		HEADER: OPT_NOT_SUPPORTED,
		DELAY: 'throttle',
		FOLLOW_REDIRECT: OPT_NOT_SUPPORTED,
		PROXY: 'proxy',
		RATE_LIMIT: OPT_NOT_SUPPORTED,
		RETRIES: OPT_NOT_SUPPORTED,
		TIMEOUT: 'request-timeout',
		THREADS: 'max-threads',
		USER_AGENT: 'user-agent',
	}
	# wpscan --throttle is in milliseconds; secator delay is in seconds.
	opt_value_map = {
		DELAY: lambda x: x * 1000
	}
	# Maps raw wpscan "interesting finding" dicts (yielded from on_cmd_done)
	# into Vulnerability fields.
	output_map = {
		Vulnerability: {
			ID: lambda x: '',
			NAME: lambda x: x['to_s'].split(':')[0],
			DESCRIPTION: lambda x: '',
			SEVERITY: lambda x: 'info',
			CONFIDENCE: lambda x: 'high' if x.get('confidence', 0) == 100 else 'low',
			CVSS_SCORE: lambda x: 0,
			MATCHED_AT: lambda x: x['url'],
			TAGS: lambda x: [x['type']],
			REFERENCES: lambda x: x.get('references', {}).get('url', []),
			EXTRA_DATA: lambda x: {
				'data': x.get('interesting_entries', []),
				'found_by': x.get('found_by', ''),
				'confirmed_by': x.get('confirmed_by', {}),
				'metasploit': x.get('references', {}).get('metasploit', [])
			},
			PROVIDER: 'wpscan',
		},
	}
	install_version = 'v3.8.28'
	install_pre_cmd = {
		'apt': ['make', 'kali:libcurl4t64', 'libffi-dev'],
		'pacman': ['make', 'ruby-erb'],
		'*': ['make']
	}
	install_cmd = f'gem install wpscan -v [install_version_strip] --user-install -n {CONFIG.dirs.bin}'
	# Kali ships a binary nokogiri incompatible with the user install; rebuild
	# it from source.
	install_post = {
		'kali': (
			f'gem uninstall nokogiri --user-install -n {CONFIG.dirs.bin} --force --executables && '
			f'gem install nokogiri --user-install -n {CONFIG.dirs.bin} --platform=ruby'
		)
	}
	install_github_bin = False
	github_handle = 'wpscanteam/wpscan'
	proxychains = False
	proxy_http = True
	proxy_socks5 = False
	profile = 'io'

	@staticmethod
	def on_init(self):
		"""Route wpscan's JSON report to a file in the report folder."""
		output_path = self.get_opt_value(OUTPUT_PATH)
		if not output_path:
			output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json'
		self.output_path = output_path
		self.cmd += f' -o {shlex.quote(self.output_path)}'

	@staticmethod
	def on_cmd_done(self):
		"""Parse the wpscan JSON report into output items."""
		if not os.path.exists(self.output_path):
			yield Error(message=f'Could not find JSON results in {self.output_path}')
			return

		yield Info(message=f'JSON results saved to {self.output_path}')
		with open(self.output_path, 'r') as f:
			data = json.load(f)

		# Get URL
		target = data.get('target_url', self.inputs[0])

		# Get errors (scan_aborted holds the abort reason string when set)
		scan_aborted = data.get('scan_aborted', False)
		if scan_aborted:
			yield Error(message=scan_aborted, traceback='\n'.join(data.get('trace', [])))
			return

		# Wordpress version: emit the raw version dict (augmented with the keys
		# output_map expects) when wpscan marks it outdated.
		version = data.get('version', {})
		if version:
			wp_version = version['number']
			wp_version_status = version['status']
			if wp_version_status == 'outdated':
				vuln = version
				vuln.update({
					'url': target,
					'to_s': 'Wordpress outdated version',
					'type': wp_version,
					'references': {},
				})
				yield vuln

		# Main theme: Tag for the detected version, plus a Vulnerability when it
		# lags behind the latest known version.
		main_theme = data.get('main_theme', {})
		if main_theme:
			version = main_theme.get('version', {})
			slug = main_theme['slug']
			location = main_theme['location']
			if version:
				number = version['number']
				latest_version = main_theme.get('latest_version') or 'unknown'
				yield Tag(
					category='info',
					name='wordpress_theme',
					match=target,
					value=slug + ':' + number,
					extra_data={
						'url': location,
						'latest_version': latest_version
					}
				)
				outdated = parse_version(number) < parse_version(latest_version) if latest_version != 'unknown' and number else False  # noqa: E501
				if outdated:
					yield Vulnerability(
						matched_at=target,
						name=f'Wordpress theme - {slug} {number} outdated',
						description=f'The wordpress theme {slug} is outdated, consider updating to the latest version {latest_version}',
						confidence='high',
						severity='info',
						tags=['wordpress', 'wordpress_theme']
					)

		# Interesting findings: raw dicts, converted by output_map.
		interesting_findings = data.get('interesting_findings', [])
		for item in interesting_findings:
			yield item

		# Plugins: same Tag + outdated-Vulnerability scheme as the main theme.
		# Loop variable renamed from 'data' to avoid shadowing the full report
		# dict parsed above.
		plugins = data.get('plugins', {})
		for _, plugin_info in plugins.items():
			version = plugin_info.get('version', {})
			slug = plugin_info['slug']
			location = plugin_info['location']
			if version:
				number = version['number']
				latest_version = plugin_info.get('latest_version') or 'unknown'
				yield Tag(
					category='info',
					name='wordpress_plugin',
					match=target,
					value=slug + ':' + number,
					extra_data={
						'url': location,
						'name': slug,
						'version': number,
						'latest_version': latest_version
					}
				)
				outdated = parse_version(number) < parse_version(latest_version) if latest_version != 'unknown' and number else False  # noqa: E501
				if outdated:
					yield Vulnerability(
						matched_at=target,
						name=f'Wordpress plugin - {slug} {number} outdated',
						description=f'The wordpress plugin {slug} is outdated, consider updating to the latest version {latest_version}.',
						confidence='high',
						severity='info',
						tags=['wordpress', 'wordpress_plugin']
					)
|
secator/tasks/x8.py
ADDED
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
from secator.decorators import task
|
|
2
|
+
from secator.definitions import (URL, WORDLIST, RETRIES, OPT_NOT_SUPPORTED, USER_AGENT, THREADS, DELAY, TIMEOUT, RATE_LIMIT, METHOD, HEADER, FOLLOW_REDIRECT, FILTER_CODES, FILTER_REGEX, FILTER_SIZE, FILTER_WORDS, MATCH_CODES, MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, DEPTH) # noqa: E501
|
|
3
|
+
from secator.output_types import Url, Tag
|
|
4
|
+
from secator.serializers import JSONSerializer
|
|
5
|
+
from secator.tasks._categories import HttpFuzzer
|
|
6
|
+
from secator.utils import process_wordlist
|
|
7
|
+
from urllib.parse import urlparse, urlunparse
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@task()
class x8(HttpFuzzer):
	"""Hidden parameters discovery suite written in Rust."""
	cmd = 'x8'
	input_types = [URL]
	output_types = [Url, Tag]
	tags = ['url', 'fuzz', 'params']
	file_flag = '-u'
	input_flag = '-u'
	json_flag = '-O json'
	opt_prefix = '-'
	version_flag = '-V'
	opts = {
		WORDLIST: {'type': str, 'short': 'w', 'default': None, 'process': process_wordlist, 'help': 'Wordlist to use'},
	}
	opt_key_map = {
		USER_AGENT: OPT_NOT_SUPPORTED,
		THREADS: 'c',
		DEPTH: OPT_NOT_SUPPORTED,
		DELAY: '--delay',
		TIMEOUT: '--timeout',
		METHOD: '--method',
		WORDLIST: 'w',
		FILTER_CODES: OPT_NOT_SUPPORTED,
		FILTER_REGEX: OPT_NOT_SUPPORTED,
		FILTER_SIZE: OPT_NOT_SUPPORTED,
		FILTER_WORDS: OPT_NOT_SUPPORTED,
		MATCH_CODES: OPT_NOT_SUPPORTED,
		MATCH_REGEX: OPT_NOT_SUPPORTED,
		MATCH_SIZE: OPT_NOT_SUPPORTED,
		MATCH_WORDS: OPT_NOT_SUPPORTED,
		# NOTE(review): HEADER is marked unsupported here, yet opt_value_map and
		# on_init still consume it — confirm whether header pass-through is
		# intentionally disabled on the command line only.
		HEADER: OPT_NOT_SUPPORTED,
		RETRIES: OPT_NOT_SUPPORTED,
		# HEADER: 'H',
		RATE_LIMIT: OPT_NOT_SUPPORTED,
		FOLLOW_REDIRECT: '--follow-redirects',
	}
	# Convert secator's ';;'-delimited header string to x8's ';'-delimited form.
	opt_value_map = {
		HEADER: lambda headers: ';'.join(headers.split(';;'))
	}
	item_loaders = [JSONSerializer()]
	# Build deps for the cargo install (OpenSSL headers + toolchain).
	install_pre_cmd = {
		'apk': ['build-base', 'pkgconf', 'libssl3', 'libcrypto3', 'openssl-dev'],
		'apt': ['build-essential', 'pkg-config', 'libssl-dev'],
		'pacman': ['base-devel', 'pkg-config', 'openssl'],
		'zypper': ['gcc', 'pkg-config', 'libopenssl-devel'],
		'*': ['gcc', 'pkg-config', 'openssl-devel'],
	}
	install_version = '4.3.0'
	install_cmd = 'cargo install x8@[install_version] --force'
	install_github_bin = False  # TODO: enable this once https://github.com/Sh1Yo/x8/issues/65 is fixed
	# install_github_version_prefix = 'v'
	# install_ignore_bin = ['alpine', 'ubuntu']
	github_handle = 'Sh1Yo/x8'
	proxychains = False
	proxy_socks5 = False
	proxy_http = False
	profile = 'io'

	@staticmethod
	def on_init(self):
		# URLs already emitted this run (to yield each Url only once).
		self.urls = []
		# Snapshot of the request headers, attached to every Url item below.
		# assumes get_opt_value(HEADER, preprocess=True) returns a dict — TODO confirm.
		self.request_headers = {}
		for k, v in self.get_opt_value(HEADER, preprocess=True).items():
			self.request_headers[k] = v

	@staticmethod
	def on_json_loaded(self, item):
		# Yield the probed Url once, then a Tag per hidden parameter x8 found.
		url = item['url']
		if url not in self.urls:
			self.urls.append(url)
			yield Url(url=url, method=item['method'], status_code=item['status'], content_length=item['size'], request_headers=self.request_headers)  # noqa: E501
		for param in item.get('found_params', []):
			parsed_url = urlparse(url)
			# Tag is matched against the URL with its query string stripped.
			url_without_param = urlunparse(parsed_url._replace(query=''))
			extra_data = {k: v for k, v in param.items() if k != 'name'}
			extra_data['value'] = param['value']
			extra_data['url'] = url
			yield Tag(
				category='info',
				name='url_param',
				match=url_without_param,
				value=param['name'],
				extra_data=extra_data
			)
|