secator 0.22.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- secator/.gitignore +162 -0
- secator/__init__.py +0 -0
- secator/celery.py +453 -0
- secator/celery_signals.py +138 -0
- secator/celery_utils.py +320 -0
- secator/cli.py +2035 -0
- secator/cli_helper.py +395 -0
- secator/click.py +87 -0
- secator/config.py +670 -0
- secator/configs/__init__.py +0 -0
- secator/configs/profiles/__init__.py +0 -0
- secator/configs/profiles/aggressive.yaml +8 -0
- secator/configs/profiles/all_ports.yaml +7 -0
- secator/configs/profiles/full.yaml +31 -0
- secator/configs/profiles/http_headless.yaml +7 -0
- secator/configs/profiles/http_record.yaml +8 -0
- secator/configs/profiles/insane.yaml +8 -0
- secator/configs/profiles/paranoid.yaml +8 -0
- secator/configs/profiles/passive.yaml +11 -0
- secator/configs/profiles/polite.yaml +8 -0
- secator/configs/profiles/sneaky.yaml +8 -0
- secator/configs/profiles/tor.yaml +5 -0
- secator/configs/scans/__init__.py +0 -0
- secator/configs/scans/domain.yaml +31 -0
- secator/configs/scans/host.yaml +23 -0
- secator/configs/scans/network.yaml +30 -0
- secator/configs/scans/subdomain.yaml +27 -0
- secator/configs/scans/url.yaml +19 -0
- secator/configs/workflows/__init__.py +0 -0
- secator/configs/workflows/cidr_recon.yaml +48 -0
- secator/configs/workflows/code_scan.yaml +29 -0
- secator/configs/workflows/domain_recon.yaml +46 -0
- secator/configs/workflows/host_recon.yaml +95 -0
- secator/configs/workflows/subdomain_recon.yaml +120 -0
- secator/configs/workflows/url_bypass.yaml +15 -0
- secator/configs/workflows/url_crawl.yaml +98 -0
- secator/configs/workflows/url_dirsearch.yaml +62 -0
- secator/configs/workflows/url_fuzz.yaml +68 -0
- secator/configs/workflows/url_params_fuzz.yaml +66 -0
- secator/configs/workflows/url_secrets_hunt.yaml +23 -0
- secator/configs/workflows/url_vuln.yaml +91 -0
- secator/configs/workflows/user_hunt.yaml +29 -0
- secator/configs/workflows/wordpress.yaml +38 -0
- secator/cve.py +718 -0
- secator/decorators.py +7 -0
- secator/definitions.py +168 -0
- secator/exporters/__init__.py +14 -0
- secator/exporters/_base.py +3 -0
- secator/exporters/console.py +10 -0
- secator/exporters/csv.py +37 -0
- secator/exporters/gdrive.py +123 -0
- secator/exporters/json.py +16 -0
- secator/exporters/table.py +36 -0
- secator/exporters/txt.py +28 -0
- secator/hooks/__init__.py +0 -0
- secator/hooks/gcs.py +80 -0
- secator/hooks/mongodb.py +281 -0
- secator/installer.py +694 -0
- secator/loader.py +128 -0
- secator/output_types/__init__.py +49 -0
- secator/output_types/_base.py +108 -0
- secator/output_types/certificate.py +78 -0
- secator/output_types/domain.py +50 -0
- secator/output_types/error.py +42 -0
- secator/output_types/exploit.py +58 -0
- secator/output_types/info.py +24 -0
- secator/output_types/ip.py +47 -0
- secator/output_types/port.py +55 -0
- secator/output_types/progress.py +36 -0
- secator/output_types/record.py +36 -0
- secator/output_types/stat.py +41 -0
- secator/output_types/state.py +29 -0
- secator/output_types/subdomain.py +45 -0
- secator/output_types/tag.py +69 -0
- secator/output_types/target.py +38 -0
- secator/output_types/url.py +112 -0
- secator/output_types/user_account.py +41 -0
- secator/output_types/vulnerability.py +101 -0
- secator/output_types/warning.py +30 -0
- secator/report.py +140 -0
- secator/rich.py +130 -0
- secator/runners/__init__.py +14 -0
- secator/runners/_base.py +1240 -0
- secator/runners/_helpers.py +218 -0
- secator/runners/celery.py +18 -0
- secator/runners/command.py +1178 -0
- secator/runners/python.py +126 -0
- secator/runners/scan.py +87 -0
- secator/runners/task.py +81 -0
- secator/runners/workflow.py +168 -0
- secator/scans/__init__.py +29 -0
- secator/serializers/__init__.py +8 -0
- secator/serializers/dataclass.py +39 -0
- secator/serializers/json.py +45 -0
- secator/serializers/regex.py +25 -0
- secator/tasks/__init__.py +8 -0
- secator/tasks/_categories.py +487 -0
- secator/tasks/arjun.py +113 -0
- secator/tasks/arp.py +53 -0
- secator/tasks/arpscan.py +70 -0
- secator/tasks/bbot.py +372 -0
- secator/tasks/bup.py +118 -0
- secator/tasks/cariddi.py +193 -0
- secator/tasks/dalfox.py +87 -0
- secator/tasks/dirsearch.py +84 -0
- secator/tasks/dnsx.py +186 -0
- secator/tasks/feroxbuster.py +93 -0
- secator/tasks/ffuf.py +135 -0
- secator/tasks/fping.py +85 -0
- secator/tasks/gau.py +102 -0
- secator/tasks/getasn.py +60 -0
- secator/tasks/gf.py +36 -0
- secator/tasks/gitleaks.py +96 -0
- secator/tasks/gospider.py +84 -0
- secator/tasks/grype.py +109 -0
- secator/tasks/h8mail.py +75 -0
- secator/tasks/httpx.py +167 -0
- secator/tasks/jswhois.py +36 -0
- secator/tasks/katana.py +203 -0
- secator/tasks/maigret.py +87 -0
- secator/tasks/mapcidr.py +42 -0
- secator/tasks/msfconsole.py +179 -0
- secator/tasks/naabu.py +85 -0
- secator/tasks/nmap.py +487 -0
- secator/tasks/nuclei.py +151 -0
- secator/tasks/search_vulns.py +225 -0
- secator/tasks/searchsploit.py +109 -0
- secator/tasks/sshaudit.py +299 -0
- secator/tasks/subfinder.py +48 -0
- secator/tasks/testssl.py +283 -0
- secator/tasks/trivy.py +130 -0
- secator/tasks/trufflehog.py +240 -0
- secator/tasks/urlfinder.py +100 -0
- secator/tasks/wafw00f.py +106 -0
- secator/tasks/whois.py +34 -0
- secator/tasks/wpprobe.py +116 -0
- secator/tasks/wpscan.py +202 -0
- secator/tasks/x8.py +94 -0
- secator/tasks/xurlfind3r.py +83 -0
- secator/template.py +294 -0
- secator/thread.py +24 -0
- secator/tree.py +196 -0
- secator/utils.py +922 -0
- secator/utils_test.py +297 -0
- secator/workflows/__init__.py +29 -0
- secator-0.22.0.dist-info/METADATA +447 -0
- secator-0.22.0.dist-info/RECORD +150 -0
- secator-0.22.0.dist-info/WHEEL +4 -0
- secator-0.22.0.dist-info/entry_points.txt +2 -0
- secator-0.22.0.dist-info/licenses/LICENSE +60 -0
|
@@ -0,0 +1,225 @@
|
|
|
1
|
+
from urllib.parse import urlparse
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
from secator.decorators import task
|
|
5
|
+
from secator.definitions import (OPT_NOT_SUPPORTED, HEADER,
|
|
6
|
+
DELAY, FOLLOW_REDIRECT, PROXY, RATE_LIMIT, RETRIES,
|
|
7
|
+
THREADS, TIMEOUT, USER_AGENT)
|
|
8
|
+
from secator.output_types import Vulnerability, Exploit, Warning
|
|
9
|
+
from secator.tasks._categories import Vuln
|
|
10
|
+
from secator.serializers import JSONSerializer
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@task()
class search_vulns(Vuln):
	"""Search for known vulnerabilities in software by product name or CPE.

	Wraps the `search_vulns` CLI and converts its JSON output into
	Vulnerability and Exploit items. Inputs are product queries
	(e.g. 'apache 2.4.41'), optionally prefixed as '<matched_at>~<query>'
	(see before_init).
	"""
	cmd = 'search_vulns'
	output_types = [Vulnerability, Exploit]
	tags = ['vuln', 'recon']
	input_flag = '-q'
	# One query per invocation: on_json_loaded only consumes the first value
	# of the result mapping, so batching queries would drop results.
	input_chunk_size = 1
	item_loaders = [JSONSerializer()]
	json_flag = '-f json'
	version_flag = '-V'
	opts = {
		'ignore_general_product_vulns': {
			'is_flag': True,
			'help': 'Ignore vulnerabilities that only affect a general product'
		},
		'include_single_version_vulns': {
			'is_flag': True,
			'help': 'Include vulnerabilities that only affect one specific version'
		},
		'include_patched': {
			'is_flag': True,
			'help': 'Include vulnerabilities reported as patched'
		},
	}
	opt_key_map = {
		'ignore_general_product_vulns': 'ignore-general-product-vulns',
		'include_single_version_vulns': 'include-single-version-vulns',
		'include_patched': 'include-patched',
		# Generic runner/HTTP options that the search_vulns CLI does not accept.
		HEADER: OPT_NOT_SUPPORTED,
		DELAY: OPT_NOT_SUPPORTED,
		FOLLOW_REDIRECT: OPT_NOT_SUPPORTED,
		PROXY: OPT_NOT_SUPPORTED,
		RATE_LIMIT: OPT_NOT_SUPPORTED,
		RETRIES: OPT_NOT_SUPPORTED,
		THREADS: OPT_NOT_SUPPORTED,
		TIMEOUT: OPT_NOT_SUPPORTED,
		USER_AGENT: OPT_NOT_SUPPORTED,
	}
	install_version = '0.8.4'
	install_cmd = 'pipx install --force search_vulns==[install_version]'
	# Refresh the local vulnerability database after installation (any platform).
	install_post = {'*': 'search_vulns -u'}
	github_handle = 'ra1nb0rn/search_vulns'
	install_github_bin = False
	proxychains = False
	proxy_socks5 = False
	proxy_http = False
	profile = 'io'

	@staticmethod
	def before_init(self):
		"""Pre-process the first input before the command line is built.

		Supports '<matched_at>~<query>' inputs: the part before '~' is stored
		on the runner as `self.matched_at` and only the query part is passed
		to the tool. Also strips 'httpd' and replaces '/' with spaces —
		presumably to turn server banners like 'Apache/2.4.41' into
		searchable product queries (TODO confirm against callers).
		"""
		if len(self.inputs) == 0:
			return
		_in = self.inputs[0]
		self.matched_at = None
		if '~' in _in:
			split = _in.split('~')
			self.matched_at = split[0]
			self.inputs[0] = split[1]
		self.inputs[0] = self.inputs[0].replace('httpd', '').replace('/', ' ')

	@staticmethod
	def on_json_loaded(self, item):
		"""Load vulnerability items from search_vulns JSON output.

		`item` is the parsed JSON document (a mapping); only its first value
		is consumed. Yields Warning, Vulnerability and Exploit items.
		"""
		# Prefer the location captured in before_init, else fall back to the raw input.
		matched_at = self.matched_at if self.matched_at else self.inputs[0] if self.inputs else ''

		values = item.values()
		if not values:
			return None

		data = list(values)[0]
		if isinstance(data, str):
			# A bare string value is a message from the tool, not a result set.
			yield Warning(message=data)
			return

		vulns = data.get('vulns', {})
		common_extra_data = {}
		# product_ids = data.get('product_ids', {})
		# cpes = product_ids.get('cpe', [])
		# if cpes:
		# 	common_extra_data.update({'cpes': cpes})

		# Yield each vulnerability, then one Exploit per exploit reference attached to it.
		for vuln_id, vuln_data in vulns.items():
			cve_id = search_vulns.extract_id(vuln_data)
			yield Vulnerability(
				id=vuln_id,
				name=search_vulns.extract_id(vuln_data),
				description=vuln_data.get('description', ''),
				severity=search_vulns.cvss_to_severity(vuln_data.get('cvss', 0)),
				confidence='high',
				# NOTE(review): float() raises on a non-numeric 'cvss' value (e.g. '') — assumes
				# the tool always emits a number here; confirm against search_vulns output.
				cvss_score=float(vuln_data.get('cvss', 0)),
				epss_score=vuln_data.get('epss', ''),
				cvss_vec=vuln_data.get('cvss_vec', ''),
				matched_at=matched_at,
				references=search_vulns.extract_references(vuln_data),
				extra_data=search_vulns.extract_extra_data(vuln_data),
				provider='search_vulns',
				tags=search_vulns.extract_tags(vuln_data),
			)
			exploits = vuln_data.get('exploits', [])
			for exploit in exploits:
				extra_data = common_extra_data.copy()
				# e.g. 'https://github.com/user/repo' -> ['github', 'user', 'repo']
				parts = exploit.replace('http://', '').replace('https://', '').replace('github.com', 'github').split('/')
				hostname = urlparse(exploit).hostname
				tags = [hostname]
				# Second-level domain label as provider, e.g. 'exploit-db' from 'www.exploit-db.com'.
				# NOTE(review): raises if hostname is None or dot-less — assumes well-formed URLs.
				provider = hostname.split('.')[-2]
				is_github = 'github.com' in exploit
				if is_github:
					user = parts[1]
					repo = parts[2]
					name = 'Github'
					extra_data.update({
						'user': user,
						'repo': repo,
					})
				else:
					hostname = urlparse(exploit).hostname
					name = provider.capitalize()
				name = name + ' exploit'
				last_part = exploit.split('/')[-1]
				id = f'{cve_id}-exploit'
				if last_part.isnumeric():
					# Numeric trailing path segment (e.g. an ExploitDB id) becomes the exploit id.
					id = last_part
					name += f' {id}'
				yield Exploit(
					name=name,
					provider=provider,
					id=id,
					matched_at=matched_at,
					confidence='high',
					reference=exploit,
					cves=[cve_id],
					tags=tags,
					extra_data=extra_data,
				)

	@staticmethod
	def extract_id(item):
		"""Extract vulnerability ID from the item."""
		return item.get('id', '')

	@staticmethod
	def extract_tags(item):
		"""Extract tags from vulnerability item.

		Tags are the CWE id (if any) plus 'actively-exploited' when the entry
		is flagged as CISA known-exploited.
		"""
		tags = []
		if item.get('cwe_id'):
			tags.append(item['cwe_id'])
		if item.get('cisa_known_exploited'):
			tags.append('actively-exploited')
		return tags

	@staticmethod
	def extract_references(item):
		"""Extract references from vulnerability item.

		References are the alias URL keyed by the vuln's own id (if present)
		plus every exploit URL.
		"""
		refs = []
		aliases = item.get('aliases', {})
		vuln_id = item.get('id', '')
		if vuln_id and vuln_id in aliases:
			refs.append(aliases[vuln_id])

		# Add exploit references
		exploits = item.get('exploits', [])
		if exploits:
			refs.extend(exploits)

		return refs

	@staticmethod
	def extract_extra_data(item):
		"""Extract extra data from vulnerability item.

		Copies a fixed set of optional fields into a flat dict, skipping
		absent/falsy ones.
		"""
		extra = {}

		# Add published date
		if item.get('published'):
			extra['published'] = item['published']

		# Add CVSS version
		if item.get('cvss_ver'):
			extra['cvss_version'] = item['cvss_ver']

		# Add CWE ID
		if item.get('cwe_id'):
			extra['cwe_id'] = item['cwe_id']

		# Add CISA known exploited flag
		if item.get('cisa_known_exploited'):
			extra['cisa_known_exploited'] = item['cisa_known_exploited']

		# Add product IDs
		if item.get('product_ids'):
			extra['product_ids'] = item['product_ids']

		# Add match reason
		if item.get('match_reason'):
			extra['match_reason'] = item['match_reason']

		return extra

	@staticmethod
	def cvss_to_severity(cvss):
		"""Convert CVSS score to severity level.

		Bands follow the CVSS v3 qualitative scale: <4 low, <7 medium,
		<9 high, >=9 critical. Returns None for 0 / negative scores.
		NOTE(review): float() raises on non-numeric input (e.g. '').
		"""
		cvss = float(cvss)
		if not cvss or cvss < 0:
			return None
		if cvss < 4:
			return 'low'
		elif cvss < 7:
			return 'medium'
		elif cvss < 9:
			return 'high'
		else:
			return 'critical'
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
import re
|
|
2
|
+
|
|
3
|
+
from secator.config import CONFIG
|
|
4
|
+
from secator.decorators import task
|
|
5
|
+
from secator.definitions import (CVES, EXTRA_DATA, ID, MATCHED_AT, NAME,
|
|
6
|
+
PROVIDER, REFERENCE, TAGS, OPT_NOT_SUPPORTED, STRING, SLUG)
|
|
7
|
+
from secator.output_types import Exploit
|
|
8
|
+
from secator.runners import Command
|
|
9
|
+
from secator.serializers import JSONSerializer
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
# Splits an ExploitDB title into a leading product-words group and the rest
# (typically '<versions> - <title>'), e.g. 'Apache Tomcat 9.x - RCE' matching
# as ('Apache Tomcat ', '9.x - RCE') — consumed by searchsploit.on_item.
SEARCHSPLOIT_TITLE_REGEX = re.compile(r'^((?:[a-zA-Z\-_!\.()]+\d?\s?)+)\.?\s*(.*)$')
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
@task()
class searchsploit(Command):
	"""Exploit searcher based on ExploitDB.

	Wraps the `searchsploit` CLI (--json output) and maps each hit to an
	Exploit item. Inputs are product query strings, optionally prefixed as
	'<matched_at>~<query>' (see before_init).
	"""
	cmd = 'searchsploit'
	input_types = [STRING, SLUG]
	output_types = [Exploit]
	tags = ['exploit', 'recon']
	input_chunk_size = 1
	json_flag = '--json'
	version_flag = OPT_NOT_SUPPORTED
	opts = {
		'strict': {'short': 's', 'is_flag': True, 'default': False, 'help': 'Strict match'}
	}
	opt_key_map = {}
	item_loaders = [JSONSerializer()]
	output_map = {
		Exploit: {
			NAME: 'Title',
			ID: 'EDB-ID',
			PROVIDER: lambda x: 'EDB',
			# Only CVE codes from the semicolon-separated 'Codes' field.
			CVES: lambda x: [c for c in x['Codes'].split(';') if c.startswith('CVE-')],
			REFERENCE: lambda x: f'https://exploit-db.com/exploits/{x["EDB-ID"]}',
			TAGS: lambda x: searchsploit.tags_extractor(x),
			# Everything else (lowercased, 'date_' prefix stripped) goes to extra_data.
			EXTRA_DATA: lambda x: {
				k.lower().replace('date_', ''): v for k, v in x.items() if k not in ['Title', 'EDB-ID', 'Codes', 'Tags', 'Source'] and v != ''  # noqa: E501
			}
		}
	}
	install_version = '2025-04-23'
	install_pre = {'apk': ['ncurses']}
	install_cmd = (
		f'git clone --depth 1 --single-branch -b [install_version] https://gitlab.com/exploit-database/exploitdb.git {CONFIG.dirs.share}/exploitdb_[install_version] || true && '  # noqa: E501
		f'ln -sf $HOME/.local/share/exploitdb_[install_version]/searchsploit {CONFIG.dirs.bin}/searchsploit'
	)
	proxychains = False
	proxy_socks5 = False
	proxy_http = False
	profile = 'io'

	@staticmethod
	def tags_extractor(item):
		"""Normalize the comma-separated 'Tags' field into a list of tags.

		Each tag is lowercased, parentheses are stripped, and '-'/' ' are
		collapsed to '_' (e.g. 'Remote Code Execution (RCE)' ->
		'remote_code_execution_rce'). Empty tags are skipped.
		"""
		tags = []
		for tag in item['Tags'].split(','):
			_tag = '_'.join(
				tag.lower().replace('-', '_').replace('(', '').replace(')', '').split(' ')
			)
			if not _tag:
				continue
			# Bug fix: the normalized form was computed but the raw tag was
			# appended; append the normalized tag so tags are consistent.
			tags.append(_tag)
		return tags

	@staticmethod
	def before_init(self):
		"""Pre-process the first input before the command line is built.

		Supports '<matched_at>~<query>' inputs: the part before '~' is stored
		on the runner as `self.matched_at` and only the query part is passed
		to the tool. Also strips 'httpd' and replaces '/' with spaces —
		presumably to normalize server banners into searchable queries.
		"""
		if len(self.inputs) == 0:
			return
		_in = self.inputs[0]
		self.matched_at = None
		if '~' in _in:
			split = _in.split('~')
			self.matched_at = split[0]
			self.inputs[0] = split[1]
		self.inputs[0] = self.inputs[0].replace('httpd', '').replace('/', ' ')

	@staticmethod
	def on_item_pre_convert(self, item):
		"""Attach the saved match location to the raw dict before conversion."""
		if self.matched_at:
			item[MATCHED_AT] = self.matched_at
		return item

	@staticmethod
	def on_item(self, item):
		"""Post-process an Exploit: split product/version info out of the title.

		When the title matches SEARCHSPLOIT_TITLE_REGEX as
		'<product> <versions> - <title>', the name is reduced to the title and
		'<product> <version>' pairs are prepended to the tags. The input query
		tag and the match location are always set.
		"""
		if not isinstance(item, Exploit):
			return item
		match = SEARCHSPLOIT_TITLE_REGEX.match(item.name)
		if match:
			group = match.groups()
			product = '-'.join(group[0].strip().split(' '))
			if len(group[1]) > 1:
				try:
					# Expect exactly '<versions> - <title>'; ValueError otherwise.
					versions, title = tuple(group[1].split(' - '))
					item.name = title
					product_info = [f'{product.lower()} {v.strip()}' for v in versions.split('/')]
					item.tags = product_info + item.tags
				except ValueError:
					# Fallback: keep the last ' - ' segment as the name.
					item.name = item.name.split(' - ')[-1]
					item.tags = [product.lower()]
		input_tag = '-'.join(self.inputs[0].replace('\'', '').split(' '))
		item.tags = [input_tag] + item.tags
		item.matched_at = self.matched_at if self.matched_at else self.inputs[0] if self.inputs else ''
		return item
|
|
@@ -0,0 +1,299 @@
|
|
|
1
|
+
from secator.config import CONFIG
|
|
2
|
+
from secator.decorators import task
|
|
3
|
+
from secator.output_types import Vulnerability, Tag
|
|
4
|
+
from secator.definitions import HOST, IP, TIMEOUT
|
|
5
|
+
from secator.tasks._categories import Command
|
|
6
|
+
from secator.serializers import JSONSerializer
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
@task()
class sshaudit(Command):
	"""SSH server & client security auditing (banner, key exchange, encryption, mac, compression, etc)."""
	cmd = 'ssh-audit'
	input_types = [HOST, IP]
	output_types = [Vulnerability, Tag]
	tags = ['ssh', 'audit', 'security']
	item_loaders = [JSONSerializer()]
	input_flag = None
	file_flag = '-T'
	json_flag = '-j'
	# ssh-audit exits non-zero when it finds issues; that is not a task failure.
	ignore_return_code = True
	opt_prefix = '--'
	opts = {
		'ssh_port': {'type': int, 'short': 'sshp', 'default': 22, 'help': 'SSH port to connect to'},
		'ipv4': {'is_flag': True, 'short': '4', 'default': False, 'help': 'Enable IPv4 (order of precedence)'},
		'ipv6': {'is_flag': True, 'short': '6', 'default': False, 'help': 'Enable IPv6 (order of precedence)'},
		'batch': {'is_flag': True, 'short': 'b', 'default': False, 'help': 'Enable batch output for automated processing'},
		'client_audit': {'is_flag': True, 'short': 'c', 'default': False, 'help': 'Start a listening server for client auditing'},  # noqa: E501
		'level': {'type': str, 'short': 'l', 'default': None, 'help': 'Minimum output level (info, warn, fail)'},
	}
	opt_key_map = {
		TIMEOUT: 'timeout',
		# NOTE: both 'port' and 'ssh_port' map to '-p'; only 'ssh_port' is declared in opts.
		'port': '-p',
		'ipv4': '-4',
		'ipv6': '-6',
		'batch': '-b',
		'client_audit': '-c',
		'level': '-l',
		'verbose': '-v',
		'ssh_port': '-p',
	}
	install_github_handle = 'jtesta/ssh-audit'
	install_version = 'v3.3.0'
	install_cmd = (
		f'git clone --depth 1 --single-branch -b [install_version] '
		f'https://github.com/jtesta/ssh-audit.git {CONFIG.dirs.share}/ssh-audit_[install_version] || true && '
		f'ln -sf {CONFIG.dirs.share}/ssh-audit_[install_version]/ssh-audit.py {CONFIG.dirs.bin}/ssh-audit && '
		f'chmod +x {CONFIG.dirs.bin}/ssh-audit'
	)
	profile = 'io'

	@staticmethod
	def _audit_findings(alg_list, target, alg_type, fail_name, warn_name, vuln_tags, tag_name):
		"""Yield findings for one algorithm section of an ssh-audit JSON report.

		Args:
			alg_list (list): entries with 'algorithm' and 'notes' ({'fail', 'warn', 'info'}).
			target (str): audited target (used as matched_at / match).
			alg_type (str): section type recorded in extra_data ('encryption', 'mac', ...).
			fail_name (str): Vulnerability name for 'fail' notes (severity high).
			warn_name (str): Vulnerability name for 'warn' notes (severity medium).
			vuln_tags (list): tags attached to yielded Vulnerabilities.
			tag_name (str): Tag name used for clean algorithms (info only).

		Yields:
			Vulnerability per fail/warn note; an info Tag when an entry has neither.
		"""
		for entry in alg_list:
			algorithm = entry.get('algorithm', '')
			notes = entry.get('notes', {})
			failures = notes.get('fail', [])
			warnings = notes.get('warn', [])

			# Failures are weak/broken algorithms -> high severity.
			for failure in failures:
				yield Vulnerability(
					name=fail_name,
					matched_at=target,
					tags=vuln_tags,
					severity='high',
					confidence='high',
					provider='ssh_audit',
					extra_data={
						'algorithm': algorithm,
						'issue': failure,
						'type': alg_type
					}
				)

			# Warnings are questionable algorithms -> medium severity.
			for warning in warnings:
				yield Vulnerability(
					name=warn_name,
					matched_at=target,
					tags=vuln_tags,
					severity='medium',
					confidence='high',
					provider='ssh_audit',
					extra_data={
						'algorithm': algorithm,
						'issue': warning,
						'type': alg_type
					}
				)

			# Clean algorithms are recorded as informational tags only.
			if not failures and not warnings:
				info_notes = ', '.join(notes.get('info', []))
				yield Tag(
					category='info',
					name=tag_name,
					value=f'{algorithm} {info_notes}',
					match=target,
					extra_data={
						'algorithm': algorithm,
					}
				)

	@staticmethod
	def on_json_loaded(self, item):
		"""Convert one ssh-audit JSON report into Tag and Vulnerability items.

		Yields the banner tag first, then CVEs, then findings for the
		encryption, MAC, key-exchange and host-key sections (in that order).
		"""
		target = item.get('target', 'unknown')
		banner = item.get('banner', {})
		software = banner.get('software', 'unknown')

		yield Tag(
			category='info',
			name='ssh_banner',
			value=banner.get('raw', ''),
			match=target,
			extra_data={
				'software': software,
				'protocol': banner.get('protocol', ''),
			}
		)

		# CVEs reported by ssh-audit for the detected software version.
		for cve in item.get('cves', []):
			yield Vulnerability(
				name=f'SSH {cve}',
				matched_at=target,
				tags=['ssh', 'cve'],
				severity='high',
				confidence='high',
				provider='ssh_audit',
				extra_data={
					'cve': cve,
					'software': software
				}
			)

		# The four algorithm sections share identical processing; only naming differs.
		sections = [
			('enc', 'encryption', 'SSH weak encryption algorithm', 'SSH encryption algorithm warning',
				['ssh', 'encryption', 'cipher'], 'ssh_encryption'),
			('mac', 'mac', 'SSH weak MAC algorithm', 'SSH MAC algorithm warning',
				['ssh', 'mac', 'authentication'], 'ssh_mac'),
			('kex', 'kex', 'SSH weak key exchange algorithm', 'SSH key exchange algorithm warning',
				['ssh', 'kex', 'key-exchange'], 'ssh_kex'),
			('key', 'host_key', 'SSH weak host key algorithm', 'SSH host key algorithm warning',
				['ssh', 'host-key'], 'ssh_host_key'),
		]
		for section_key, alg_type, fail_name, warn_name, vuln_tags, tag_name in sections:
			yield from sshaudit._audit_findings(
				item.get(section_key, []), target, alg_type, fail_name, warn_name, vuln_tags, tag_name)
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
from secator.decorators import task
|
|
2
|
+
from secator.definitions import (DELAY, DOMAIN, HOST, OPT_NOT_SUPPORTED, PROXY,
|
|
3
|
+
RATE_LIMIT, RETRIES, THREADS, TIMEOUT)
|
|
4
|
+
from secator.output_types import Subdomain
|
|
5
|
+
from secator.serializers import JSONSerializer
|
|
6
|
+
from secator.tasks._categories import ReconDns
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
@task()
class subfinder(ReconDns):
	"""Fast passive subdomain enumeration tool."""
	# '-cs' includes the source of each discovered subdomain in the output.
	cmd = 'subfinder -cs'
	input_types = [HOST]
	output_types = [Subdomain]
	tags = ['dns', 'recon']
	file_flag = '-dL'
	input_flag = '-d'
	json_flag = '-json'
	opt_key_map = {
		DELAY: OPT_NOT_SUPPORTED,
		PROXY: 'proxy',
		RATE_LIMIT: 'rate-limit',
		RETRIES: OPT_NOT_SUPPORTED,
		TIMEOUT: 'timeout',
		THREADS: 't'
	}
	opt_value_map = {
		# subfinder expects a bare host[:port] proxy value, not a URL.
		PROXY: lambda x: x.replace('http://', '').replace('https://', '') if x else None
	}
	item_loaders = [JSONSerializer()]
	output_map = {
		Subdomain: {
			DOMAIN: 'input',
		}
	}
	install_version = 'v2.7.0'
	install_cmd = 'go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@[install_version]'
	github_handle = 'projectdiscovery/subfinder'
	proxychains = False
	proxy_http = True
	proxy_socks5 = False
	profile = 'io'

	@staticmethod
	def validate_item(self, item):
		"""Drop results whose queried domain is 'localhost'.

		Uses .get() so a dict item missing the 'input' key is kept instead of
		raising KeyError (previously item['input'] crashed on malformed items).
		"""
		if isinstance(item, dict):
			return item.get('input') != 'localhost'
		return True
|