secator 0.22.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- secator/.gitignore +162 -0
- secator/__init__.py +0 -0
- secator/celery.py +453 -0
- secator/celery_signals.py +138 -0
- secator/celery_utils.py +320 -0
- secator/cli.py +2035 -0
- secator/cli_helper.py +395 -0
- secator/click.py +87 -0
- secator/config.py +670 -0
- secator/configs/__init__.py +0 -0
- secator/configs/profiles/__init__.py +0 -0
- secator/configs/profiles/aggressive.yaml +8 -0
- secator/configs/profiles/all_ports.yaml +7 -0
- secator/configs/profiles/full.yaml +31 -0
- secator/configs/profiles/http_headless.yaml +7 -0
- secator/configs/profiles/http_record.yaml +8 -0
- secator/configs/profiles/insane.yaml +8 -0
- secator/configs/profiles/paranoid.yaml +8 -0
- secator/configs/profiles/passive.yaml +11 -0
- secator/configs/profiles/polite.yaml +8 -0
- secator/configs/profiles/sneaky.yaml +8 -0
- secator/configs/profiles/tor.yaml +5 -0
- secator/configs/scans/__init__.py +0 -0
- secator/configs/scans/domain.yaml +31 -0
- secator/configs/scans/host.yaml +23 -0
- secator/configs/scans/network.yaml +30 -0
- secator/configs/scans/subdomain.yaml +27 -0
- secator/configs/scans/url.yaml +19 -0
- secator/configs/workflows/__init__.py +0 -0
- secator/configs/workflows/cidr_recon.yaml +48 -0
- secator/configs/workflows/code_scan.yaml +29 -0
- secator/configs/workflows/domain_recon.yaml +46 -0
- secator/configs/workflows/host_recon.yaml +95 -0
- secator/configs/workflows/subdomain_recon.yaml +120 -0
- secator/configs/workflows/url_bypass.yaml +15 -0
- secator/configs/workflows/url_crawl.yaml +98 -0
- secator/configs/workflows/url_dirsearch.yaml +62 -0
- secator/configs/workflows/url_fuzz.yaml +68 -0
- secator/configs/workflows/url_params_fuzz.yaml +66 -0
- secator/configs/workflows/url_secrets_hunt.yaml +23 -0
- secator/configs/workflows/url_vuln.yaml +91 -0
- secator/configs/workflows/user_hunt.yaml +29 -0
- secator/configs/workflows/wordpress.yaml +38 -0
- secator/cve.py +718 -0
- secator/decorators.py +7 -0
- secator/definitions.py +168 -0
- secator/exporters/__init__.py +14 -0
- secator/exporters/_base.py +3 -0
- secator/exporters/console.py +10 -0
- secator/exporters/csv.py +37 -0
- secator/exporters/gdrive.py +123 -0
- secator/exporters/json.py +16 -0
- secator/exporters/table.py +36 -0
- secator/exporters/txt.py +28 -0
- secator/hooks/__init__.py +0 -0
- secator/hooks/gcs.py +80 -0
- secator/hooks/mongodb.py +281 -0
- secator/installer.py +694 -0
- secator/loader.py +128 -0
- secator/output_types/__init__.py +49 -0
- secator/output_types/_base.py +108 -0
- secator/output_types/certificate.py +78 -0
- secator/output_types/domain.py +50 -0
- secator/output_types/error.py +42 -0
- secator/output_types/exploit.py +58 -0
- secator/output_types/info.py +24 -0
- secator/output_types/ip.py +47 -0
- secator/output_types/port.py +55 -0
- secator/output_types/progress.py +36 -0
- secator/output_types/record.py +36 -0
- secator/output_types/stat.py +41 -0
- secator/output_types/state.py +29 -0
- secator/output_types/subdomain.py +45 -0
- secator/output_types/tag.py +69 -0
- secator/output_types/target.py +38 -0
- secator/output_types/url.py +112 -0
- secator/output_types/user_account.py +41 -0
- secator/output_types/vulnerability.py +101 -0
- secator/output_types/warning.py +30 -0
- secator/report.py +140 -0
- secator/rich.py +130 -0
- secator/runners/__init__.py +14 -0
- secator/runners/_base.py +1240 -0
- secator/runners/_helpers.py +218 -0
- secator/runners/celery.py +18 -0
- secator/runners/command.py +1178 -0
- secator/runners/python.py +126 -0
- secator/runners/scan.py +87 -0
- secator/runners/task.py +81 -0
- secator/runners/workflow.py +168 -0
- secator/scans/__init__.py +29 -0
- secator/serializers/__init__.py +8 -0
- secator/serializers/dataclass.py +39 -0
- secator/serializers/json.py +45 -0
- secator/serializers/regex.py +25 -0
- secator/tasks/__init__.py +8 -0
- secator/tasks/_categories.py +487 -0
- secator/tasks/arjun.py +113 -0
- secator/tasks/arp.py +53 -0
- secator/tasks/arpscan.py +70 -0
- secator/tasks/bbot.py +372 -0
- secator/tasks/bup.py +118 -0
- secator/tasks/cariddi.py +193 -0
- secator/tasks/dalfox.py +87 -0
- secator/tasks/dirsearch.py +84 -0
- secator/tasks/dnsx.py +186 -0
- secator/tasks/feroxbuster.py +93 -0
- secator/tasks/ffuf.py +135 -0
- secator/tasks/fping.py +85 -0
- secator/tasks/gau.py +102 -0
- secator/tasks/getasn.py +60 -0
- secator/tasks/gf.py +36 -0
- secator/tasks/gitleaks.py +96 -0
- secator/tasks/gospider.py +84 -0
- secator/tasks/grype.py +109 -0
- secator/tasks/h8mail.py +75 -0
- secator/tasks/httpx.py +167 -0
- secator/tasks/jswhois.py +36 -0
- secator/tasks/katana.py +203 -0
- secator/tasks/maigret.py +87 -0
- secator/tasks/mapcidr.py +42 -0
- secator/tasks/msfconsole.py +179 -0
- secator/tasks/naabu.py +85 -0
- secator/tasks/nmap.py +487 -0
- secator/tasks/nuclei.py +151 -0
- secator/tasks/search_vulns.py +225 -0
- secator/tasks/searchsploit.py +109 -0
- secator/tasks/sshaudit.py +299 -0
- secator/tasks/subfinder.py +48 -0
- secator/tasks/testssl.py +283 -0
- secator/tasks/trivy.py +130 -0
- secator/tasks/trufflehog.py +240 -0
- secator/tasks/urlfinder.py +100 -0
- secator/tasks/wafw00f.py +106 -0
- secator/tasks/whois.py +34 -0
- secator/tasks/wpprobe.py +116 -0
- secator/tasks/wpscan.py +202 -0
- secator/tasks/x8.py +94 -0
- secator/tasks/xurlfind3r.py +83 -0
- secator/template.py +294 -0
- secator/thread.py +24 -0
- secator/tree.py +196 -0
- secator/utils.py +922 -0
- secator/utils_test.py +297 -0
- secator/workflows/__init__.py +29 -0
- secator-0.22.0.dist-info/METADATA +447 -0
- secator-0.22.0.dist-info/RECORD +150 -0
- secator-0.22.0.dist-info/WHEEL +4 -0
- secator-0.22.0.dist-info/entry_points.txt +2 -0
- secator-0.22.0.dist-info/licenses/LICENSE +60 -0
secator/tasks/testssl.py
ADDED
|
@@ -0,0 +1,283 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import os
|
|
3
|
+
import re
|
|
4
|
+
import shlex
|
|
5
|
+
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
|
|
8
|
+
from secator.config import CONFIG
|
|
9
|
+
from secator.decorators import task
|
|
10
|
+
from secator.output_types import Vulnerability, Certificate, Error, Info, Ip, Tag
|
|
11
|
+
from secator.definitions import (PROXY, HOST, USER_AGENT, HEADER, OUTPUT_PATH,
|
|
12
|
+
CERTIFICATE_STATUS_UNKNOWN, CERTIFICATE_STATUS_TRUSTED, CERTIFICATE_STATUS_REVOKED,
|
|
13
|
+
TIMEOUT)
|
|
14
|
+
from secator.tasks._categories import Command, OPTS
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@task()
class testssl(Command):
	"""SSL/TLS security scanner, including ciphers, protocols and cryptographic flaws."""
	cmd = 'testssl.sh'
	input_types = [HOST]
	output_types = [Certificate, Vulnerability, Ip, Tag]
	tags = ['dns', 'recon', 'tls']
	input_flag = None
	file_flag = '-iL'
	file_eof_newline = True
	version_flag = ''
	opt_prefix = '--'
	opts = {
		'verbose': {'is_flag': True, 'default': False, 'internal': True, 'display': True, 'help': 'Record all SSL/TLS info, not only critical info'}, # noqa: E501
		'parallel': {'is_flag': True, 'default': False, 'help': 'Test multiple hosts in parallel'},
		'warnings': {'type': str, 'default': None, 'help': 'Set to "batch" to stop on errors, and "off" to skip errors and continue'}, # noqa: E501
		'ids_friendly': {'is_flag': True, 'default': False, 'help': 'Avoid IDS blocking by skipping a few vulnerability checks'}, # noqa: E501
		'hints': {'is_flag': True, 'default': False, 'help': 'Additional hints to findings'},
		'server_defaults': {'is_flag': True, 'default': False, 'help': 'Displays the server default picks and certificate info'}, # noqa: E501
	}
	meta_opts = {
		PROXY: OPTS[PROXY],
		USER_AGENT: OPTS[USER_AGENT],
		HEADER: OPTS[HEADER],
		TIMEOUT: OPTS[TIMEOUT],
	}
	opt_key_map = {
		PROXY: 'proxy',
		USER_AGENT: 'user-agent',
		HEADER: 'reqheader',
		TIMEOUT: 'connect-timeout',
		'ipv6': '-6',
	}
	proxy_http = True
	proxychains = False
	proxy_socks5 = False
	profile = 'io'
	# Distro packages required before installing testssl.sh itself.
	install_cmd_pre = {
		'apk': ['hexdump', 'coreutils', 'procps', 'bash'],
		'pacman': ['util-linux', 'bash'],
		'*': ['bsdmainutils', 'bash']
	}
	install_version = 'v3.2.0'
	install_cmd = (
		f'git clone --depth 1 --single-branch -b [install_version] https://github.com/drwetter/testssl.sh.git {CONFIG.dirs.share}/testssl.sh_[install_version] || true && ' # noqa: E501
		f'ln -sf {CONFIG.dirs.share}/testssl.sh_[install_version]/testssl.sh {CONFIG.dirs.bin}'
	)
	install_github_bin = False
	github_handle = 'testssl/testssl.sh'

	# NOTE: secator hooks are declared @staticmethod but are called with the
	# runner instance as first argument (pattern used across all tasks in this file).
	@staticmethod
	def on_cmd(self):
		"""Append the JSON output file option and ensure the target is the last CLI argument."""
		output_path = self.get_opt_value(OUTPUT_PATH)
		if not output_path:
			output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json'
		self.output_path = output_path
		self.cmd += f' --jsonfile {shlex.quote(self.output_path)}'

		# Hack because target needs to be the last argument in testssl.sh
		if len(self.inputs) == 1:
			target = self.inputs[0]
			target_quoted = shlex.quote(target)
			self.cmd = re.sub(re.escape(f' {target_quoted}'), "", self.cmd)
			self.cmd += f' {target_quoted}'

	@staticmethod
	def on_cmd_done(self):
		"""Parse the testssl.sh JSON report and yield Ip, Tag, Vulnerability and Certificate items."""
		if not os.path.exists(self.output_path):
			yield Error(message=f'Could not find JSON results in {self.output_path}')
			return
		yield Info(message=f'JSON results saved to {self.output_path}')

		verbose = self.get_opt_value('verbose')
		with open(self.output_path, 'r') as f:
			data = json.load(f)
		# Aggregation buffers filled during the first pass over the findings.
		bad_cyphers = {}
		retrieved_certificates = {}
		ignored_item_ids = ["scanTime", "overall_grade", "DNS_CAArecord"]
		ip_addresses = []
		host_to_ips = {}

		for item in data:
			# testssl encodes the scanned endpoint as "<host>/<ip>".
			host, ip = tuple(item['ip'].split('/'))
			id = item['id']
			# port = item['port']
			finding = item['finding']
			severity = item['severity'].lower()
			cwe = item.get('cwe')
			vuln_tags = ['ssl', 'tls']
			if cwe:
				vuln_tags.append(cwe)

			# Skip ignored items
			if id.startswith(tuple(ignored_item_ids)):
				continue

			# Add IP to address pool
			host_to_ips.setdefault(host, []).append(ip)
			if ip not in ip_addresses:
				ip_addresses.append(ip)
				yield Ip(
					host=host,
					ip=ip,
					alive=True
				)

			# Process errors
			if id.startswith("scanProblem"):
				yield Error(message=finding)

			# Process bad ciphers (aggregated per IP / protocol, emitted after the loop)
			elif id.startswith('cipher-'):
				splited_item = item["finding"].split(" ")
				concerned_protocol = splited_item[0]
				bad_cypher = splited_item[-1]
				bad_cyphers.setdefault(ip, {}).setdefault(concerned_protocol, []).append(bad_cypher) # noqa: E501

			# Process certificates (aggregated per IP, emitted after the loop)
			elif id.startswith('cert_') or id.startswith('cert '):
				retrieved_certificates.setdefault(ip, []).append(item)

			# Process intermediate certificates
			elif id.startswith('intermediate_cert_'):
				# TODO: implement this
				pass

			# If info or ok, create a tag only if 'verbose' option is set
			elif severity in ['info', 'ok']:
				if not verbose:
					continue
				yield Tag(
					category='info',
					name='ssl_tls',
					match=host,
					value=finding,
					extra_data={
						'subtype': id,
					}
				)

			# Create vulnerability
			else:
				if id in ['TLS1', 'TLS1_1']:
					human_name = f'SSL/TLS deprecated protocol offered: {id}'
				else:
					human_name = f'SSL/TLS {id}'
				yield Vulnerability(
					name=human_name,
					matched_at=host,
					ip=ip,
					tags=vuln_tags,
					severity=severity,
					confidence='high',
					extra_data={
						'id': id,
						'finding': finding
					}
				)

		# Creating vulnerability for the deprecated ciphers
		for ip, protocols in bad_cyphers.items():
			for protocol, cyphers in protocols.items():
				yield Vulnerability(
					name=f'SSL/TLS vulnerability ciphers for {protocol} deprecated',
					matched_at=ip,
					ip=ip,
					confidence='high',
					severity='low',
					extra_data={
						'cyphers': cyphers
					}
				)

		# Create one Certificate per scanned IP from the aggregated cert findings
		host_to_ips = {k: set(v) for k, v in host_to_ips.items()}
		for ip, certs in retrieved_certificates.items():
			# Reverse lookup: first host that resolved to this IP
			host = [k for k, v in host_to_ips.items() if ip in v][0]
			cert_data = {
				'host': host,
				'ip': ip,
				'fingerprint_sha256': None,
				'subject_cn': None,
				'subject_an': None,
				'not_before': None,
				'not_after': None,
				'issuer_cn': None,
				'self_signed': None,
				'trusted': None,
				'status': None,
				'keysize': None,
				'serial_number': None,
			}
			for cert in certs:
				# NOTE(review): redundant recomputation — `host` was already resolved above
				host = [k for k, v in host_to_ips.items() if ip in v][0]
				id = cert['id']
				finding = cert['finding']

				if id.startswith('cert_crlDistributionPoints') and finding != '--':
					# TODO not implemented, need to find a certificate that is revoked by CRL
					cert_data['status'] = CERTIFICATE_STATUS_UNKNOWN

				if id.startswith('cert_ocspRevoked'):
					if finding.startswith('not revoked'):
						cert_data['status'] = CERTIFICATE_STATUS_TRUSTED
					else:
						cert_data['status'] = CERTIFICATE_STATUS_REVOKED

				if id.startswith('cert_fingerprintSHA256'):
					cert_data['fingerprint_sha256'] = finding

				if id.startswith('cert_commonName'):
					cert_data['subject_cn'] = finding

				if id.startswith('cert_subjectAltName'):
					cert_data['subject_an'] = finding.split(" ")

				if id.startswith('cert_notBefore'):
					cert_data['not_before'] = datetime.strptime(finding, "%Y-%m-%d %H:%M")

				if id.startswith('cert_notAfter'):
					cert_data['not_after'] = datetime.strptime(finding, "%Y-%m-%d %H:%M")

				if id.startswith('cert_caIssuers'):
					cert_data['issuer_cn'] = finding

				if id.startswith('cert_chain_of_trust'):
					cert_data['self_signed'] = 'self signed' in finding

				# NOTE(review): same condition as above — both fields are derived
				# from the single 'cert_chain_of_trust' finding
				if id.startswith('cert_chain_of_trust'):
					cert_data['trusted'] = finding.startswith('passed')

				if id.startswith('cert_keySize'):
					cert_data['keysize'] = int(finding.split(" ")[1])

				if id.startswith('cert_serialNumber'):
					cert_data['serial_number'] = finding

				if id.startswith('cert ') and finding.startswith('-----BEGIN CERTIFICATE-----'):
					cert_data['raw_value'] = finding

			# The attributes commented out below could not be found in the
			# testssl output at the time of writing.
			cert = Certificate(
				**cert_data
				# issuer_dn='',
				# issuer='',
				# TODO: delete the ciphers attribute from certificate outputType
				# ciphers=None,
				# TODO: need to find a way to retrieve the parent certificate,
				# parent_certificate=None,
			)
			yield cert
			if cert.is_expired():
				yield Vulnerability(
					name='SSL certificate expired',
					provider='testssl',
					description='The SSL certificate is expired. This can easily lead to domain takeovers',
					matched_at=host,
					ip=ip,
					tags=['ssl', 'tls'],
					severity='medium',
					confidence='high',
					extra_data={
						# NOTE(review): `id` here is the last cert finding id from the
						# loop above, not necessarily the expiry-related one — confirm intent
						'id': id,
						'expiration_date': Certificate.format_date(cert.not_after)
					}
				)
|
secator/tasks/trivy.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
import click
|
|
2
|
+
import os
|
|
3
|
+
import yaml
|
|
4
|
+
import shlex
|
|
5
|
+
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from secator.config import CONFIG
|
|
9
|
+
from secator.decorators import task
|
|
10
|
+
from secator.definitions import (THREADS, OUTPUT_PATH, OPT_NOT_SUPPORTED, HEADER, DELAY, FOLLOW_REDIRECT,
|
|
11
|
+
PATH, PROXY, RATE_LIMIT, RETRIES, TIMEOUT, USER_AGENT, STRING)
|
|
12
|
+
from secator.output_types import Vulnerability, Tag, Info, Error
|
|
13
|
+
from secator.tasks._categories import Vuln
|
|
14
|
+
from secator.utils import caml_to_snake
|
|
15
|
+
from secator.rich import console
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
# Trivy subcommands supported by this task; user-facing aliases
# ('filesystem', 'git') are normalized by convert_mode() below.
TRIVY_MODES = ['image', 'fs', 'repo']
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def convert_mode(mode):
	"""Normalize user-facing scan mode aliases to trivy subcommand names.

	'filesystem' maps to 'fs', 'git' maps to 'repo'; any other value is
	returned unchanged.
	"""
	aliases = {'filesystem': 'fs', 'git': 'repo'}
	return aliases.get(mode, mode)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@task()
class trivy(Vuln):
	"""Comprehensive and versatile security scanner."""
	cmd = 'trivy'
	input_types = [PATH, STRING]
	output_types = [Tag, Vulnerability]
	tags = ['vuln', 'scan']
	input_chunk_size = 1
	json_flag = '-f json'
	version_flag = '--version'
	opts = {
		"mode": {"type": click.Choice(TRIVY_MODES), 'help': f'Scan mode ({", ".join(TRIVY_MODES)})', 'internal': True, 'required': False} # noqa: E501
	}
	opt_key_map = {
		THREADS: OPT_NOT_SUPPORTED,
		HEADER: OPT_NOT_SUPPORTED,
		DELAY: OPT_NOT_SUPPORTED,
		FOLLOW_REDIRECT: OPT_NOT_SUPPORTED,
		PROXY: OPT_NOT_SUPPORTED,
		RATE_LIMIT: OPT_NOT_SUPPORTED,
		RETRIES: OPT_NOT_SUPPORTED,
		TIMEOUT: OPT_NOT_SUPPORTED,
		USER_AGENT: OPT_NOT_SUPPORTED
	}
	opt_value_map = {
		'mode': lambda x: convert_mode(x)
	}
	install_version = 'v0.61.1'
	install_cmd = (
		'curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh |'
		f'sudo sh -s -- -b {CONFIG.dirs.bin} [install_version]'
	)
	github_handle = 'aquasecurity/trivy'

	# NOTE: secator hooks are declared @staticmethod but are called with the
	# runner instance as first argument (pattern used across all tasks in this file).
	@staticmethod
	def on_cmd(self):
		"""Select the trivy subcommand (mode) and append the JSON output path.

		If no mode is given, auto-detect it from the first input:
		git repository > local path > container image.
		"""
		mode = self.cmd_options.get('mode', {}).get('value')
		if mode and mode not in TRIVY_MODES:
			raise Exception(f'Invalid mode: {mode}')
		if not mode and len(self.inputs) > 0:
			git_path = Path(self.inputs[0]) / '.git'
			if git_path.exists():
				mode = 'repo'
			elif Path(self.inputs[0]).exists():
				mode = 'fs'
			else:
				mode = 'image'
			console.print(Info(message=f'Auto mode detected: {mode} for input: {self.inputs[0]}'))

		output_path = self.get_opt_value(OUTPUT_PATH)
		if not output_path:
			output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json'
		self.output_path = output_path
		# Replace only the FIRST occurrence of 'trivy' (the binary name) so that
		# inputs or paths containing the substring 'trivy' are left untouched
		# (same approach as the trufflehog task).
		self.cmd = self.cmd.replace(f' -mode {mode}', '').replace('trivy', f'trivy {mode}', 1)
		self.cmd += f' -o {shlex.quote(self.output_path)}'

	@staticmethod
	def on_cmd_done(self):
		"""Parse the trivy JSON report and yield Vulnerability and Tag (secret) items."""
		if not os.path.exists(self.output_path):
			yield Error(message=f'Could not find JSON results in {self.output_path}')
			return

		yield Info(message=f'JSON results saved to {self.output_path}')
		with open(self.output_path, 'r') as f:
			results = yaml.safe_load(f.read()).get('Results', [])
		for item in results:
			for vuln in item.get('Vulnerabilities', []):
				vuln_id = vuln['VulnerabilityID']
				extra_data = {}
				if 'PkgName' in vuln:
					extra_data['product'] = vuln['PkgName']
				if 'InstalledVersion' in vuln:
					extra_data['version'] = vuln['InstalledVersion']
				cvss = vuln.get('CVSS', {})
				cvss_score = -1
				for _, cvss_data in cvss.items():
					# Prefer a CVSSv3 score and fall back to CVSSv2.
					# The previous `get('V3Score', -1) or get('V2Score', -1)` never
					# reached V2Score, because the -1 default is truthy.
					score = cvss_data.get('V3Score')
					if score is None:
						score = cvss_data.get('V2Score')
					if score is not None:
						cvss_score = score
				data = {
					'name': vuln_id.replace('-', '_'),
					'id': vuln_id,
					'provider': vuln.get('DataSource', {}).get('ID', ''),
					'description': vuln.get('Description'),
					'matched_at': self.inputs[0],
					'confidence': 'high',
					'severity': vuln['Severity'].lower(),
					'cvss_score': cvss_score,
					'reference': vuln.get('PrimaryURL', ''),
					'references': vuln.get('References', []),
					'extra_data': extra_data
				}
				if vuln_id.startswith('CVE'):
					# Enrich the finding with remote CVE data when available
					remote_data = Vuln.lookup_cve(vuln_id)
					if remote_data:
						data.update(remote_data)
				yield Vulnerability(**data)
			for secret in item.get('Secrets', []):
				# 'Code.Lines' may be missing or null; join whatever context exists
				code_context = '\n'.join([line['Content'] for line in secret.get('Code', {}).get('Lines') or []])
				extra_data = {'code_context': code_context}
				extra_data.update({caml_to_snake(k): v for k, v in secret.items() if k not in ['RuleID', 'Match', 'Code']})
				yield Tag(
					category='secret',
					name=secret['RuleID'].replace('-', '_'),
					value=secret['Match'],
					match=item['Target'],
					extra_data=extra_data
				)
|
|
@@ -0,0 +1,240 @@
|
|
|
1
|
+
import click
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
|
|
5
|
+
from secator.config import CONFIG
|
|
6
|
+
from secator.decorators import task
|
|
7
|
+
from secator.runners import Command
|
|
8
|
+
from secator.definitions import (PATH, URL, STRING, OPT_SPACE_SEPARATED, GCS_URL, ADDONS_ENABLED, SLUG)
|
|
9
|
+
from secator.utils import caml_to_snake
|
|
10
|
+
from secator.output_types import Tag, Info, Warning, Error
|
|
11
|
+
from secator.rich import console
|
|
12
|
+
from secator.serializers import JSONSerializer
|
|
13
|
+
|
|
14
|
+
# Scan source types supported by the trufflehog CLI; each maps to a
# trufflehog subcommand (e.g. `trufflehog git ...`).
TRUFFLEHOG_MODES = [
	'git',
	'github',
	'gitlab',
	's3',
	'filesystem',
	'gcs',
	'docker',
	'postman',
	'jenkins',
	'elasticsearch',
	'huggingface',
	'syslog',
]
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
@task()
class trufflehog(Command):
	"""Tool for finding secrets in git repositories and filesystems using TruffleHog."""
	cmd = 'trufflehog'
	tags = ['secret', 'scan']
	input_types = [PATH, URL, STRING, GCS_URL, SLUG]
	item_loaders = [JSONSerializer()]
	input_flag = None
	file_flag = OPT_SPACE_SEPARATED
	json_flag = '--json'
	opt_prefix = '--'
	opts = {
		'mode': {
			'type': click.Choice(TRUFFLEHOG_MODES),
			'help': f'Scan mode ({", ".join(TRUFFLEHOG_MODES)})',
			'internal': True
		},
		'status': {'type': str, 'help': 'Results status (verified, unknown, unverified, filtered_unverified)'},
		'concurrency': {'type': int, 'help': 'Number of concurrent workers'},
		'config': {'type': str, 'short': 'config', 'help': 'Config file path'},
		'git_branch': {'type': str, 'help': 'Branch to scan (git mode only)'},
		'git_depth': {'type': int, 'help': 'Commit depth to scan (git mode only)'},
		'git_since_commit': {'type': str, 'help': 'Scan commits starting from this commit'},
		'git_max_depth': {'type': int, 'help': 'Maximum depth of commits to scan'},
		'jenkins_username': {'type': str, 'help': 'Jenkins username to use when --mode jenkins'},
		'jenkins_password': {'type': str, 'help': 'Jenkins password to use when --mode jenkins'},
		'postman_collection_id': {'type': str, 'help': 'Postman collection ID to use when --mode postman'},
		'postman_token': {'type': str, 'help': 'Postman API token to use when --mode postman'},
		'postman_workspace_id': {'type': str, 'help': 'Postman workspace ID to use when --mode postman'},
		'gitlab_token': {'type': str, 'help': 'Gitlab token to use when --mode gitlab'},
		'gitlab_endpoint': {'type': str, 'default': 'https://gitlab.com', 'help': 'Gitlab endpoint to use when --mode gitlab', 'internal': True}, # noqa: E501
		'elasticsearch_nodes': {'type': str, 'help': 'Elasticsearch nodes (space separated) to use when --mode elasticsearch'}, # noqa: E501
		'elasticsearch_service_token': {'type': str, 'help': 'Elasticsearch service token to use when --mode elasticsearch'}, # noqa: E501
		'elasticsearch_cloud_id': {'type': str, 'help': 'Elasticsearch cloud ID to use when --mode elasticsearch'},
		'elasticsearch_api_key': {'type': str, 'help': 'Elasticsearch API key to use when --mode elasticsearch'},
	}
	# Map prefixed secator option names to the mode-specific trufflehog flags.
	opt_key_map = {
		'jenkins_username': '--username',
		'jenkins_password': '--password',
		'postman_collection_id': '--collection-id',
		'postman_token': '--token',
		'postman_workspace_id': '--workspace-id',
		'git_branch': '--branch',
		'git_depth': '--depth',
		'git_since_commit': '--since-commit',
		'git_max_depth': '--max-depth',
		'gitlab_token': '--token',
		'gitlab_endpoint': '--endpoint',
		'elasticsearch_nodes': '--nodes',
		'elasticsearch_service_token': '--service-token',
		'elasticsearch_cloud_id': '--cloud-id',
		'elasticsearch_api_key': '--api-key',
		'status': '--results',
	}
	output_types = [Tag, Info]
	ignore_return_code = True
	install_version = 'v3.91.0'
	install_cmd = (
		f'git clone https://github.com/trufflesecurity/trufflehog.git '
		f'{CONFIG.dirs.share}/trufflehog_[install_version] || true && '
		f'cd {CONFIG.dirs.share}/trufflehog_[install_version] && go build -o trufflehog . && '
		f'mv {CONFIG.dirs.share}/trufflehog_[install_version]/trufflehog {CONFIG.dirs.bin}'
	)
	github_handle = 'trufflesecurity/trufflehog'

	# NOTE: secator hooks are declared @staticmethod but are called with the
	# runner instance as first argument (pattern used across all tasks in this file).
	@staticmethod
	def before_init(self):
		"""Download any gs:// inputs locally and replace them with the download folder."""
		blob_folder = f'{self.reports_folder}/.inputs'
		del_indexes = []
		gcs_objects = False
		for i, input in enumerate(self.inputs):
			if input.startswith('gs://'):
				if not ADDONS_ENABLED['gcs']:
					raise Exception('GCS addon is not installed. Please install it using `secator install addons gcs`.')
				gcs_objects = True
				# Imported lazily: only needed (and importable) when the gcs addon is installed
				from secator.hooks.gcs import download_blob
				# gs://<bucket>/<object path...>
				split_input = input.split('/')
				bucket_name, source_blob_name = split_input[2], '/'.join(split_input[3:])
				destination_file_name = f'{blob_folder}/{source_blob_name}'
				download_blob(bucket_name, source_blob_name, destination_file_name)
				del_indexes.append(i)
		# Delete from the end so earlier indexes stay valid
		for i in reversed(del_indexes):
			del self.inputs[i]
		if gcs_objects:
			self.inputs.append(blob_folder)

	@staticmethod
	def on_cmd(self):
		"""Pick the trufflehog subcommand from --mode (or auto-detect it) and rewrite the command line."""
		mode = self.get_opt_value('mode')
		new_input = None
		submode = None
		input = self.inputs[0] if self.inputs else None
		if mode and mode not in TRUFFLEHOG_MODES:
			raise Exception(f'Invalid mode: {mode}')
		if not mode and input:
			# Auto-detect: local git repo > local path > github/gitlab URL
			git_path = Path(input).joinpath('.git')
			if git_path.exists():
				mode = 'git'
				submode = 'local'
			elif Path(input).exists():
				mode = 'filesystem'
			elif input.startswith('https://github.com/'):
				mode = 'github'
				# https://github.com/<org> -> 4 parts; https://github.com/<org>/<repo> -> 5 parts
				len_args = len(input.split('/'))
				if len_args == 4:
					submode = 'org'
					new_input = input.split('/')[-1]
				elif len_args == 5:
					submode = 'repo'
					new_input = '/'.join(input.split('/')[-2:])
			elif input.startswith('https://gitlab.com/'):
				mode = 'gitlab'

		if mode:
			console.print(Info(message=f'Auto mode detected: {mode} for input: {input}'))
		else:
			error = (f'Could not determine mode for input "{input}". Please specify the mode manually using the --mode option') # noqa: E501
			raise Exception(error)

		# Add correct option
		mode_to_option = {
			'github_org': '--org',
			'github_repo': '--repo',
			'git': None,
			'gitlab': '--repo',
			's3': '--bucket',
			'gcs': '--cloud-environment --project-id',
			'docker': '--image',
			'jenkins': '--url',
			None: None,
		}
		submode_to_option = {
			'local': 'file://',
			'org': '--org ',
			'repo': '--repo ',
			None: None,
		}
		# NOTE(review): the replaces below act on every occurrence of `input`
		# in the command string — assumes the input appears only once; confirm
		if new_input:
			console.print(Info(message=f'Replacing input {input} with {new_input}'))
			self.cmd = self.cmd.replace(input, f'{new_input}')
			input = new_input
		submode_option = submode_to_option.get(submode)
		if submode_option:
			self.cmd = self.cmd.replace(input, f'{submode_option}{input}')
		option = mode_to_option.get(mode)
		if option:
			self.cmd = self.cmd.replace(input, f'{option} {input}')
		if f'trufflehog {mode}' not in self.cmd:
			# Only the first 'trufflehog' (the binary name) becomes 'trufflehog <mode>'
			self.cmd = self.cmd.replace('trufflehog', f'trufflehog {mode}', 1)

	@staticmethod
	def on_json_loaded(self, item):
		"""Convert a trufflehog JSON item into an Info / Error / Tag output item."""
		# Log lines from trufflehog carry a 'level' key; findings do not
		level = item.get('level')
		if level:
			msg = item.get('msg', '').capitalize()
			if level.startswith('info'):
				yield Info(message=msg)
			elif msg == 'Error running scan':
				error = item.get('error')
				msg += ' - ' + error if error else ''
				yield Error(message=msg)
			return

		if 'SourceMetadata' not in item:
			# NOTE(review): this function is a generator, so `return item` only sets
			# StopIteration.value — confirm the hook runner consumes that value
			return item

		rule_id = caml_to_snake(item.get('DetectorName', 'Unknown'))
		source_metadata = item.get('SourceMetadata', {}).get('Data', {})
		# RawV2 (structured secret) takes precedence over Raw when present
		raw = item.get('RawV2') or item.get('Raw')
		detector_data = {caml_to_snake(k): v for k, v in item.items() if k not in ['SourceMetadata', 'Raw', 'RawV2']}
		# SourceMetadata.Data has a single key named after the source type (e.g. 'Git')
		data = {caml_to_snake(k): v for k, v in source_metadata[list(source_metadata.keys())[0]].items()}
		if 'timestamp' in data:
			del data['timestamp']
		subtype = list(source_metadata.keys())[0].lower()
		extra_data = {
			'subtype': subtype,
			'detector_data': {caml_to_snake(k): v for k, v in detector_data.items()}
		}
		extra_data.update({caml_to_snake(k): v for k, v in data.items()})
		# Build the best available match locator: link > file[:line], optionally prefixed by repo path
		match = ''
		repo_path = data.get('repository', '')
		if 'file://' in repo_path:
			repo_path = repo_path.replace('file://', '')
		file = data.get('file')
		line_no = data.get('line')
		link = data.get('link')
		if file:
			match += file
		if line_no:
			match += f":{line_no}"
		if link:
			match = link
		if repo_path and subtype != 'github':
			match = repo_path + '/' + match

		if not match:
			console.print(Warning(message=f'Could not determine match for subtype: {subtype}'))
			match = self.inputs[0]

		item_extra_data = item.get('ExtraData') or {}
		rtype = item_extra_data.get('resource_type')
		name = rule_id.lower()
		if rtype:
			name = f"{name}_{rtype.lower().replace(' ', '_')}"
		yield Tag(
			category='secret',
			name=name,
			value=raw,
			match=match,
			extra_data=extra_data
		)
|