bbot-2.6.0.6840rc0-py3-none-any.whl → bbot-2.7.2.7424rc0-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
- bbot/__init__.py +1 -1
- bbot/cli.py +22 -8
- bbot/core/engine.py +1 -1
- bbot/core/event/__init__.py +2 -2
- bbot/core/event/base.py +138 -110
- bbot/core/flags.py +1 -0
- bbot/core/helpers/bloom.py +6 -7
- bbot/core/helpers/depsinstaller/installer.py +21 -2
- bbot/core/helpers/dns/dns.py +0 -1
- bbot/core/helpers/dns/engine.py +0 -2
- bbot/core/helpers/files.py +2 -2
- bbot/core/helpers/git.py +17 -0
- bbot/core/helpers/helper.py +6 -5
- bbot/core/helpers/misc.py +8 -23
- bbot/core/helpers/ntlm.py +0 -2
- bbot/core/helpers/regex.py +1 -1
- bbot/core/helpers/regexes.py +25 -8
- bbot/core/helpers/web/web.py +2 -1
- bbot/core/modules.py +22 -60
- bbot/defaults.yml +4 -2
- bbot/modules/apkpure.py +1 -1
- bbot/modules/baddns.py +1 -1
- bbot/modules/baddns_direct.py +1 -1
- bbot/modules/baddns_zone.py +1 -1
- bbot/modules/badsecrets.py +1 -1
- bbot/modules/base.py +123 -38
- bbot/modules/bucket_amazon.py +1 -1
- bbot/modules/bucket_digitalocean.py +1 -1
- bbot/modules/bucket_firebase.py +1 -1
- bbot/modules/bucket_google.py +1 -1
- bbot/modules/{bucket_azure.py → bucket_microsoft.py} +2 -2
- bbot/modules/builtwith.py +4 -2
- bbot/modules/dnsbimi.py +1 -4
- bbot/modules/dnsbrute.py +6 -1
- bbot/modules/dnsdumpster.py +35 -52
- bbot/modules/dnstlsrpt.py +0 -6
- bbot/modules/docker_pull.py +1 -1
- bbot/modules/emailformat.py +17 -1
- bbot/modules/ffuf.py +4 -1
- bbot/modules/ffuf_shortnames.py +6 -3
- bbot/modules/filedownload.py +7 -4
- bbot/modules/git_clone.py +47 -22
- bbot/modules/gitdumper.py +4 -14
- bbot/modules/github_workflows.py +6 -5
- bbot/modules/gitlab_com.py +31 -0
- bbot/modules/gitlab_onprem.py +84 -0
- bbot/modules/gowitness.py +0 -6
- bbot/modules/graphql_introspection.py +5 -2
- bbot/modules/httpx.py +2 -0
- bbot/modules/iis_shortnames.py +0 -7
- bbot/modules/internal/cloudcheck.py +65 -72
- bbot/modules/internal/unarchive.py +9 -3
- bbot/modules/lightfuzz/lightfuzz.py +6 -2
- bbot/modules/lightfuzz/submodules/esi.py +42 -0
- bbot/modules/medusa.py +4 -7
- bbot/modules/nuclei.py +1 -1
- bbot/modules/otx.py +9 -2
- bbot/modules/output/base.py +3 -11
- bbot/modules/paramminer_headers.py +10 -7
- bbot/modules/portfilter.py +2 -0
- bbot/modules/postman_download.py +1 -1
- bbot/modules/retirejs.py +232 -0
- bbot/modules/securitytxt.py +0 -3
- bbot/modules/sslcert.py +2 -2
- bbot/modules/subdomaincenter.py +1 -16
- bbot/modules/telerik.py +7 -2
- bbot/modules/templates/bucket.py +24 -4
- bbot/modules/templates/gitlab.py +98 -0
- bbot/modules/trufflehog.py +6 -3
- bbot/modules/wafw00f.py +2 -2
- bbot/presets/web/lightfuzz-heavy.yml +1 -1
- bbot/presets/web/lightfuzz-medium.yml +1 -1
- bbot/presets/web/lightfuzz-superheavy.yml +1 -1
- bbot/scanner/manager.py +44 -37
- bbot/scanner/scanner.py +12 -4
- bbot/scripts/benchmark_report.py +433 -0
- bbot/test/benchmarks/__init__.py +2 -0
- bbot/test/benchmarks/test_bloom_filter_benchmarks.py +105 -0
- bbot/test/benchmarks/test_closest_match_benchmarks.py +76 -0
- bbot/test/benchmarks/test_event_validation_benchmarks.py +438 -0
- bbot/test/benchmarks/test_excavate_benchmarks.py +291 -0
- bbot/test/benchmarks/test_ipaddress_benchmarks.py +143 -0
- bbot/test/benchmarks/test_weighted_shuffle_benchmarks.py +70 -0
- bbot/test/test_step_1/test_bbot_fastapi.py +2 -2
- bbot/test/test_step_1/test_events.py +22 -21
- bbot/test/test_step_1/test_helpers.py +1 -0
- bbot/test/test_step_1/test_manager_scope_accuracy.py +45 -0
- bbot/test/test_step_1/test_modules_basic.py +40 -15
- bbot/test/test_step_1/test_python_api.py +2 -2
- bbot/test/test_step_1/test_regexes.py +21 -4
- bbot/test/test_step_1/test_scan.py +7 -8
- bbot/test/test_step_1/test_web.py +46 -0
- bbot/test/test_step_2/module_tests/base.py +6 -1
- bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +52 -18
- bbot/test/test_step_2/module_tests/test_module_bucket_google.py +1 -1
- bbot/test/test_step_2/module_tests/{test_module_bucket_azure.py → test_module_bucket_microsoft.py} +7 -5
- bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +19 -31
- bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +2 -1
- bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +3 -5
- bbot/test/test_step_2/module_tests/test_module_emailformat.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_emails.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_excavate.py +57 -4
- bbot/test/test_step_2/module_tests/test_module_github_workflows.py +10 -1
- bbot/test/test_step_2/module_tests/test_module_gitlab_com.py +66 -0
- bbot/test/test_step_2/module_tests/{test_module_gitlab.py → test_module_gitlab_onprem.py} +4 -69
- bbot/test/test_step_2/module_tests/test_module_lightfuzz.py +71 -3
- bbot/test/test_step_2/module_tests/test_module_nuclei.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_otx.py +3 -0
- bbot/test/test_step_2/module_tests/test_module_portfilter.py +2 -0
- bbot/test/test_step_2/module_tests/test_module_retirejs.py +161 -0
- bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_trufflehog.py +10 -1
- {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/METADATA +10 -7
- {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/RECORD +117 -106
- {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/WHEEL +1 -1
- {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info/licenses}/LICENSE +98 -58
- bbot/modules/censys.py +0 -98
- bbot/modules/gitlab.py +0 -141
- bbot/modules/zoomeye.py +0 -77
- bbot/test/test_step_2/module_tests/test_module_censys.py +0 -83
- bbot/test/test_step_2/module_tests/test_module_zoomeye.py +0 -35
- {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/entry_points.txt +0 -0
bbot/modules/retirejs.py
ADDED
@@ -0,0 +1,232 @@
+import json
+from enum import IntEnum
+from bbot.modules.base import BaseModule
+
+
+class RetireJSSeverity(IntEnum):
+    NONE = 0
+    LOW = 1
+    MEDIUM = 2
+    HIGH = 3
+    CRITICAL = 4
+
+    @classmethod
+    def from_string(cls, severity_str):
+        try:
+            return cls[severity_str.upper()]
+        except (KeyError, AttributeError):
+            return cls.NONE
+
+
+class retirejs(BaseModule):
+    watched_events = ["URL_UNVERIFIED"]
+    produced_events = ["FINDING"]
+    flags = ["active", "safe", "web-thorough"]
+    meta = {
+        "description": "Detect vulnerable/out-of-date JavaScript libraries",
+        "created_date": "2025-08-19",
+        "author": "@liquidsec",
+    }
+    options = {
+        "version": "5.3.0",
+        "node_version": "18.19.1",
+        "severity": "medium",
+    }
+    options_desc = {
+        "version": "retire.js version",
+        "node_version": "Node.js version to install locally",
+        "severity": "Minimum severity level to report (none, low, medium, high, critical)",
+    }
+
+    deps_ansible = [
+        # Download Node.js binary (Linux x64)
+        {
+            "name": "Download Node.js binary (Linux x64)",
+            "get_url": {
+                "url": "https://nodejs.org/dist/v#{BBOT_MODULES_RETIREJS_NODE_VERSION}/node-v#{BBOT_MODULES_RETIREJS_NODE_VERSION}-linux-x64.tar.xz",
+                "dest": "#{BBOT_TEMP}/node-v#{BBOT_MODULES_RETIREJS_NODE_VERSION}-linux-x64.tar.xz",
+                "mode": "0644",
+            },
+        },
+        # Extract Node.js binary (x64)
+        {
+            "name": "Extract Node.js binary (x64)",
+            "unarchive": {
+                "src": "#{BBOT_TEMP}/node-v#{BBOT_MODULES_RETIREJS_NODE_VERSION}-linux-x64.tar.xz",
+                "dest": "#{BBOT_TOOLS}",
+                "remote_src": True,
+            },
+        },
+        # Remove existing node directory if it exists
+        {
+            "name": "Remove existing node directory",
+            "file": {"path": "#{BBOT_TOOLS}/node", "state": "absent"},
+        },
+        # Rename extracted directory to 'node' (x64)
+        {
+            "name": "Rename Node.js directory (x64)",
+            "command": "mv #{BBOT_TOOLS}/node-v#{BBOT_MODULES_RETIREJS_NODE_VERSION}-linux-x64 #{BBOT_TOOLS}/node",
+        },
+        # Set permissions on entire Node.js bin directory
+        {
+            "name": "Set permissions on Node.js bin directory",
+            "file": {"path": "#{BBOT_TOOLS}/node/bin", "mode": "0755", "recurse": "yes"},
+        },
+        # Make Node.js binary executable
+        {
+            "name": "Make Node.js binary executable",
+            "file": {"path": "#{BBOT_TOOLS}/node/bin/node", "mode": "0755"},
+        },
+        # Remove existing retirejs directory if it exists
+        {
+            "name": "Remove existing retirejs directory",
+            "file": {"path": "#{BBOT_TOOLS}/retirejs", "state": "absent"},
+        },
+        # Create retire.js local directory
+        {
+            "name": "Create retire.js directory in BBOT_TOOLS",
+            "file": {"path": "#{BBOT_TOOLS}/retirejs", "state": "directory", "mode": "0755"},
+        },
+        # Install retire.js locally using local Node.js
+        {
+            "name": "Install retire.js locally",
+            "shell": "cd #{BBOT_TOOLS}/retirejs && #{BBOT_TOOLS}/node/bin/node #{BBOT_TOOLS}/node/lib/node_modules/npm/bin/npm-cli.js install --prefix . retire@#{BBOT_MODULES_RETIREJS_VERSION} --no-fund --no-audit --silent --no-optional",
+            "args": {"creates": "#{BBOT_TOOLS}/retirejs/node_modules/.bin/retire"},
+            "timeout": 600,
+            "ignore_errors": False,
+        },
+        # Make retire script executable
+        {
+            "name": "Make retire script executable",
+            "file": {"path": "#{BBOT_TOOLS}/retirejs/node_modules/.bin/retire", "mode": "0755"},
+        },
+        # Create retire cache directory
+        {
+            "name": "Create retire cache directory",
+            "file": {"path": "#{BBOT_CACHE}/retire_cache", "state": "directory", "mode": "0755"},
+        },
+    ]
+
+    accept_url_special = True
+    scope_distance_modifier = 1
+    _module_threads = 4
+
+    async def setup(self):
+        excavate_enabled = self.scan.config.get("excavate")
+        if not excavate_enabled:
+            return None, "retirejs will not function without excavate enabled"
+
+        # Validate severity level
+        valid_severities = ["none", "low", "medium", "high", "critical"]
+        configured_severity = self.config.get("severity", "medium").lower()
+        if configured_severity not in valid_severities:
+            return (
+                False,
+                f"Invalid severity level '{configured_severity}'. Valid options are: {', '.join(valid_severities)}",
+            )
+
+        self.repofile = await self.helpers.download(
+            "https://raw.githubusercontent.com/RetireJS/retire.js/master/repository/jsrepository-v4.json", cache_hrs=24
+        )
+        if not self.repofile:
+            return False, "failed to download retire.js repository file"
+        return True
+
+    async def handle_event(self, event):
+        js_file = await self.helpers.request(event.data)
+        if js_file:
+            js_file_body = js_file.text
+            if js_file_body:
+                js_file_body_saved = self.helpers.tempfile(js_file_body, pipe=False, extension="js")
+                results = await self.execute_retirejs(js_file_body_saved)
+                if not results:
+                    self.warning("no output from retire.js")
+                    return
+                results_json = json.loads(results)
+                if results_json.get("data"):
+                    for file_result in results_json["data"]:
+                        for component_result in file_result.get("results", []):
+                            component = component_result.get("component", "unknown")
+                            version = component_result.get("version", "unknown")
+                            vulnerabilities = component_result.get("vulnerabilities", [])
+                            for vuln in vulnerabilities:
+                                severity = vuln.get("severity", "unknown")
+
+                                # Filter by minimum severity level
+                                min_severity = RetireJSSeverity.from_string(self.config.get("severity", "medium"))
+                                vuln_severity = RetireJSSeverity.from_string(severity)
+                                if vuln_severity < min_severity:
+                                    self.debug(
+                                        f"Skipping vulnerability with severity '{severity}' (below minimum '{min_severity.name.lower()}')"
+                                    )
+                                    continue
+
+                                identifiers = vuln.get("identifiers", {})
+                                summary = identifiers.get("summary", "Unknown vulnerability")
+                                cves = identifiers.get("CVE", [])
+                                description_parts = [
+                                    f"Vulnerable JavaScript library detected: {component} v{version}",
+                                    f"Severity: {severity.upper()}",
+                                    f"Summary: {summary}",
+                                    f"JavaScript URL: {event.data}",
+                                ]
+                                if cves:
+                                    description_parts.append(f"CVE(s): {', '.join(cves)}")
+
+                                below_version = vuln.get("below", "")
+                                at_or_above = vuln.get("atOrAbove", "")
+                                if at_or_above and below_version:
+                                    description_parts.append(f"Affected versions: [{at_or_above} to {below_version})")
+                                elif below_version:
+                                    description_parts.append(f"Affected versions: [< {below_version}]")
+                                elif at_or_above:
+                                    description_parts.append(f"Affected versions: [>= {at_or_above}]")
+                                description = " ".join(description_parts)
+                                data = {
+                                    "description": description,
+                                    "severity": severity,
+                                    "component": component,
+                                    "url": event.parent.data["url"],
+                                }
+                                await self.emit_event(
+                                    data,
+                                    "FINDING",
+                                    parent=event,
+                                    context=f"{{module}} identified vulnerable JavaScript library {component} v{version} ({severity} severity)",
+                                )
+
+    async def filter_event(self, event):
+        url_extension = getattr(event, "url_extension", "")
+        if url_extension != "js":
+            return False, f"it is a {url_extension} URL but retirejs only accepts js URLs"
+        return True
+
+    async def execute_retirejs(self, js_file):
+        cache_dir = self.helpers.cache_dir / "retire_cache"
+        retire_dir = self.scan.helpers.tools_dir / "retirejs"
+        local_node_dir = self.scan.helpers.tools_dir / "node"
+
+        # Use the retire binary directly with our local Node.js
+        retire_binary_path = retire_dir / "node_modules" / ".bin" / "retire"
+        command = [
+            str(local_node_dir / "bin" / "node"),
+            str(retire_binary_path),
+            "--outputformat",
+            "json",
+            "--cachedir",
+            str(cache_dir),
+            "--path",
+            js_file,
+            "--jsrepo",
+            str(self.repofile),
+        ]
+
+        proxy = self.scan.web_config.get("http_proxy")
+        if proxy:
+            command.extend(["--proxy", proxy])
+
+        self.verbose(f"Running retire.js on {js_file}")
+        self.verbose(f"retire.js command: {command}")
+
+        result = await self.run_process(command)
+        return result.stdout
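The severity gate in handle_event() works because IntEnum members compare as integers, so string severities from retire.js output become ordered values. A quick standalone illustration of that comparison (mirroring, not importing, the module's enum):

from enum import IntEnum

class Severity(IntEnum):
    NONE = 0
    LOW = 1
    MEDIUM = 2
    HIGH = 3
    CRITICAL = 4

    @classmethod
    def from_string(cls, severity_str):
        try:
            return cls[severity_str.upper()]
        except (KeyError, AttributeError):
            return cls.NONE  # unknown/missing severities fall below every threshold

minimum = Severity.from_string("medium")
for reported in ("low", "high", "bogus"):
    action = "report" if Severity.from_string(reported) >= minimum else "skip"
    print(f"{reported} -> {action}")  # low -> skip, high -> report, bogus -> skip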
bbot/modules/securitytxt.py
CHANGED
bbot/modules/sslcert.py
CHANGED
@@ -20,7 +20,7 @@ class sslcert(BaseModule):
     options = {"timeout": 5.0, "skip_non_ssl": True}
     options_desc = {"timeout": "Socket connect timeout in seconds", "skip_non_ssl": "Don't try common non-SSL ports"}
     deps_apt = ["openssl"]
-    deps_pip = ["pyOpenSSL~=
+    deps_pip = ["pyOpenSSL~=25.3.0"]
     _module_threads = 25
     scope_distance_modifier = 1
     _priority = 2
@@ -77,7 +77,7 @@ class sslcert(BaseModule):
         dns_names = dns_names[:1] + [n for n in dns_names[1:] if self.scan.in_scope(n)]
         for event_type, results in (("DNS_NAME", set(dns_names)), ("EMAIL_ADDRESS", emails)):
             for event_data in results:
-                if event_data is not None and event_data != event:
+                if event_data is not None and event_data != event.data:
                     self.debug(f"Discovered new {event_type} via SSL certificate parsing: [{event_data}]")
                     try:
                         ssl_event = self.make_event(event_data, event_type, parent=event, raise_error=True)
bbot/modules/subdomaincenter.py
CHANGED
@@ -12,25 +12,10 @@ class subdomaincenter(subdomain_enum):
     }
 
     base_url = "https://api.subdomain.center"
-    retries = 2
-
-    async def sleep(self, time_to_wait):
-        self.info(f"Sleeping for {time_to_wait} seconds to avoid rate limit")
-        await self.helpers.sleep(time_to_wait)
 
     async def request_url(self, query):
         url = f"{self.base_url}/?domain={self.helpers.quote(query)}"
-        response =
-        status_code = 0
-        for i, _ in enumerate(range(self.retries + 1)):
-            if i > 0:
-                self.verbose(f"Retry #{i} for {query} after response code {status_code}")
-            response = await self.helpers.request(url, timeout=self.http_timeout + 30)
-            status_code = getattr(response, "status_code", 0)
-            if status_code == 429:
-                await self.sleep(20)
-            else:
-                break
+        response = await self.api_request(url)
         return response
 
     async def parse_results(self, r, query):
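The deleted loop hand-rolled retry-on-429 logic; the module now delegates to BBOT's shared api_request() helper instead. For reference, a minimal standalone sketch of the pattern that was removed (a generic helper under assumed names, not BBOT's actual implementation):

import asyncio

async def request_with_retries(request_fn, url, retries=2, backoff=20):
    # generic stand-in for the deleted loop: retry on HTTP 429, otherwise return
    response = None
    for attempt in range(retries + 1):
        if attempt > 0:
            print(f"Retry #{attempt} for {url}")
        response = await request_fn(url)
        if getattr(response, "status_code", 0) != 429:
            break
        await asyncio.sleep(backoff)  # rate-limited: wait before the next attempt
    return response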
bbot/modules/telerik.py
CHANGED
@@ -161,7 +161,7 @@ class telerik(BaseModule):
 
     in_scope_only = True
 
-    deps_pip = ["pycryptodome~=3.
+    deps_pip = ["pycryptodome~=3.23.0"]
 
     deps_ansible = [
         {"name": "Create telerik dir", "file": {"state": "directory", "path": "#{BBOT_TOOLS}/telerik/"}},
@@ -204,7 +204,7 @@ class telerik(BaseModule):
         webresource = "Telerik.Web.UI.WebResource.axd?type=rau"
         result, _ = await self.test_detector(base_url, webresource)
         if result:
-            if "RadAsyncUpload handler is registered
+            if "RadAsyncUpload handler is registered succesfully" in result.text:
                 self.verbose("Detected Telerik instance (Telerik.Web.UI.WebResource.axd?type=rau)")
 
                 probe_data = {
@@ -263,6 +263,11 @@ class telerik(BaseModule):
             str(root_tool_path / "testfile.txt"),
             result.url,
         ]
+
+        # Add proxy if set in the scan config
+        if self.scan.http_proxy:
+            command.append(self.scan.http_proxy)
+
         output = await self.run_process(command)
         description = f"[CVE-2017-11317] [{str(version)}] {webresource}"
         if "fileInfo" in output.stdout:
bbot/modules/templates/bucket.py
CHANGED
@@ -1,3 +1,6 @@
+import importlib
+import regex as re
+from functools import cached_property
 from bbot.modules.base import BaseModule
 
 
@@ -11,7 +14,7 @@ class bucket_template(BaseModule):
     }
     scope_distance_modifier = 3
 
-
+    cloudcheck_provider_name = "Amazon|Google|DigitalOcean|etc"
     delimiters = ("", ".", "-")
     base_domains = ["s3.amazonaws.com|digitaloceanspaces.com|etc"]
     regions = [None]
@@ -19,8 +22,14 @@ class bucket_template(BaseModule):
 
     async def setup(self):
         self.buckets_tried = set()
-        self.cloud_helper = self.helpers.cloud.providers[self.cloud_helper_name]
         self.permutations = self.config.get("permutations", False)
+        cloudcheck_import_path = "cloudcheck.providers"
+        try:
+            self.cloudcheck_provider = getattr(
+                importlib.import_module(cloudcheck_import_path), self.cloudcheck_provider_name
+            )
+        except (ImportError, AttributeError) as e:
+            return False, f"cloud helper at {cloudcheck_import_path} not found: {e}"
         return True
 
     async def filter_event(self, event):
@@ -33,7 +42,7 @@ class bucket_template(BaseModule):
         return True
 
     def filter_bucket(self, event):
-        if f"
+        if not any(t.endswith(f"-{self.cloudcheck_provider_name.lower()}") for t in event.tags):
            return False, "bucket belongs to a different cloud provider"
        return True, ""
 
@@ -156,7 +165,7 @@ class bucket_template(BaseModule):
     return (msg, tags)
 
     def valid_bucket_name(self, bucket_name):
-        valid = self.
+        valid = self.is_valid_bucket_name(bucket_name)
         if valid and not self.helpers.is_ip(bucket_name):
             bucket_hash = hash(bucket_name)
             if bucket_hash not in self.buckets_tried:
@@ -164,6 +173,17 @@ class bucket_template(BaseModule):
                 return True
         return False
 
+    def is_valid_bucket_name(self, bucket_name):
+        return any(regex.match(bucket_name) for regex in self.bucket_name_regexes)
+
+    @cached_property
+    def bucket_name_regexes(self):
+        return [re.compile(regex) for regex in self.cloudcheck_provider.regexes["STORAGE_BUCKET_NAME"]]
+
+    # @cached_property
+    # def bucket_hostname_regexes(self):
+    #     return [re.compile(regex) for regex in self.cloudcheck_provider.regexes["STORAGE_BUCKET_HOSTNAME"]]
+
     def build_url(self, bucket_name, base_domain, region):
         return f"https://{bucket_name}.{base_domain}/"
 
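The new setup() resolves the provider class dynamically by name instead of going through self.helpers.cloud.providers. A standalone sketch of that importlib lookup, assuming the cloudcheck package is installed and exposes a provider class such as "Amazon" in cloudcheck.providers:

import importlib

def load_cloudcheck_provider(provider_name, import_path="cloudcheck.providers"):
    # mirrors the template's setup(): resolve the provider class by name at runtime
    try:
        return getattr(importlib.import_module(import_path), provider_name)
    except (ImportError, AttributeError) as e:
        raise RuntimeError(f"cloud helper at {import_path} not found: {e}")

# e.g.:
# provider = load_cloudcheck_provider("Amazon")
# bucket_name_regexes = [re.compile(r) for r in provider.regexes["STORAGE_BUCKET_NAME"]]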
bbot/modules/templates/gitlab.py
ADDED
@@ -0,0 +1,98 @@
+from bbot.modules.base import BaseModule
+
+
+class GitLabBaseModule(BaseModule):
+    """Common functionality for interacting with GitLab instances.
+
+    This template is intended to be inherited by two concrete modules:
+    1. ``gitlab_com`` – Handles public SaaS instances (gitlab.com / gitlab.org).
+    2. ``gitlab_onprem`` – Handles self-hosted, on-premises GitLab servers.
+
+    Both child modules share identical behaviour when talking to the GitLab
+    REST API; they only differ in which events they are willing to accept.
+    """
+
+    # domains owned by GitLab
+    saas_domains = ["gitlab.com", "gitlab.org"]
+
+    async def setup(self):
+        if self.options.get("api_key") is not None:
+            await self.require_api_key()
+        return True
+
+    async def handle_social(self, event):
+        """Enumerate projects belonging to a user or group profile."""
+        username = event.data.get("profile_name", "")
+        if not username:
+            return
+        base_url = self.get_base_url(event)
+        urls = [
+            # User-owned projects
+            self.helpers.urljoin(base_url, f"api/v4/users/{username}/projects?simple=true"),
+            # Group-owned projects
+            self.helpers.urljoin(base_url, f"api/v4/groups/{username}/projects?simple=true"),
+        ]
+        for url in urls:
+            await self.handle_projects_url(url, event)
+
+    async def handle_projects_url(self, projects_url, event):
+        for project in await self.gitlab_json_request(projects_url):
+            project_url = project.get("web_url", "")
+            if project_url:
+                code_event = self.make_event({"url": project_url}, "CODE_REPOSITORY", tags="git", parent=event)
+                await self.emit_event(
+                    code_event,
+                    context=f"{{module}} enumerated projects and found {{event.type}} at {project_url}",
+                )
+            namespace = project.get("namespace", {})
+            if namespace:
+                await self.handle_namespace(namespace, event)
+
+    async def handle_groups_url(self, groups_url, event):
+        for group in await self.gitlab_json_request(groups_url):
+            await self.handle_namespace(group, event)
+
+    async def gitlab_json_request(self, url):
+        """Helper that performs an HTTP request and safely returns JSON list."""
+        response = await self.api_request(url)
+        if response is not None:
+            try:
+                json_data = response.json()
+            except Exception:
+                return []
+            if json_data and isinstance(json_data, list):
+                return json_data
+        return []
+
+    async def handle_namespace(self, namespace, event):
+        namespace_name = namespace.get("path", "")
+        namespace_url = namespace.get("web_url", "")
+        namespace_path = namespace.get("full_path", "")
+
+        if not (namespace_name and namespace_url and namespace_path):
+            return
+
+        namespace_url = self.helpers.parse_url(namespace_url)._replace(path=f"/{namespace_path}").geturl()
+
+        social_event = self.make_event(
+            {
+                "platform": "gitlab",
+                "profile_name": namespace_path,
+                "url": namespace_url,
+            },
+            "SOCIAL",
+            parent=event,
+        )
+        await self.emit_event(
+            social_event,
+            context=f'{{module}} found GitLab namespace ({{event.type}}) "{namespace_name}" at {namespace_url}',
+        )
+
+    # ------------------------------------------------------------------
+    # Utility helpers
+    # ------------------------------------------------------------------
+    def get_base_url(self, event):
+        base_url = event.data.get("url", "")
+        if not base_url:
+            base_url = f"https://{event.host}"
+        return self.helpers.urlparse(base_url)._replace(path="/").geturl()
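A hypothetical child module built on this template, to show the intended inheritance; the watched/produced events here are illustrative assumptions, and the real gitlab_com and gitlab_onprem modules added in this release define their own acceptance logic:

from bbot.modules.templates.gitlab import GitLabBaseModule

class gitlab_example(GitLabBaseModule):
    # illustrative values; the real modules define their own events, flags, and meta
    watched_events = ["SOCIAL"]
    produced_events = ["CODE_REPOSITORY", "SOCIAL"]

    async def handle_event(self, event):
        # delegate project/namespace enumeration to the shared template logic
        await self.handle_social(event)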
bbot/modules/trufflehog.py
CHANGED
@@ -14,7 +14,7 @@ class trufflehog(BaseModule):
     }
 
     options = {
-        "version": "3.90.
+        "version": "3.90.8",
         "config": "",
         "only_verified": True,
         "concurrency": 8,
@@ -41,11 +41,14 @@ class trufflehog(BaseModule):
 
     scope_distance_modifier = 2
 
-    async def
-        self.verified = self.config.get("only_verified", True)
+    async def setup_deps(self):
         self.config_file = self.config.get("config", "")
         if self.config_file:
             self.config_file = await self.helpers.wordlist(self.config_file)
+        return True
+
+    async def setup(self):
+        self.verified = self.config.get("only_verified", True)
         self.concurrency = int(self.config.get("concurrency", 8))
 
         self.deleted_forks = self.config.get("deleted_forks", False)
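The config wordlist download moves out of setup() into setup_deps(), which appears to be a new lifecycle hook from this release's bbot/modules/base.py changes (+123 -38). Assuming it runs during dependency preparation, before setup(), a module splitting its initialization might look like this hypothetical sketch:

from bbot.modules.base import BaseModule

class example(BaseModule):
    async def setup_deps(self):
        # assumption: fetch external artifacts (wordlists, config files) up front
        self.config_file = self.config.get("config", "")
        if self.config_file:
            self.config_file = await self.helpers.wordlist(self.config_file)
        return True

    async def setup(self):
        # plain option parsing stays in setup()
        self.verified = self.config.get("only_verified", True)
        return True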
bbot/modules/wafw00f.py
CHANGED
@@ -22,7 +22,7 @@ class wafw00f(BaseModule):
         "author": "@liquidsec",
     }
 
-    deps_pip = ["wafw00f~=2.
+    deps_pip = ["wafw00f~=2.3.1"]
 
     options = {"generic_detect": True}
     options_desc = {"generic_detect": "When no specific WAF detections are made, try to perform a generic detect"}
@@ -42,7 +42,7 @@ class wafw00f(BaseModule):
     async def handle_event(self, event):
         url = f"{event.parsed_url.scheme}://{event.parsed_url.netloc}/"
         WW = await self.helpers.run_in_executor(wafw00f_main.WAFW00F, url, followredirect=False)
-        waf_detections = await self.helpers.run_in_executor(WW.identwaf)
+        waf_detections, url = await self.helpers.run_in_executor(WW.identwaf)
         if waf_detections:
             for waf in waf_detections:
                 await self.emit_event(
bbot/presets/web/lightfuzz-heavy.yml
CHANGED
@@ -8,6 +8,6 @@ config:
   modules:
     lightfuzz:
       force_common_headers: True # Fuzz common headers like X-Forwarded-For even if they're not observed on the target
-      enabled_submodules: [cmdi,crypto,path,serial,sqli,ssti,xss]
+      enabled_submodules: [cmdi,crypto,path,serial,sqli,ssti,xss,esi]
   excavate:
     speculate_params: True # speculate potential parameters extracted from JSON/XML web responses
bbot/scanner/manager.py
CHANGED
@@ -94,10 +94,6 @@ class ScanIngress(BaseInterceptModule):
         # special handling of URL extensions
         url_extension = getattr(event, "url_extension", None)
         if url_extension is not None:
-            if url_extension in self.scan.url_extension_httpx_only:
-                event.add_tag("httpx-only")
-                event._omit = True
-
             # blacklist by extension
             if url_extension in self.scan.url_extension_blacklist:
                 self.debug(
@@ -192,58 +188,69 @@ class ScanEgress(BaseInterceptModule):
         abort_if = kwargs.pop("abort_if", None)
         on_success_callback = kwargs.pop("on_success_callback", None)
 
-        #
-
-
-
-
-
+        # mark omitted event types
+        # we could do this all in the output module's filter_event(), but we mark it here permanently so the events' .get_parent() can factor in the omission, and skip over omitted parents
+        omitted_event_type = event.type in self.scan.omitted_event_types
+        is_target = "target" in event.tags
+        if omitted_event_type and not is_target:
+            self.debug(f"Making {event} omitted because its type is omitted in the config")
+            event._omit = True
 
         # make event internal if it's above our configured report distance
         event_in_report_distance = event.scope_distance <= self.scan.scope_report_distance
         event_will_be_output = event.always_emit or event_in_report_distance
 
-        if
-
-
-
-
+        # if an event isn't being re-emitted for output, we may want to make it internal
+        if not event._graph_important:
+            if not event_will_be_output and not event.internal:
+                self.debug(
+                    f"Making {event} internal because its scope_distance ({event.scope_distance}) > scope_report_distance ({self.scan.scope_report_distance})"
+                )
+                event.internal = True
+
+            # mark special URLs (e.g. Javascript) as internal so they don't get output except when they're critical to the graph
+            if event.type.startswith("URL") and not event.internal:
+                extension = getattr(event, "url_extension", "")
+                if extension in self.scan.url_extension_special:
+                    self.debug(f"Making {event} internal because it is a special URL (extension {extension})")
+                    event.internal = True
 
-
-
-
+        # custom callback - abort event emission if it returns true
+        abort_result = False
+        if callable(abort_if):
+            async with self.scan._acatch(context=abort_if):
+                abort_result = await self.scan.helpers.execute_sync_or_async(abort_if, event)
+            msg = f"{event.module}: not raising event {event} due to custom criteria in abort_if()"
+            with suppress(ValueError, TypeError):
+                abort_result, reason = abort_result
+                msg += f": {reason}"
+            if abort_result:
+                return False, msg
+
+        if event._suppress_chain_dupes:
+            for parent in event.get_parents():
+                if parent == event:
+                    return False, f"an identical parent {event} was found, and _suppress_chain_dupes=True"
 
         # if we discovered something interesting from an internal event,
         # make sure we preserve its chain of parents
-
+        # here we retroactively resurrect any interesting internal events that led to this discovery
+        # "interesting" meaning any event types that aren't omitted in the config
+        # (by using .get_parent() instead of .parent, we're intentionally skipping over omitted events)
+        parent = event.get_parent()
         event_is_graph_worthy = (not event.internal) or event._graph_important
         parent_is_graph_worthy = (not parent.internal) or parent._graph_important
         if event_is_graph_worthy and not parent_is_graph_worthy:
             parent_in_report_distance = parent.scope_distance <= self.scan.scope_report_distance
+            self.debug(f"parent {parent} in report distance: {parent_in_report_distance}")
             if parent_in_report_distance:
+                self.debug(f"setting parent {parent} internal to False")
                 parent.internal = False
             if not parent._graph_important:
-                parent._graph_important = True
                 self.debug(f"Re-queuing internal event {parent} with parent {event} to prevent graph orphan")
+                parent._graph_important = True
                 await self.emit_event(parent)
 
-        if event._suppress_chain_dupes:
-            for parent in event.get_parents():
-                if parent == event:
-                    return False, f"an identical parent {event} was found, and _suppress_chain_dupes=True"
-
-        # custom callback - abort event emission it returns true
-        abort_result = False
-        if callable(abort_if):
-            async with self.scan._acatch(context=abort_if):
-                abort_result = await self.scan.helpers.execute_sync_or_async(abort_if, event)
-            msg = f"{event.module}: not raising event {event} due to custom criteria in abort_if()"
-            with suppress(ValueError, TypeError):
-                abort_result, reason = abort_result
-                msg += f": {reason}"
-            if abort_result:
-                return False, msg
-
         # run success callback before distributing event (so it can add tags, etc.)
         if callable(on_success_callback):
             async with self.scan._acatch(context=on_success_callback):