bbot-2.3.0.5546rc0-py3-none-any.whl → bbot-2.3.1.5815rc0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bbot/__init__.py +1 -1
- bbot/cli.py +1 -1
- bbot/core/engine.py +1 -1
- bbot/core/event/base.py +7 -5
- bbot/core/helpers/async_helpers.py +7 -1
- bbot/core/helpers/depsinstaller/installer.py +7 -2
- bbot/core/helpers/diff.py +13 -4
- bbot/core/helpers/dns/brute.py +8 -2
- bbot/core/helpers/dns/engine.py +3 -2
- bbot/core/helpers/ratelimiter.py +8 -2
- bbot/core/helpers/regexes.py +5 -2
- bbot/core/helpers/web/engine.py +1 -1
- bbot/core/helpers/web/web.py +1 -1
- bbot/core/shared_deps.py +14 -0
- bbot/defaults.yml +44 -0
- bbot/modules/ajaxpro.py +64 -37
- bbot/modules/baddns.py +23 -15
- bbot/modules/baddns_direct.py +2 -2
- bbot/modules/badsecrets.py +2 -2
- bbot/modules/base.py +49 -15
- bbot/modules/censys.py +1 -1
- bbot/modules/deadly/dastardly.py +3 -3
- bbot/modules/deadly/nuclei.py +1 -1
- bbot/modules/dehashed.py +2 -2
- bbot/modules/dnsbrute_mutations.py +3 -1
- bbot/modules/docker_pull.py +1 -1
- bbot/modules/dockerhub.py +2 -2
- bbot/modules/dotnetnuke.py +12 -12
- bbot/modules/extractous.py +1 -1
- bbot/modules/ffuf_shortnames.py +107 -48
- bbot/modules/filedownload.py +6 -0
- bbot/modules/generic_ssrf.py +54 -40
- bbot/modules/github_codesearch.py +2 -2
- bbot/modules/github_org.py +16 -20
- bbot/modules/github_workflows.py +6 -2
- bbot/modules/gowitness.py +6 -0
- bbot/modules/hunt.py +1 -1
- bbot/modules/hunterio.py +1 -1
- bbot/modules/iis_shortnames.py +23 -7
- bbot/modules/internal/excavate.py +5 -3
- bbot/modules/internal/unarchive.py +82 -0
- bbot/modules/jadx.py +2 -2
- bbot/modules/output/asset_inventory.py +1 -1
- bbot/modules/output/base.py +1 -1
- bbot/modules/output/discord.py +2 -1
- bbot/modules/output/slack.py +2 -1
- bbot/modules/output/teams.py +10 -25
- bbot/modules/output/web_parameters.py +55 -0
- bbot/modules/paramminer_headers.py +15 -10
- bbot/modules/portfilter.py +41 -0
- bbot/modules/portscan.py +1 -22
- bbot/modules/postman.py +61 -43
- bbot/modules/postman_download.py +10 -147
- bbot/modules/sitedossier.py +1 -1
- bbot/modules/skymem.py +1 -1
- bbot/modules/templates/postman.py +163 -1
- bbot/modules/templates/subdomain_enum.py +1 -1
- bbot/modules/templates/webhook.py +17 -26
- bbot/modules/trufflehog.py +3 -3
- bbot/modules/wappalyzer.py +1 -1
- bbot/modules/zoomeye.py +1 -1
- bbot/presets/kitchen-sink.yml +1 -1
- bbot/presets/nuclei/nuclei-budget.yml +19 -0
- bbot/presets/nuclei/nuclei-intense.yml +28 -0
- bbot/presets/nuclei/nuclei-technology.yml +23 -0
- bbot/presets/nuclei/nuclei.yml +34 -0
- bbot/presets/spider-intense.yml +13 -0
- bbot/scanner/preset/args.py +29 -3
- bbot/scanner/preset/preset.py +43 -24
- bbot/scanner/scanner.py +17 -7
- bbot/test/bbot_fixtures.py +7 -7
- bbot/test/test_step_1/test_bloom_filter.py +2 -2
- bbot/test/test_step_1/test_cli.py +5 -5
- bbot/test/test_step_1/test_dns.py +33 -0
- bbot/test/test_step_1/test_events.py +15 -5
- bbot/test/test_step_1/test_modules_basic.py +21 -21
- bbot/test/test_step_1/test_presets.py +94 -4
- bbot/test/test_step_1/test_regexes.py +13 -13
- bbot/test/test_step_1/test_scan.py +78 -0
- bbot/test/test_step_1/test_web.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_ajaxpro.py +43 -23
- bbot/test/test_step_2/module_tests/test_module_azure_realm.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_baddns.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_dnscaa.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +9 -9
- bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +12 -12
- bbot/test/test_step_2/module_tests/test_module_excavate.py +15 -15
- bbot/test/test_step_2/module_tests/test_module_extractous.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +8 -8
- bbot/test/test_step_2/module_tests/test_module_generic_ssrf.py +3 -1
- bbot/test/test_step_2/module_tests/test_module_github_codesearch.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_gowitness.py +9 -9
- bbot/test/test_step_2/module_tests/test_module_iis_shortnames.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +35 -1
- bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_portfilter.py +48 -0
- bbot/test/test_step_2/module_tests/test_module_postman.py +338 -3
- bbot/test/test_step_2/module_tests/test_module_postman_download.py +4 -161
- bbot/test/test_step_2/module_tests/test_module_securitytxt.py +12 -12
- bbot/test/test_step_2/module_tests/test_module_teams.py +10 -1
- bbot/test/test_step_2/module_tests/test_module_trufflehog.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_unarchive.py +229 -0
- bbot/test/test_step_2/module_tests/test_module_viewdns.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_web_parameters.py +59 -0
- bbot/test/test_step_2/module_tests/test_module_websocket.py +5 -4
- {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.5815rc0.dist-info}/METADATA +7 -7
- {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.5815rc0.dist-info}/RECORD +115 -105
- {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.5815rc0.dist-info}/WHEEL +1 -1
- bbot/wordlists/ffuf_shortname_candidates.txt +0 -107982
- /bbot/presets/{baddns-thorough.yml → baddns-intense.yml} +0 -0
- {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.5815rc0.dist-info}/LICENSE +0 -0
- {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.5815rc0.dist-info}/entry_points.txt +0 -0
bbot/modules/generic_ssrf.py
CHANGED

@@ -37,10 +37,10 @@ ssrf_params = [
 class BaseSubmodule:
     technique_description = "base technique description"
     severity = "INFO"
-    paths = …
+    paths = []

-    def __init__(self, parent_module):
-        self.parent_module = parent_module
+    def __init__(self, generic_ssrf):
+        self.generic_ssrf = generic_ssrf
         self.test_paths = self.create_paths()

     def set_base_url(self, event):

@@ -51,30 +51,30 @@ class BaseSubmodule:

     async def test(self, event):
         base_url = self.set_base_url(event)
-        …
+        for test_path_result in self.test_paths:
+            for lower in [True, False]:
+                test_path = test_path_result[0]
+                if lower:
+                    test_path = test_path.lower()
+                subdomain_tag = test_path_result[1]
+                test_url = f"{base_url}{test_path}"
+                self.generic_ssrf.debug(f"Sending request to URL: {test_url}")
+                r = await self.generic_ssrf.helpers.curl(url=test_url)
+                if r:
+                    self.process(event, r, subdomain_tag)

     def process(self, event, r, subdomain_tag):
-        response_token = self.parent_module.interactsh_domain.split(".")[0][::-1]
+        response_token = self.generic_ssrf.interactsh_domain.split(".")[0][::-1]
         if response_token in r:
-            …
+            echoed_response = True
         else:
-            …
+            echoed_response = False

-        self.parent_module.interactsh_subdomain_tags[subdomain_tag] = (
+        self.generic_ssrf.interactsh_subdomain_tags[subdomain_tag] = (
             event,
             self.technique_description,
             self.severity,
-            …
+            echoed_response,
         )


@@ -86,15 +86,15 @@ class Generic_SSRF(BaseSubmodule):
         return event.data

     def create_paths(self):
-        …
-        for param in ssrf_params:
-            query_string += f"{param}=http://SSRF_CANARY&"
-        …
-        query_string_lower = ""
+        test_paths = []
         for param in ssrf_params:
-            …
+            query_string = ""
+            subdomain_tag = self.generic_ssrf.helpers.rand_string(4)
+            ssrf_canary = f"{subdomain_tag}.{self.generic_ssrf.interactsh_domain}"
+            self.generic_ssrf.parameter_subdomain_tags_map[subdomain_tag] = param
+            query_string += f"{param}=http://{ssrf_canary}&"
+            test_paths.append((f"?{query_string.rstrip('&')}", subdomain_tag))
+        return test_paths


 class Generic_SSRF_POST(BaseSubmodule):

@@ -107,21 +107,22 @@ class Generic_SSRF_POST(BaseSubmodule):
     async def test(self, event):
         test_url = f"{event.data}"

-        subdomain_tag = self.parent_module.helpers.rand_string(4, digits=False)
         post_data = {}
         for param in ssrf_params:
-            …
+            subdomain_tag = self.generic_ssrf.helpers.rand_string(4, digits=False)
+            self.generic_ssrf.parameter_subdomain_tags_map[subdomain_tag] = param
+            post_data[param] = f"http://{subdomain_tag}.{self.generic_ssrf.interactsh_domain}"

-        subdomain_tag_lower = self.parent_module.helpers.rand_string(4, digits=False)
+        subdomain_tag_lower = self.generic_ssrf.helpers.rand_string(4, digits=False)
         post_data_lower = {
-            k.lower(): f"http://{subdomain_tag_lower}.{self.parent_module.interactsh_domain}"
+            k.lower(): f"http://{subdomain_tag_lower}.{self.generic_ssrf.interactsh_domain}"
             for k, v in post_data.items()
         }

         post_data_list = [(subdomain_tag, post_data), (subdomain_tag_lower, post_data_lower)]

         for tag, pd in post_data_list:
-            r = await self.parent_module.helpers.curl(url=test_url, method="POST", post_data=pd)
+            r = await self.generic_ssrf.helpers.curl(url=test_url, method="POST", post_data=pd)
             self.process(event, r, tag)


@@ -131,17 +132,17 @@ class Generic_XXE(BaseSubmodule):
     paths = None

     async def test(self, event):
-        rand_entity = self.parent_module.helpers.rand_string(4, digits=False)
-        subdomain_tag = self.parent_module.helpers.rand_string(4, digits=False)
+        rand_entity = self.generic_ssrf.helpers.rand_string(4, digits=False)
+        subdomain_tag = self.generic_ssrf.helpers.rand_string(4, digits=False)

         post_body = f"""<?xml version="1.0" encoding="ISO-8859-1"?>
<!DOCTYPE foo [
<!ELEMENT foo ANY >
-<!ENTITY {rand_entity} SYSTEM "http://{subdomain_tag}.{self.parent_module.interactsh_domain}" >
+<!ENTITY {rand_entity} SYSTEM "http://{subdomain_tag}.{self.generic_ssrf.interactsh_domain}" >
]>
<foo>&{rand_entity};</foo>"""
         test_url = event.parsed_url.geturl()
-        r = await self.parent_module.helpers.curl(
+        r = await self.generic_ssrf.helpers.curl(
             url=test_url, method="POST", raw_body=post_body, headers={"Content-type": "application/xml"}
         )
         if r:

@@ -160,6 +161,7 @@ class generic_ssrf(BaseModule):
     async def setup(self):
         self.submodules = {}
         self.interactsh_subdomain_tags = {}
+        self.parameter_subdomain_tags_map = {}
         self.severity = None
         self.generic_only = self.config.get("generic_only", False)

@@ -190,22 +192,34 @@ class generic_ssrf(BaseModule):

     async def interactsh_callback(self, r):
         full_id = r.get("full-id", None)
+        subdomain_tag = full_id.split(".")[0]
+
         if full_id:
             if "." in full_id:
-                match = self.interactsh_subdomain_tags.get(…)
+                match = self.interactsh_subdomain_tags.get(subdomain_tag)
                 if not match:
                     return
                 matched_event = match[0]
                 matched_technique = match[1]
                 matched_severity = match[2]
-                …
+                matched_echoed_response = str(match[3])
+
+                # Check if any SSRF parameter is in the DNS request
+                triggering_param = self.parameter_subdomain_tags_map.get(subdomain_tag, None)
+                description = f"Out-of-band interaction: [{matched_technique}]"
+                if triggering_param:
+                    self.debug(f"Found triggering parameter: {triggering_param}")
+                    description += f" [Triggering Parameter: {triggering_param}]"
+                description += f" [{r.get('protocol').upper()}] Echoed Response: {matched_echoed_response}"
+
+                self.debug(f"Emitting event with description: {description}")  # Debug the final description

                 await self.emit_event(
                     {
                         "severity": matched_severity,
                         "host": str(matched_event.host),
                         "url": matched_event.data,
-                        "description": …
+                        "description": description,
                     },
                     "VULNERABILITY",
                     matched_event,

@@ -227,7 +241,7 @@ class generic_ssrf(BaseModule):

     async def finish(self):
         if self.scan.config.get("interactsh_disable", False) is False:
-            await self.helpers.sleep(…)
+            await self.helpers.sleep(2)
             try:
                 for r in await self.interactsh_instance.poll():
                     await self.interactsh_callback(r)
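The heart of this refactor: instead of one shared SSRF_CANARY placeholder, every injected parameter gets its own random subdomain tag, and parameter_subdomain_tags_map remembers which tag belongs to which parameter, so an interactsh DNS callback can be traced back to the exact parameter that fired. A minimal standalone sketch of that bookkeeping (the helpers below are stand-ins, not bbot's API):

import secrets

interactsh_domain = "abc123.oast.example"  # assumed value; bbot gets this from interactsh
parameter_subdomain_tags_map = {}

def make_canary(param):
    tag = secrets.token_hex(2)  # stands in for helpers.rand_string(4)
    parameter_subdomain_tags_map[tag] = param
    return f"http://{tag}.{interactsh_domain}"

payload = {param: make_canary(param) for param in ("dest", "uri", "path")}  # illustrative params

# When a DNS interaction arrives, its first label is the tag, which maps back to the parameter:
full_id = f"{next(iter(parameter_subdomain_tags_map))}.{interactsh_domain}"
triggering_param = parameter_subdomain_tags_map[full_id.split(".")[0]]
assert triggering_param in payload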
bbot/modules/github_codesearch.py
CHANGED

@@ -42,7 +42,7 @@ class github_codesearch(github, subdomain_enum):
     async def query(self, query):
         repos = {}
         url = f"{self.base_url}/search/code?per_page=100&type=Code&q={self.helpers.quote(query)}&page=" + "{page}"
-        agen = self.api_page_iter(url, headers=self.headers, …)
+        agen = self.api_page_iter(url, headers=self.headers, _json=False)
         num_results = 0
         try:
             async for r in agen:

@@ -78,7 +78,7 @@ class github_codesearch(github, subdomain_enum):
             if num_results >= self.limit:
                 break
         finally:
-            agen.aclose()
+            await agen.aclose()
         return repos

     def raw_url(self, url):
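The one-character aclose() fix matters: on an async generator, aclose() returns a coroutine, so the old code built that coroutine and discarded it; the generator's finally block doesn't run at that point (cleanup is deferred to garbage collection, with a RuntimeWarning). A self-contained illustration:

import asyncio

async def pages():
    try:
        for i in range(5):
            yield i
    finally:
        print("cleanup ran")  # deterministic only when aclose() is awaited

async def main():
    agen = pages()
    try:
        async for page in agen:
            if page == 1:
                break  # leave the generator suspended mid-iteration
    finally:
        await agen.aclose()  # bare agen.aclose() would create, then drop, a coroutine

asyncio.run(main())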
bbot/modules/github_org.py
CHANGED

@@ -24,6 +24,10 @@ class github_org(github):
         self.include_member_repos = self.config.get("include_member_repos", False)
         return await super().setup()

+    def _api_response_is_success(self, r):
+        # we allow 404s because they're normal
+        return r.is_success or getattr(r, "status_code", 0) == 404
+
     async def filter_event(self, event):
         if event.type == "SOCIAL":
             if event.data.get("platform", "") != "github":

@@ -104,7 +108,7 @@ class github_org(github):
     async def query_org_repos(self, query):
         repos = []
         url = f"{self.base_url}/orgs/{self.helpers.quote(query)}/repos?per_page=100&page=" + "{page}"
-        agen = self.api_page_iter(url, …)
+        agen = self.api_page_iter(url, _json=False)
         try:
             async for r in agen:
                 if r is None:

@@ -126,13 +130,13 @@ class github_org(github):
                     html_url = item.get("html_url", "")
                     repos.append(html_url)
         finally:
-            agen.aclose()
+            await agen.aclose()
         return repos

     async def query_org_members(self, query):
         members = []
         url = f"{self.base_url}/orgs/{self.helpers.quote(query)}/members?per_page=100&page=" + "{page}"
-        agen = self.api_page_iter(url, …)
+        agen = self.api_page_iter(url, _json=False)
         try:
             async for r in agen:
                 if r is None:

@@ -154,13 +158,13 @@ class github_org(github):
                     login = item.get("login", "")
                     members.append(login)
         finally:
-            agen.aclose()
+            await agen.aclose()
         return members

     async def query_user_repos(self, query):
         repos = []
         url = f"{self.base_url}/users/{self.helpers.quote(query)}/repos?per_page=100&page=" + "{page}"
-        agen = self.api_page_iter(url, …)
+        agen = self.api_page_iter(url, _json=False)
         try:
             async for r in agen:
                 if r is None:

@@ -182,7 +186,7 @@ class github_org(github):
                     html_url = item.get("html_url", "")
                     repos.append(html_url)
         finally:
-            agen.aclose()
+            await agen.aclose()
         return repos

     async def validate_org(self, org):

@@ -198,18 +202,10 @@ class github_org(github):
             return is_org, in_scope
         if status_code == 200:
             is_org = True
-            …
-            if (
-                isinstance(v, str)
-                and (self.helpers.is_dns_name(v) and "." in v or self.helpers.is_url(v) or self.helpers.is_email(v))
-                and self.scan.in_scope(v)
-            ):
-                self.verbose(f'Found in-scope key "{k}": "{v}" for {org}, it appears to be in-scope')
-                in_scope = True
-                break
+            in_scope_hosts = await self.scan.extract_in_scope_hostnames(getattr(r, "text", ""))
+            if in_scope_hosts:
+                self.verbose(
+                    f'Found in-scope hostname(s): "{in_scope_hosts}" for github org: {org}, it appears to be in-scope'
+                )
+                in_scope = True
         return is_org, in_scope
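This module and github_workflows add the same override: a 404 from the GitHub API counts as a "successful" response, because probing org and user names that may not exist is routine here and shouldn't be treated as an API failure. The check in isolation, against a stand-in response object:

class FakeResponse:
    def __init__(self, status_code):
        self.status_code = status_code

    @property
    def is_success(self):
        return 200 <= self.status_code < 300

def api_response_is_success(r):
    # mirrors _api_response_is_success above: 404s are normal when probing names
    return r.is_success or getattr(r, "status_code", 0) == 404

assert api_response_is_success(FakeResponse(200))
assert api_response_is_success(FakeResponse(404))
assert not api_response_is_success(FakeResponse(403))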
bbot/modules/github_workflows.py
CHANGED

@@ -30,6 +30,10 @@ class github_workflows(github):
         self.helpers.mkdir(self.output_dir)
         return await super().setup()

+    def _api_response_is_success(self, r):
+        # we allow 404s because they're normal
+        return r.is_success or getattr(r, "status_code", 0) == 404
+
     async def filter_event(self, event):
         if event.type == "CODE_REPOSITORY":
             if "git" not in event.tags and "github" not in event.data.get("url", ""):

@@ -88,7 +92,7 @@ class github_workflows(github):
     async def get_workflows(self, owner, repo):
         workflows = []
         url = f"{self.base_url}/repos/{owner}/{repo}/actions/workflows?per_page=100&page=" + "{page}"
-        agen = self.api_page_iter(url, …)
+        agen = self.api_page_iter(url, _json=False)
         try:
             async for r in agen:
                 if r is None:

@@ -109,7 +113,7 @@ class github_workflows(github):
             for item in j:
                 workflows.append(item)
         finally:
-            agen.aclose()
+            await agen.aclose()
         return workflows

     async def get_workflow_runs(self, owner, repo, workflow_id):
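One idiom worth calling out in these pagination helpers: the URL is built as an f-string concatenated with a literal "{page}", so everything except the page number is substituted immediately, while {page} survives as a template field for api_page_iter to fill in later. Sketch (values assumed):

base_url = "https://api.github.com"  # assumed
owner, repo = "some-org", "some-repo"  # hypothetical
url = f"{base_url}/repos/{owner}/{repo}/actions/workflows?per_page=100&page=" + "{page}"

for page in range(1, 4):
    print(url.format(page=page))  # ...&page=1, ...&page=2, ...&page=3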
bbot/modules/gowitness.py
CHANGED

@@ -1,3 +1,4 @@
+import os
 import asyncio
 import aiosqlite
 import multiprocessing

@@ -71,6 +72,11 @@ class gowitness(BaseModule):
         if custom_chrome_path.is_file():
             self.chrome_path = custom_chrome_path

+        # fix ubuntu-specific sandbox bug
+        chrome_devel_sandbox = self.helpers.tools_dir / "chrome-linux" / "chrome_sandbox"
+        if chrome_devel_sandbox.is_file():
+            os.environ["CHROME_DEVEL_SANDBOX"] = str(chrome_devel_sandbox)
+
         # make sure we have a working chrome install
         chrome_test_pass = False
         for binary in ("chrome", "chromium", "chromium-browser", custom_chrome_path):
bbot/modules/hunt.py
CHANGED
bbot/modules/hunterio.py
CHANGED
bbot/modules/iis_shortnames.py
CHANGED

@@ -22,10 +22,11 @@ class iis_shortnames(BaseModule):
         "created_date": "2022-04-15",
         "author": "@liquidsec",
     }
-    options = {"detect_only": True, "max_node_count": 50}
+    options = {"detect_only": True, "max_node_count": 50, "speculate_magic_urls": True}
     options_desc = {
         "detect_only": "Only detect the vulnerability and do not run the shortname scanner",
         "max_node_count": "Limit how many nodes to attempt to resolve on any given recursion branch",
+        "speculate_magic_urls": "Attempt to discover iis 'magic' special folders",
     }
     in_scope_only = True

@@ -131,7 +132,10 @@ class iis_shortnames(BaseModule):
         kwargs = {"method": method, "allow_redirects": False, "retries": 2, "timeout": 10}
         for c in valid_chars:
             for file_part in ("stem", "ext"):
-                …
+                if file_part == "stem":
+                    payload = encode_all(f"*{c}*~1*")
+                elif file_part == "ext":
+                    payload = encode_all(f"*~1*{c}*")
                 url = f"{target}{payload}{suffix}"
                 urls_and_kwargs.append((url, kwargs, (c, file_part)))

@@ -236,6 +240,15 @@ class iis_shortnames(BaseModule):
                 event,
                 context="{module} detected low {event.type}: IIS shortname enumeration",
             )
+
+            if self.config.get("speculate_magic_urls") and "iis-magic-url" not in event.tags:
+                magic_url_bin = f"{normalized_url}bin::$INDEX_ALLOCATION/"
+                self.debug(f"making IIS magic URL: {magic_url_bin}")
+                magic_url_event = self.make_event(
+                    magic_url_bin, "URL", parent=event, tags=["iis-magic-url", "status-403"]
+                )
+                await self.scan.modules["iis_shortnames"].incoming_event_queue.put(magic_url_event)
+
             if not self.config.get("detect_only"):
                 for detection in detections:
                     safety_counter = safety_counter_obj()

@@ -245,7 +258,6 @@ class iis_shortnames(BaseModule):

                     if valid_method_confirmed:
                         break
-
                     confirmed_chars, confirmed_exts = await self.solve_valid_chars(
                         method, normalized_url, affirmative_status_code
                     )

@@ -261,8 +273,8 @@ class iis_shortnames(BaseModule):
                    else:
                        continue

-                    self.…
-                    self.…
+                    self.verbose(f"Confirmed character list: {','.join(confirmed_chars)}")
+                    self.verbose(f"Confirmed ext character list: {','.join(confirmed_exts)}")
                    try:
                        file_name_hints = list(
                            set(

@@ -321,14 +333,18 @@ class iis_shortnames(BaseModule):

                     for url_hint in url_hint_list:
                         if "." in url_hint:
-                            hint_type = "shortname-…
+                            hint_type = "shortname-endpoint"
                         else:
                             hint_type = "shortname-directory"
+
+                        tags = [hint_type]
+                        if "iis-magic-url" in event.tags:
+                            tags.append("iis-magic-url")
                         await self.emit_event(
                             f"{normalized_url}/{url_hint}",
                             "URL_HINT",
                             event,
-                            tags=…
+                            tags=tags,
                             context=f"{{module}} enumerated shortnames at {normalized_url} and found {{event.type}}: {url_hint}",
                         )
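The new speculate_magic_urls option builds on the ::$INDEX_ALLOCATION trick: $INDEX_ALLOCATION names the NTFS attribute backing a directory's index, and IIS has historically resolved a path like bin::$INDEX_ALLOCATION/ to the bin folder itself, sidestepping request filtering on /bin/. Note the two guards in the diff: the event must not already carry the iis-magic-url tag (otherwise each magic URL would spawn another), and hints found under a magic URL inherit the tag. A toy version of that loop prevention (URL illustrative):

def speculate_magic_url(normalized_url, event_tags):
    # refuse to speculate on an event that itself came from a magic URL
    if "iis-magic-url" in event_tags:
        return None
    return f"{normalized_url}bin::$INDEX_ALLOCATION/", ["iis-magic-url", "status-403"]

url_and_tags = speculate_magic_url("http://target.example/", set())
assert url_and_tags is not None
# feeding the result back in a second time is a no-op:
assert speculate_magic_url(url_and_tags[0], set(url_and_tags[1])) is None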
bbot/modules/internal/excavate.py
CHANGED

@@ -1,6 +1,7 @@
 import yara
 import json
 import html
+import time
 import inspect
 import regex as re
 from pathlib import Path

@@ -776,8 +777,7 @@ class excavate(BaseInternalModule, BaseInterceptModule):

     def __init__(self, excavate):
         super().__init__(excavate)
-        …
-        self.yara_rules["hostname_extraction"] = excavate.scan.dns_yara_rules_uncompiled
+        self.yara_rules.update(excavate.scan.dns_yara_rules_uncompiled)

     async def process(self, yara_results, event, yara_rule_settings, discovery_context):
         for identifier in yara_results.keys():

@@ -882,10 +882,12 @@ class excavate(BaseInternalModule, BaseInterceptModule):
         yara.set_config(max_match_data=yara_max_match_data)
         yara_rules_combined = "\n".join(self.yara_rules_dict.values())
         try:
-            …
+            start = time.time()
+            self.verbose(f"Compiling {len(self.yara_rules_dict):,} YARA rules")
             for rule_name, rule_content in self.yara_rules_dict.items():
                 self.debug(f" - {rule_name}")
             self.yara_rules = yara.compile(source=yara_rules_combined)
+            self.verbose(f"{len(self.yara_rules_dict):,} YARA rules compiled in {time.time() - start:.2f} seconds")
         except yara.SyntaxError as e:
             self.debug(yara_rules_combined)
             return False, f"Yara Rules failed to compile with error: [{e}]"
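Two small excavate changes here: the scan's DNS extraction rules are merged into the module's rule dict as peers (update() instead of nesting them under a single "hostname_extraction" key), and the combined compile is now timed. A reduced sketch of the dict-of-named-rules compile pattern with yara-python (the rules themselves are toy examples, not bbot's):

import time
import yara  # yara-python

yara_rules_dict = {
    "hostname_extraction": 'rule hostname_extraction { strings: $h = "example.com" condition: $h }',
    "url_extraction": 'rule url_extraction { strings: $u = "https://" condition: $u }',
}

start = time.time()
combined = "\n".join(yara_rules_dict.values())
rules = yara.compile(source=combined)  # one compiled ruleset, many named rules
print(f"{len(yara_rules_dict):,} rules compiled in {time.time() - start:.2f} seconds")
print([m.rule for m in rules.match(data="visit https://example.com")])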
bbot/modules/internal/unarchive.py
ADDED

@@ -0,0 +1,82 @@
+from pathlib import Path
+from bbot.modules.internal.base import BaseInternalModule
+from bbot.core.helpers.libmagic import get_magic_info, get_compression
+
+
+class unarchive(BaseInternalModule):
+    watched_events = ["FILESYSTEM"]
+    produced_events = ["FILESYSTEM"]
+    flags = ["passive", "safe"]
+    meta = {
+        "description": "Extract different types of files into folders on the filesystem",
+        "created_date": "2024-12-08",
+        "author": "@domwhewell-sage",
+    }
+
+    async def setup(self):
+        self.ignore_compressions = ["application/java-archive", "application/vnd.android.package-archive"]
+        self.compression_methods = {
+            "zip": ["7z", "x", '-p""', "-aoa", "{filename}", "-o{extract_dir}/"],
+            "bzip2": ["tar", "--overwrite", "-xvjf", "{filename}", "-C", "{extract_dir}/"],
+            "xz": ["tar", "--overwrite", "-xvJf", "{filename}", "-C", "{extract_dir}/"],
+            "7z": ["7z", "x", '-p""', "-aoa", "{filename}", "-o{extract_dir}/"],
+            # "rar": ["7z", "x", '-p""', "-aoa", "{filename}", "-o{extract_dir}/"],
+            # "lzma": ["7z", "x", '-p""', "-aoa", "{filename}", "-o{extract_dir}/"],
+            "tar": ["tar", "--overwrite", "-xvf", "{filename}", "-C", "{extract_dir}/"],
+            "gzip": ["tar", "--overwrite", "-xvzf", "{filename}", "-C", "{extract_dir}/"],
+        }
+        return True
+
+    async def filter_event(self, event):
+        if "file" in event.tags:
+            if event.data["magic_mime_type"] in self.ignore_compressions:
+                return False, f"Ignoring file type: {event.data['magic_mime_type']}, {event.data['path']}"
+            if "compression" in event.data:
+                if not event.data["compression"] in self.compression_methods:
+                    return (
+                        False,
+                        f"Extract unable to handle file type: {event.data['compression']}, {event.data['path']}",
+                    )
+            else:
+                return False, f"Event is not a compressed file: {event.data['path']}"
+        else:
+            return False, "Event is not a file"
+        return True
+
+    async def handle_event(self, event):
+        path = Path(event.data["path"])
+        output_dir = path.parent / path.name.replace(".", "_")
+
+        # Use the appropriate extraction method based on the file type
+        self.info(f"Extracting {path} to {output_dir}")
+        success = await self.extract_file(path, output_dir)
+
+        # If the extraction was successful, emit the event
+        if success:
+            await self.emit_event(
+                {"path": str(output_dir)},
+                "FILESYSTEM",
+                tags=["folder", "unarchived-folder"],
+                parent=event,
+                context=f'extracted "{path}" to: {output_dir}',
+            )
+        else:
+            output_dir.rmdir()
+
+    async def extract_file(self, path, output_dir):
+        extension, mime_type, description, confidence = get_magic_info(path)
+        compression_format = get_compression(mime_type)
+        cmd_list = self.compression_methods.get(compression_format, [])
+        if cmd_list:
+            if not output_dir.exists():
+                self.helpers.mkdir(output_dir)
+            command = [s.format(filename=path, extract_dir=output_dir) for s in cmd_list]
+            try:
+                await self.run_process(command, check=True)
+                for item in output_dir.iterdir():
+                    if item.is_file():
+                        await self.extract_file(item, output_dir / item.stem)
+            except Exception as e:
+                self.warning(f"Error extracting {path}. Error: {e}")
+                return False
+        return True
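Note how compression_methods stores argv templates rather than shell strings: each element is passed through str.format individually and the result is run as an argument list, so filenames with spaces or shell metacharacters never pass through a shell. The templating step in isolation (paths hypothetical):

from pathlib import Path

cmd_list = ["tar", "--overwrite", "-xvzf", "{filename}", "-C", "{extract_dir}/"]
path = Path("/tmp/sample.tar.gz")
output_dir = Path("/tmp/sample_tar_gz")
command = [s.format(filename=path, extract_dir=output_dir) for s in cmd_list]
print(command)
# ['tar', '--overwrite', '-xvzf', '/tmp/sample.tar.gz', '-C', '/tmp/sample_tar_gz/']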
bbot/modules/jadx.py
CHANGED

@@ -6,7 +6,7 @@ from bbot.modules.internal.base import BaseModule
 class jadx(BaseModule):
     watched_events = ["FILESYSTEM"]
     produced_events = ["FILESYSTEM"]
-    flags = ["passive", "safe"]
+    flags = ["passive", "safe", "code-enum"]
     meta = {
         "description": "Decompile APKs and XAPKs using JADX",
         "created_date": "2024-11-04",

@@ -60,7 +60,7 @@ class jadx(BaseModule):
         await self.emit_event(
             {"path": str(output_dir)},
             "FILESYSTEM",
-            tags="folder",
+            tags=["folder", "unarchived-folder"],
             parent=event,
             context=f'extracted "{path}" to: {output_dir}',
         )
bbot/modules/output/asset_inventory.py
CHANGED

@@ -154,7 +154,7 @@ class asset_inventory(CSV):
             stats_sorted = sorted(stats[header].items(), key=lambda x: x[-1], reverse=True)
             total = totals[header]
             for k, v in stats_sorted:
-                table.append([str(k), f"{v:,}/{total} ({v/total*100:.1f}%)"])
+                table.append([str(k), f"{v:,}/{total} ({v / total * 100:.1f}%)"])
             self.log_table(table, table_header, table_name=f"asset-inventory-{header}")

         if self._file is not None:
bbot/modules/output/base.py
CHANGED

@@ -14,7 +14,7 @@ class BaseOutputModule(BaseModule):
             event_type = f"[{event.type}]"
             event_tags = ""
             if getattr(event, "tags", []):
-                event_tags = f…
+                event_tags = f"\t({', '.join(sorted(getattr(event, 'tags', [])))})"
             event_str = f"{event_type:<20}\t{event.data_human}\t{event.module_sequence}{event_tags}"
             return event_str
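The completed f-string renders an event's sorted tags as a parenthesized, tab-separated suffix on the one-line event summary. A toy rendering with made-up values:

event_type = "[FINDING]"
tags = {"in-scope", "http"}
event_tags = f"\t({', '.join(sorted(tags))})" if tags else ""
print(f"{event_type:<20}\texample-data\texample-module{event_tags}")
# [FINDING]           	example-data	example-module	(http, in-scope)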
bbot/modules/output/discord.py
CHANGED

@@ -8,9 +8,10 @@ class Discord(WebhookOutputModule):
         "created_date": "2023-08-14",
         "author": "@TheTechromancer",
     }
-    options = {"webhook_url": "", "event_types": ["VULNERABILITY", "FINDING"], "min_severity": "LOW"}
+    options = {"webhook_url": "", "event_types": ["VULNERABILITY", "FINDING"], "min_severity": "LOW", "retries": 10}
     options_desc = {
         "webhook_url": "Discord webhook URL",
         "event_types": "Types of events to send",
         "min_severity": "Only allow VULNERABILITY events of this severity or higher",
+        "retries": "Number of times to retry sending the message before skipping the event",
     }
bbot/modules/output/slack.py
CHANGED

@@ -10,11 +10,12 @@ class Slack(WebhookOutputModule):
         "created_date": "2023-08-14",
         "author": "@TheTechromancer",
     }
-    options = {"webhook_url": "", "event_types": ["VULNERABILITY", "FINDING"], "min_severity": "LOW"}
+    options = {"webhook_url": "", "event_types": ["VULNERABILITY", "FINDING"], "min_severity": "LOW", "retries": 10}
     options_desc = {
         "webhook_url": "Discord webhook URL",
         "event_types": "Types of events to send",
         "min_severity": "Only allow VULNERABILITY events of this severity or higher",
+        "retries": "Number of times to retry sending the message before skipping the event",
     }
     content_key = "text"
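Discord and Slack (and, per the file list, Teams) now surface a retries option; the loop that consumes it lives in the shared templates/webhook.py, which this view doesn't expand. A hedged sketch of how such an option is typically consumed, with hypothetical helper names rather than bbot's implementation:

import asyncio

async def send_with_retries(post, url, payload, retries=10):
    # `post` is any async callable returning an httpx-style response (hypothetical)
    for _ in range(retries + 1):
        response = await post(url, json=payload)
        if response is not None and 200 <= getattr(response, "status_code", 0) < 300:
            return True
        await asyncio.sleep(2)  # back off before the next attempt
    return False  # exhausted retries; the caller skips the event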