bbot 2.3.0.5546rc0__py3-none-any.whl → 2.3.1.5815rc0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of bbot might be problematic.
- bbot/__init__.py +1 -1
- bbot/cli.py +1 -1
- bbot/core/engine.py +1 -1
- bbot/core/event/base.py +7 -5
- bbot/core/helpers/async_helpers.py +7 -1
- bbot/core/helpers/depsinstaller/installer.py +7 -2
- bbot/core/helpers/diff.py +13 -4
- bbot/core/helpers/dns/brute.py +8 -2
- bbot/core/helpers/dns/engine.py +3 -2
- bbot/core/helpers/ratelimiter.py +8 -2
- bbot/core/helpers/regexes.py +5 -2
- bbot/core/helpers/web/engine.py +1 -1
- bbot/core/helpers/web/web.py +1 -1
- bbot/core/shared_deps.py +14 -0
- bbot/defaults.yml +44 -0
- bbot/modules/ajaxpro.py +64 -37
- bbot/modules/baddns.py +23 -15
- bbot/modules/baddns_direct.py +2 -2
- bbot/modules/badsecrets.py +2 -2
- bbot/modules/base.py +49 -15
- bbot/modules/censys.py +1 -1
- bbot/modules/deadly/dastardly.py +3 -3
- bbot/modules/deadly/nuclei.py +1 -1
- bbot/modules/dehashed.py +2 -2
- bbot/modules/dnsbrute_mutations.py +3 -1
- bbot/modules/docker_pull.py +1 -1
- bbot/modules/dockerhub.py +2 -2
- bbot/modules/dotnetnuke.py +12 -12
- bbot/modules/extractous.py +1 -1
- bbot/modules/ffuf_shortnames.py +107 -48
- bbot/modules/filedownload.py +6 -0
- bbot/modules/generic_ssrf.py +54 -40
- bbot/modules/github_codesearch.py +2 -2
- bbot/modules/github_org.py +16 -20
- bbot/modules/github_workflows.py +6 -2
- bbot/modules/gowitness.py +6 -0
- bbot/modules/hunt.py +1 -1
- bbot/modules/hunterio.py +1 -1
- bbot/modules/iis_shortnames.py +23 -7
- bbot/modules/internal/excavate.py +5 -3
- bbot/modules/internal/unarchive.py +82 -0
- bbot/modules/jadx.py +2 -2
- bbot/modules/output/asset_inventory.py +1 -1
- bbot/modules/output/base.py +1 -1
- bbot/modules/output/discord.py +2 -1
- bbot/modules/output/slack.py +2 -1
- bbot/modules/output/teams.py +10 -25
- bbot/modules/output/web_parameters.py +55 -0
- bbot/modules/paramminer_headers.py +15 -10
- bbot/modules/portfilter.py +41 -0
- bbot/modules/portscan.py +1 -22
- bbot/modules/postman.py +61 -43
- bbot/modules/postman_download.py +10 -147
- bbot/modules/sitedossier.py +1 -1
- bbot/modules/skymem.py +1 -1
- bbot/modules/templates/postman.py +163 -1
- bbot/modules/templates/subdomain_enum.py +1 -1
- bbot/modules/templates/webhook.py +17 -26
- bbot/modules/trufflehog.py +3 -3
- bbot/modules/wappalyzer.py +1 -1
- bbot/modules/zoomeye.py +1 -1
- bbot/presets/kitchen-sink.yml +1 -1
- bbot/presets/nuclei/nuclei-budget.yml +19 -0
- bbot/presets/nuclei/nuclei-intense.yml +28 -0
- bbot/presets/nuclei/nuclei-technology.yml +23 -0
- bbot/presets/nuclei/nuclei.yml +34 -0
- bbot/presets/spider-intense.yml +13 -0
- bbot/scanner/preset/args.py +29 -3
- bbot/scanner/preset/preset.py +43 -24
- bbot/scanner/scanner.py +17 -7
- bbot/test/bbot_fixtures.py +7 -7
- bbot/test/test_step_1/test_bloom_filter.py +2 -2
- bbot/test/test_step_1/test_cli.py +5 -5
- bbot/test/test_step_1/test_dns.py +33 -0
- bbot/test/test_step_1/test_events.py +15 -5
- bbot/test/test_step_1/test_modules_basic.py +21 -21
- bbot/test/test_step_1/test_presets.py +94 -4
- bbot/test/test_step_1/test_regexes.py +13 -13
- bbot/test/test_step_1/test_scan.py +78 -0
- bbot/test/test_step_1/test_web.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_ajaxpro.py +43 -23
- bbot/test/test_step_2/module_tests/test_module_azure_realm.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_baddns.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_dnscaa.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +9 -9
- bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +12 -12
- bbot/test/test_step_2/module_tests/test_module_excavate.py +15 -15
- bbot/test/test_step_2/module_tests/test_module_extractous.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +8 -8
- bbot/test/test_step_2/module_tests/test_module_generic_ssrf.py +3 -1
- bbot/test/test_step_2/module_tests/test_module_github_codesearch.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_gowitness.py +9 -9
- bbot/test/test_step_2/module_tests/test_module_iis_shortnames.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +35 -1
- bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_portfilter.py +48 -0
- bbot/test/test_step_2/module_tests/test_module_postman.py +338 -3
- bbot/test/test_step_2/module_tests/test_module_postman_download.py +4 -161
- bbot/test/test_step_2/module_tests/test_module_securitytxt.py +12 -12
- bbot/test/test_step_2/module_tests/test_module_teams.py +10 -1
- bbot/test/test_step_2/module_tests/test_module_trufflehog.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_unarchive.py +229 -0
- bbot/test/test_step_2/module_tests/test_module_viewdns.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_web_parameters.py +59 -0
- bbot/test/test_step_2/module_tests/test_module_websocket.py +5 -4
- {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.5815rc0.dist-info}/METADATA +7 -7
- {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.5815rc0.dist-info}/RECORD +115 -105
- {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.5815rc0.dist-info}/WHEEL +1 -1
- bbot/wordlists/ffuf_shortname_candidates.txt +0 -107982
- /bbot/presets/{baddns-thorough.yml → baddns-intense.yml} +0 -0
- {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.5815rc0.dist-info}/LICENSE +0 -0
- {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.5815rc0.dist-info}/entry_points.txt +0 -0
bbot/__init__.py
CHANGED
bbot/cli.py
CHANGED
@@ -247,7 +247,7 @@ async def _main():
             log_to_stderr(f"Error in keyboard listen task: {e}", level="ERROR")
             log_to_stderr(traceback.format_exc(), level="TRACE")

-    asyncio.create_task(akeyboard_listen())
+    keyboard_listen_task = asyncio.create_task(akeyboard_listen())  # noqa F841

     await scan.async_start_without_generator()
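The cli.py change above binds the keyboard-listener task to a name (the # noqa F841 comment silences the unused-variable lint) because asyncio's event loop keeps only a weak reference to tasks, so a task created without holding a reference can be garbage-collected before it finishes. A minimal sketch of that general pattern, using a hypothetical keyboard_listen coroutine:

import asyncio

background_tasks = set()

async def keyboard_listen():
    # stand-in for a long-running background coroutine
    await asyncio.sleep(3600)

async def main():
    # Keep a strong reference so the task is not garbage-collected mid-flight.
    task = asyncio.create_task(keyboard_listen())
    background_tasks.add(task)
    task.add_done_callback(background_tasks.discard)
    await asyncio.sleep(0)  # let the task start

asyncio.run(main())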
bbot/core/engine.py
CHANGED
@@ -79,7 +79,7 @@ class EngineBase:
                 self.log.debug(f"{self.name}: Timeout after {interval:,} seconds {context}, retrying...")
                 retries += 1
                 if max_retries is not None and retries > max_retries:
-                    raise TimeoutError(f"Timed out after {(max_retries+1)*interval:,} seconds {context}")
+                    raise TimeoutError(f"Timed out after {(max_retries + 1) * interval:,} seconds {context}")

     def engine_debug(self, *args, **kwargs):
         if self._engine_debug:
bbot/core/event/base.py
CHANGED
@@ -1032,8 +1032,8 @@ class ClosestHostEvent(DictHostEvent):
                 self.data = self.data
                 break
         # die if we still haven't found a host
-        if not self.host:
-            raise ValueError("No host was found in event parents. Host must be specified!")
+        if not self.host and not self.data.get("path", ""):
+            raise ValueError(f"No host was found in event parents: {self.get_parents()}. Host must be specified!")


 class DictPathEvent(DictEvent):
@@ -1399,14 +1399,16 @@ class HTTP_RESPONSE(URL_UNVERIFIED, DictEvent):
         return set()

     def _pretty_string(self):
-        return f
+        return f"{self.data['hash']['header_mmh3']}:{self.data['hash']['body_mmh3']}"

     @property
     def raw_response(self):
         """
         Formats the status code, headers, and body into a single string formatted as an HTTP/1.1 response.
         """
-
+        raw_header = self.data.get("raw_header", "")
+        body = self.data.get("body", "")
+        return f"{raw_header}{body}"

     @property
     def http_status(self):
@@ -1463,7 +1465,7 @@ class VULNERABILITY(ClosestHostEvent):
     _validate_severity = field_validator("severity")(validators.validate_severity)

     def _pretty_string(self):
-        return f
+        return f"[{self.data['severity']}] {self.data['description']}"


 class FINDING(ClosestHostEvent):
bbot/core/helpers/async_helpers.py
CHANGED
@@ -51,12 +51,18 @@ class NamedLock:
 class TaskCounter:
     def __init__(self):
         self.tasks = {}
-        self.
+        self._lock = None

     @property
     def value(self):
         return sum([t.n for t in self.tasks.values()])

+    @property
+    def lock(self):
+        if self._lock is None:
+            self._lock = asyncio.Lock()
+        return self._lock
+
     def count(self, task_name, n=1, _log=True):
         if callable(task_name):
             task_name = f"{task_name.__qualname__}()"
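The TaskCounter change above (and the matching changes in dns/brute.py and ratelimiter.py below) swaps an eagerly created lock for a lazily created one: the asyncio.Lock is only built the first time it is needed, from inside running async code, so the object can be constructed before any event loop exists. On older Python versions an asyncio.Lock created outside a running loop could end up bound to the wrong loop. A minimal, self-contained sketch of the pattern (the TaskPool class here is hypothetical):

import asyncio

class TaskPool:
    def __init__(self):
        # No event loop is required at construction time.
        self._lock = None

    @property
    def lock(self):
        # The asyncio.Lock is created on first access, inside running async code.
        if self._lock is None:
            self._lock = asyncio.Lock()
        return self._lock

pool = TaskPool()  # safe even though no event loop is running yet

async def main():
    async with pool.lock:
        print("lock acquired")

asyncio.run(main())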
bbot/core/helpers/depsinstaller/installer.py
CHANGED
@@ -31,7 +31,6 @@ class DepsInstaller:
         "gcc": "gcc",
         "bash": "bash",
         "which": "which",
-        "unrar": "unrar-free",
         "tar": "tar",
         # debian why are you like this
         "7z": [
@@ -47,6 +46,12 @@ class DepsInstaller:
                 "become": True,
                 "when": "ansible_facts['os_family'] != 'Debian'",
             },
+            {
+                "name": "Install p7zip-plugins (Fedora)",
+                "package": {"name": ["p7zip-plugins"], "state": "present"},
+                "become": True,
+                "when": "ansible_facts['distribution'] == 'Fedora'",
+            },
         ],
     }

@@ -271,7 +276,7 @@ class DepsInstaller:
                 command["cmd"] += f" && touch {command_status_file}"
             tasks.append(
                 {
-                    "name": f"{module}.deps_shell step {i+1}",
+                    "name": f"{module}.deps_shell step {i + 1}",
                     "ansible.builtin.shell": command,
                     "args": {"executable": "/bin/bash", "creates": str(command_status_file)},
                 }
bbot/core/helpers/diff.py
CHANGED
@@ -98,7 +98,9 @@ class HttpCompare:
         baseline_1_json = baseline_1.text.split("\n")
         baseline_2_json = baseline_2.text.split("\n")

-        ddiff = DeepDiff(
+        ddiff = DeepDiff(
+            baseline_1_json, baseline_2_json, ignore_order=True, view="tree", threshold_to_diff_deeper=0
+        )
         self.ddiff_filters = []

         for k in ddiff.keys():
@@ -135,10 +137,10 @@ class HttpCompare:
             for header, value in list(headers.items()):
                 if header.lower() in self.baseline_ignore_headers:
                     with suppress(KeyError):
-                        log.debug(f'found ignored header "{header}" in headers_{i+1} and removed')
+                        log.debug(f'found ignored header "{header}" in headers_{i + 1} and removed')
                         del headers[header]

-        ddiff = DeepDiff(headers_1, headers_2, ignore_order=True, view="tree")
+        ddiff = DeepDiff(headers_1, headers_2, ignore_order=True, view="tree", threshold_to_diff_deeper=0)

         for k in ddiff.keys():
             for x in list(ddiff[k]):
@@ -153,7 +155,14 @@ class HttpCompare:
         if content_1 == content_2:
             return True

-        ddiff = DeepDiff(
+        ddiff = DeepDiff(
+            content_1,
+            content_2,
+            ignore_order=True,
+            view="tree",
+            exclude_paths=self.ddiff_filters,
+            threshold_to_diff_deeper=0,
+        )

         if len(ddiff.keys()) == 0:
             return True
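The HttpCompare changes above add threshold_to_diff_deeper=0 to each DeepDiff call (a parameter available in newer deepdiff releases), which keeps the library from switching to a deeper, differently-keyed diff when many values change at once. A minimal sketch of the call pattern with illustrative header dicts (assuming a deepdiff version that supports threshold_to_diff_deeper):

from deepdiff import DeepDiff

headers_1 = {"Content-Type": "text/html", "Server": "nginx", "X-Request-Id": "aaa"}
headers_2 = {"Content-Type": "text/html", "Server": "nginx", "X-Request-Id": "bbb"}

# ignore_order tolerates reordering, view="tree" yields result objects with .path(),
# and threshold_to_diff_deeper=0 keeps the top-level diff structure stable.
ddiff = DeepDiff(headers_1, headers_2, ignore_order=True, view="tree", threshold_to_diff_deeper=0)
for change_type, changes in ddiff.items():
    for change in changes:
        print(change_type, change.path())  # values_changed root['X-Request-Id']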
bbot/core/helpers/dns/brute.py
CHANGED
@@ -29,11 +29,17 @@ class DNSBrute:
         self.devops_mutations = list(self.parent_helper.word_cloud.devops_mutations)
         self.digit_regex = self.parent_helper.re.compile(r"\d+")
         self._resolver_file = None
-        self._dnsbrute_lock =
+        self._dnsbrute_lock = None

     async def __call__(self, *args, **kwargs):
         return await self.dnsbrute(*args, **kwargs)

+    @property
+    def dnsbrute_lock(self):
+        if self._dnsbrute_lock is None:
+            self._dnsbrute_lock = asyncio.Lock()
+        return self._dnsbrute_lock
+
     async def dnsbrute(self, module, domain, subdomains, type=None):
         subdomains = list(subdomains)

@@ -119,7 +125,7 @@ class DNSBrute:
         )
         subdomains = self.gen_subdomains(subdomains, domain)
         hosts_yielded = set()
-        async with self.
+        async with self.dnsbrute_lock:
             async for line in module.run_process_live(*command, stderr=subprocess.DEVNULL, input=subdomains):
                 try:
                     j = json.loads(line)
bbot/core/helpers/dns/engine.py
CHANGED
@@ -658,7 +658,8 @@ class DNSEngine(EngineServer):
             assert self.in_tests, "Can only mock when BBOT_TESTING=True"
             if func_source is None:
                 return None
-
-
+            namespace = {}
+            exec(func_source, {}, namespace)
+            return namespace["custom_lookup"]

         self.resolver = MockResolver(mock_data, custom_lookup_fn=deserialize_function(custom_lookup_fn))
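The mock helper above rebuilds a callable from its source text: exec() the source into a scratch namespace, then pull the function object back out by name. A minimal standalone sketch (the custom_lookup body and signature here are made up for illustration):

func_source = '''
def custom_lookup(query, rdtype):
    if query == "example.com" and rdtype == "A":
        return {"192.0.2.1"}
'''

namespace = {}
exec(func_source, {}, namespace)          # the def statement binds into `namespace`
custom_lookup = namespace["custom_lookup"]
print(custom_lookup("example.com", "A"))  # {'192.0.2.1'}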
bbot/core/helpers/ratelimiter.py
CHANGED
@@ -26,9 +26,15 @@ class RateLimiter:
         self.log_interval = 10
         self.current_timestamp = time.time()
         self.count = 0
-        self.
+        self._lock = None
         self.last_notification = None

+    @property
+    def lock(self):
+        if self._lock is None:
+            self._lock = asyncio.Lock()
+        return self._lock
+
     async def __aenter__(self):
         async with self.lock:
             while True:
@@ -44,7 +50,7 @@ class RateLimiter:
                 else:
                     now = time.time()
                     if self.last_notification is None or now - self.last_notification >= self.log_interval:
-                        log.verbose(f"{self.name} rate limit threshold ({self.rate*10:.1f}/s) reached")
+                        log.verbose(f"{self.name} rate limit threshold ({self.rate * 10:.1f}/s) reached")
                         self.last_notification = now
                 # Rate limit for the current 0.1 second interval has been reached, wait until the next interval
                 await asyncio.sleep(self.current_timestamp + 0.1 - time.time())
bbot/core/helpers/regexes.py
CHANGED
@@ -38,9 +38,12 @@ _ip_range_regexes = (
 )
 ip_range_regexes = [re.compile(r, re.I) for r in _ip_range_regexes]

-# dns names
+# all dns names including IP addresses and bare hostnames (e.g. "localhost")
 _dns_name_regex = r"(?:\w(?:[\w-]{0,100}\w)?\.?)+(?:[xX][nN]--)?[^\W_]{1,63}\.?"
-
+# dns names with periods (e.g. "www.example.com")
+_dns_name_regex_with_period = r"(?:\w(?:[\w-]{0,100}\w)?\.)+(?:[xX][nN]--)?[^\W_]{1,63}\.?"
+
+dns_name_extraction_regex = re.compile(_dns_name_regex_with_period, re.I)
 dns_name_validation_regex = re.compile(r"^" + _dns_name_regex + r"$", re.I)

 _email_regex = r"(?:[^\W_][\w\-\.\+']{,100})@" + _dns_name_regex
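The two patterns above differ only in whether each label's trailing dot is optional: the validation regex still accepts bare hostnames, while the new extraction regex requires at least one dot, so it will not pull bare words like "localhost" out of page content. A small sketch using the exact patterns from the diff:

import re

_dns_name_regex = r"(?:\w(?:[\w-]{0,100}\w)?\.?)+(?:[xX][nN]--)?[^\W_]{1,63}\.?"
_dns_name_regex_with_period = r"(?:\w(?:[\w-]{0,100}\w)?\.)+(?:[xX][nN]--)?[^\W_]{1,63}\.?"

dns_name_validation_regex = re.compile(r"^" + _dns_name_regex + r"$", re.I)
dns_name_extraction_regex = re.compile(_dns_name_regex_with_period, re.I)

print(bool(dns_name_validation_regex.match("localhost")))              # True
print(dns_name_extraction_regex.findall("visit www.example.com now"))  # ['www.example.com']
print(dns_name_extraction_regex.findall("localhost"))                  # []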
bbot/core/helpers/web/engine.py
CHANGED
@@ -152,7 +152,7 @@ class HTTPEngine(EngineServer):
                     log.verbose(
                         f"Size of response from {url} exceeds {bytes_to_human(max_size)}, file will be truncated"
                     )
-                    agen.aclose()
+                    await agen.aclose()
                     break
                 total_size += _chunk_size
                 chunks.append(chunk)
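The fix above matters because aclose() on an async generator returns a coroutine; calling it without await never actually runs the generator's cleanup (and emits a "coroutine was never awaited" warning). A minimal sketch:

import asyncio

async def chunks():
    try:
        while True:
            yield b"data"
    finally:
        print("generator cleanup ran")

async def main():
    agen = chunks()
    await agen.__anext__()  # consume one chunk
    await agen.aclose()     # without `await`, the finally block would never run

asyncio.run(main())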
bbot/core/helpers/web/web.py
CHANGED
@@ -385,7 +385,7 @@ class WebHelper(EngineClient):
             cookies_str = ""
             for k, v in cookies.items():
                 cookies_str += f"{k}={v}; "
-            curl_command.append(f
+            curl_command.append(f"{cookies_str.rstrip(' ')}")

         path_override = kwargs.get("path_override", None)
         if path_override:
bbot/core/shared_deps.py
CHANGED
@@ -119,6 +119,20 @@ DEP_CHROMIUM = [
         "when": "ansible_facts['os_family'] == 'Debian'",
         "ignore_errors": True,
     },
+    # Because Ubuntu is a special snowflake, we have to bend over backwards to fix the chrome sandbox
+    # see https://chromium.googlesource.com/chromium/src/+/main/docs/security/apparmor-userns-restrictions.md
+    {
+        "name": "Chown chrome_sandbox to root:root",
+        "command": {"cmd": "chown -R root:root #{BBOT_TOOLS}/chrome-linux/chrome_sandbox"},
+        "when": "ansible_facts['os_family'] == 'Debian'",
+        "become": True,
+    },
+    {
+        "name": "Chmod chrome_sandbox to 4755",
+        "command": {"cmd": "chmod -R 4755 #{BBOT_TOOLS}/chrome-linux/chrome_sandbox"},
+        "when": "ansible_facts['os_family'] == 'Debian'",
+        "become": True,
+    },
 ]

 DEP_MASSCAN = [
bbot/defaults.yml
CHANGED
@@ -176,6 +176,50 @@ url_extension_blacklist:
 # Distribute URLs with these extensions only to httpx (these are omitted from output)
 url_extension_httpx_only:
   - js
+
+# These url extensions are almost always static, so we exclude them from modules that fuzz things
+url_extension_static:
+  - pdf
+  - doc
+  - docx
+  - xls
+  - xlsx
+  - ppt
+  - pptx
+  - txt
+  - csv
+  - xml
+  - yaml
+  - ini
+  - log
+  - conf
+  - cfg
+  - env
+  - md
+  - rtf
+  - tiff
+  - bmp
+  - jpg
+  - jpeg
+  - png
+  - gif
+  - svg
+  - ico
+  - mp3
+  - wav
+  - flac
+  - mp4
+  - mov
+  - avi
+  - mkv
+  - webm
+  - zip
+  - tar
+  - gz
+  - bz2
+  - 7z
+  - rar
+
 # Don't output these types of events (they are still distributed to modules)
 omit_event_types:
   - HTTP_RESPONSE
bbot/modules/ajaxpro.py
CHANGED
@@ -1,4 +1,5 @@
 import regex as re
+from urllib.parse import urlparse
 from bbot.modules.base import BaseModule


@@ -18,41 +19,67 @@ class ajaxpro(BaseModule):
     }

     async def handle_event(self, event):
-        if event.type == "URL":
-
-            return False
-        for stem in ["ajax", "ajaxpro"]:
-            probe_url = f"{event.data}{stem}/whatever.ashx"
-            probe = await self.helpers.request(probe_url)
-            if probe:
-                if probe.status_code == 200:
-                    probe_confirm = await self.helpers.request(f"{event.data}a/whatever.ashx")
-                    if probe_confirm:
-                        if probe_confirm.status_code != 200:
-                            await self.emit_event(
-                                {
-                                    "host": str(event.host),
-                                    "url": event.data,
-                                    "description": f"Ajaxpro Detected (Version Unconfirmed) Trigger: [{probe_url}]",
-                                },
-                                "FINDING",
-                                event,
-                                context="{module} discovered Ajaxpro instance ({event.type}) at {event.data}",
-                            )
-
+        if event.type == "URL" and "dir" in event.tags:
+            await self.check_url_event(event)
         elif event.type == "HTTP_RESPONSE":
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            await self.check_http_response_event(event)
+
+    async def check_url_event(self, event):
+        for stem in ["ajax", "ajaxpro"]:
+            probe_url = f"{event.data}{stem}/whatever.ashx"
+            probe = await self.helpers.request(probe_url)
+            if probe and probe.status_code == 200:
+                confirm_url = f"{event.data}a/whatever.ashx"
+                confirm_probe = await self.helpers.request(confirm_url)
+                if confirm_probe and confirm_probe.status_code != 200:
+                    await self.emit_technology(event, probe_url)
+                    await self.confirm_exploitability(probe_url, event)
+
+    async def check_http_response_event(self, event):
+        resp_body = event.data.get("body")
+        if resp_body:
+            match = await self.helpers.re.search(self.ajaxpro_regex, resp_body)
+            if match:
+                ajaxpro_path = match.group(0)
+                await self.emit_technology(event, ajaxpro_path)
+                await self.confirm_exploitability(ajaxpro_path, event)
+
+    async def emit_technology(self, event, detection_url):
+        url = event.data if event.type == "URL" else event.data["url"]
+        await self.emit_event(
+            {
+                "host": str(event.host),
+                "url": url,
+                "technology": "ajaxpro",
+            },
+            "TECHNOLOGY",
+            event,
+            context=f"{self.meta['description']} discovered Ajaxpro instance ({event.type}) at {url} with trigger {detection_url}",
+        )
+
+    # Confirm exploitability of the detected Ajaxpro instance
+    async def confirm_exploitability(self, detection_url, event):
+        self.debug("Ajaxpro detected, attempting to confirm exploitability")
+        parsed_url = urlparse(detection_url)
+        base_url = f"{parsed_url.scheme}://{parsed_url.netloc}"
+        path = parsed_url.path.rsplit("/", 1)[0]
+        full_url = f"{base_url}{path}/AjaxPro.Services.ICartService,AjaxPro.2.ashx"
+
+        # Payload and headers defined inline
+        payload = {}
+        headers = {"X-Ajaxpro-Method": "AddItem"}
+
+        probe_response = await self.helpers.request(full_url, method="POST", headers=headers, json=payload)
+        if probe_response:
+            if "AjaxPro.Services.ICartService" and "MissingMethodException" in probe_response.text:
+                await self.emit_event(
+                    {
+                        "host": str(event.host),
+                        "severity": "CRITICAL",
+                        "url": event.data if event.type == "URL" else event.data["url"],
+                        "description": f"Ajaxpro Deserialization RCE (CVE-2021-23758) Trigger: [{full_url}]",
+                    },
+                    "VULNERABILITY",
+                    event,
+                    context=f"{self.meta['description']} discovered Ajaxpro instance ({event.type}) at {detection_url}",
+                )
bbot/modules/baddns.py
CHANGED
@@ -87,10 +87,12 @@ class baddns(BaseModule):
                 for r in results:
                     r_dict = r.to_dict()

-
+                    confidence = r_dict["confidence"]
+
+                    if confidence in ["CONFIRMED", "PROBABLE"]:
                         data = {
                             "severity": "MEDIUM",
-                            "description": f"{r_dict['description']}. Confidence: [{
+                            "description": f"{r_dict['description']}. Confidence: [{confidence}] Signature: [{r_dict['signature']}] Indicator: [{r_dict['indicator']}] Trigger: [{r_dict['trigger']}] baddns Module: [{r_dict['module']}]",
                             "host": str(event.host),
                         }
                         await self.emit_event(
@@ -101,20 +103,26 @@ class baddns(BaseModule):
                             context=f'{{module}}\'s "{r_dict["module"]}" module found {{event.type}}: {r_dict["description"]}',
                         )

-                    elif
-
-
-
-
-
-
-
-
-
-
-
+                    elif confidence in ["UNLIKELY", "POSSIBLE"]:
+                        if not self.only_high_confidence:
+                            data = {
+                                "description": f"{r_dict['description']} Confidence: [{confidence}] Signature: [{r_dict['signature']}] Indicator: [{r_dict['indicator']}] Trigger: [{r_dict['trigger']}] baddns Module: [{r_dict['module']}]",
+                                "host": str(event.host),
+                            }
+                            await self.emit_event(
+                                data,
+                                "FINDING",
+                                event,
+                                tags=[f"baddns-{module_instance.name.lower()}"],
+                                context=f'{{module}}\'s "{r_dict["module"]}" module found {{event.type}}: {r_dict["description"]}',
+                            )
+                        else:
+                            self.debug(
+                                f"Skipping low-confidence result due to only_high_confidence setting: {confidence}"
+                            )
+
                     else:
-                        self.warning(f"Got unrecognized confidence level: {
+                        self.warning(f"Got unrecognized confidence level: {confidence}")

                     found_domains = r_dict.get("found_domains", None)
                     if found_domains:
bbot/modules/baddns_direct.py
CHANGED
@@ -49,7 +49,7 @@ class baddns_direct(BaseModule):
             "direct_mode": True,
         }

-        CNAME_direct_instance = CNAME_direct_module(event.host, **kwargs)
+        CNAME_direct_instance = CNAME_direct_module(str(event.host), **kwargs)
         if await CNAME_direct_instance.dispatch():
             results = CNAME_direct_instance.analyze()
             if results and len(results) > 0:
@@ -78,7 +78,7 @@ class baddns_direct(BaseModule):
         if event.type == "URL":
            if event.scope_distance > 0:
                 self.debug(
-                    f"Rejecting {event.host} due to not being in scope (scope distance: {
+                    f"Rejecting {event.host} due to not being in scope (scope distance: {event.scope_distance})"
                 )
                 return False
         if "cdn-cloudflare" not in event.tags:
bbot/modules/badsecrets.py
CHANGED
@@ -69,7 +69,7 @@ class badsecrets(BaseModule):
             if r["type"] == "SecretFound":
                 data = {
                     "severity": r["description"]["severity"],
-                    "description": f"Known Secret Found. Secret Type: [{r['description']['secret']}] Secret: [{r['secret']}] Product Type: [{r['description']['product']}] Product: [{self.helpers.truncate_string(r['product'],2000)}] Detecting Module: [{r['detecting_module']}] Details: [{r['details']}]",
+                    "description": f"Known Secret Found. Secret Type: [{r['description']['secret']}] Secret: [{r['secret']}] Product Type: [{r['description']['product']}] Product: [{self.helpers.truncate_string(r['product'], 2000)}] Detecting Module: [{r['detecting_module']}] Details: [{r['details']}]",
                     "url": event.data["url"],
                     "host": str(event.host),
                 }
@@ -91,7 +91,7 @@ class badsecrets(BaseModule):
                 )
             else:
                 data = {
-                    "description": f"Cryptographic Product identified. Product Type: [{r['description']['product']}] Product: [{self.helpers.truncate_string(r['product'],2000)}] Detecting Module: [{r['detecting_module']}]",
+                    "description": f"Cryptographic Product identified. Product Type: [{r['description']['product']}] Product: [{self.helpers.truncate_string(r['product'], 2000)}] Detecting Module: [{r['detecting_module']}]",
                     "url": event.data["url"],
                     "host": str(event.host),
                 }