bbot-2.4.2.6596rc0-py3-none-any.whl → bbot-2.4.2.6611rc0-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of bbot might be problematic.
- bbot/__init__.py +1 -1
- bbot/modules/internal/excavate.py +53 -43
- bbot/modules/lightfuzz/submodules/serial.py +1 -1
- bbot/test/test_step_2/module_tests/base.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_excavate.py +14 -0
- {bbot-2.4.2.6596rc0.dist-info → bbot-2.4.2.6611rc0.dist-info}/METADATA +1 -1
- {bbot-2.4.2.6596rc0.dist-info → bbot-2.4.2.6611rc0.dist-info}/RECORD +10 -12
- bbot/modules/dastardly.py +0 -137
- bbot/test/test_step_2/module_tests/test_module_dastardly.py +0 -70
- {bbot-2.4.2.6596rc0.dist-info → bbot-2.4.2.6611rc0.dist-info}/LICENSE +0 -0
- {bbot-2.4.2.6596rc0.dist-info → bbot-2.4.2.6611rc0.dist-info}/WHEEL +0 -0
- {bbot-2.4.2.6596rc0.dist-info → bbot-2.4.2.6611rc0.dist-info}/entry_points.txt +0 -0
bbot/modules/internal/excavate.py
CHANGED
@@ -5,7 +5,7 @@ import time
 import inspect
 import regex as re
 from pathlib import Path
-from bbot.errors import ExcavateError
+from bbot.errors import ExcavateError, ValidationError
 import bbot.core.helpers.regexes as bbot_regexes
 from bbot.modules.base import BaseInterceptModule
 from bbot.modules.internal.base import BaseInternalModule
@@ -622,14 +622,15 @@ class excavate(BaseInternalModule, BaseInterceptModule):
                         base_url += f"?{event.parsed_url.query}"
                     url = urljoin(base_url, endpoint)
 
+                    try:
+                        # Validate the URL before using it
+                        parsed_url = self.excavate.helpers.validators.validate_url_parsed(url)
+                    except (ValidationError, ValueError) as e:
+                        self.excavate.debug(f"Invalid URL [{url}]: {e}")
+                        continue
+
                     if self.excavate.helpers.validate_parameter(parameter_name, parameter_type):
                         if self.excavate.in_bl(parameter_name) is False:
-                            parsed_url = urlparse(url)
-                            if not parsed_url.hostname:
-                                self.excavate.warning(
-                                    f"Error Parsing reconstructed URL [{url}] during parameter extraction, missing hostname"
-                                )
-                                continue
                             description = f"HTTP Extracted Parameter [{parameter_name}] ({parameterExtractorSubModule.name} Submodule)"
                             data = {
                                 "host": parsed_url.hostname,
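The removed urlparse check above only caught a missing hostname. The new validators call can also reject structurally invalid URLs; its internals aren't part of this diff, so the following is a minimal sketch of the kind of check validate_url_parsed is expected to perform (hypothetical reimplementation, standard library only — not bbot's actual helper):

    from urllib.parse import ParseResult, urlparse

    class ValidationError(ValueError):
        """Stand-in for bbot.errors.ValidationError."""

    def validate_url_parsed(url: str) -> ParseResult:
        # Hypothetical sketch: reject a missing hostname or an out-of-range port
        parsed = urlparse(url)
        if not parsed.hostname:
            raise ValidationError(f"missing hostname in {url!r}")
        try:
            parsed.port  # urllib parses the port lazily; values above 65535 raise ValueError here
        except ValueError as e:
            raise ValidationError(str(e))
        return parsed

Unlike the old hostname-only check, a validator of this shape also rejects URLs such as https://asdffoo.test.notreal:9212952841/, whose hostname parses cleanly but whose port is out of range — the exact case covered by the new TestExcavateURL_InvalidPort test further down.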
@@ -848,45 +849,51 @@ class excavate(BaseInternalModule, BaseInterceptModule):
             urls_found = 0
             final_url = ""
             for url_str in results:
-                if identifier == "url_full":
-                    if not await self.helpers.re.search(self.full_url_regex, url_str):
-                        self.excavate.debug(
-                            f"Rejecting potential full URL [{url_str}] as did not match full_url_regex"
-                        )
-                        continue
-                    final_url = url_str
-                    self.excavate.debug(f"Discovered Full URL [{final_url}]")
-
-                elif identifier == "url_attr" and hasattr(event, "parsed_url"):
-                    m = await self.helpers.re.search(self.tag_attribute_regex, url_str)
-                    if not m:
-                        self.excavate.debug(
-                            f"Rejecting potential attribute URL [{url_str}] as did not match tag_attribute_regex"
-                        )
-                        continue
-                    unescaped_url = html.unescape(m.group(1))
-                    source_url = event.parsed_url.geturl()
-                    final_url = urldefrag(urljoin(source_url, unescaped_url)).url
-                    if not await self.helpers.re.search(self.full_url_regex_strict, final_url):
-                        self.excavate.debug(
-                            f"Rejecting reconstructed URL [{final_url}] as did not match full_url_regex_strict"
-                        )
-                        continue
-                    self.excavate.debug(
-                        f"Reconstructed Full URL [{final_url}] from extracted relative URL [{unescaped_url}] "
-                    )
-
-                if final_url:
-                    if self.excavate.scan.in_scope(final_url):
-                        urls_found += 1
-                    await self.report(
-                        final_url,
-                        event,
-                        yara_rule_settings,
-                        discovery_context,
-                        event_type="URL_UNVERIFIED",
-                        urls_found=urls_found,
-                    )
+                try:
+                    if identifier == "url_full":
+                        if not await self.helpers.re.search(self.full_url_regex, url_str):
+                            self.excavate.debug(
+                                f"Rejecting potential full URL [{url_str}] as did not match full_url_regex"
+                            )
+                            continue
+                        final_url = url_str
+                        self.excavate.debug(f"Discovered Full URL [{final_url}]")
+
+                    elif identifier == "url_attr" and hasattr(event, "parsed_url"):
+                        m = await self.helpers.re.search(self.tag_attribute_regex, url_str)
+                        if not m:
+                            self.excavate.debug(
+                                f"Rejecting potential attribute URL [{url_str}] as did not match tag_attribute_regex"
+                            )
+                            continue
+                        unescaped_url = html.unescape(m.group(1))
+                        source_url = event.parsed_url.geturl()
+                        final_url = urldefrag(urljoin(source_url, unescaped_url)).url
+                        if not await self.helpers.re.search(self.full_url_regex_strict, final_url):
+                            self.excavate.debug(
+                                f"Rejecting reconstructed URL [{final_url}] as did not match full_url_regex_strict"
+                            )
+                            continue
+                        self.excavate.debug(
+                            f"Reconstructed Full URL [{final_url}] from extracted relative URL [{unescaped_url}] "
+                        )
+
+                    if final_url:
+                        # Validate the URL before using it
+                        self.excavate.helpers.validators.validate_url_parsed(final_url)
+                        if self.excavate.scan.in_scope(final_url):
+                            urls_found += 1
+                        await self.report(
+                            final_url,
+                            event,
+                            yara_rule_settings,
+                            discovery_context,
+                            event_type="URL_UNVERIFIED",
+                            urls_found=urls_found,
+                        )
+                except (ValidationError, ValueError) as e:
+                    self.excavate.debug(f"Invalid URL [{url_str if not final_url else final_url}]: {e}")
+                    continue
 
         async def report_prep(self, event_data, event_type, event, tags, **kwargs):
             event_draft = self.excavate.make_event(event_data, event_type, parent=event)
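The url_attr branch rebuilds an absolute URL from a tag attribute in three standard-library steps — unescape HTML entities, resolve against the page URL, drop the fragment — and the new except clause logs whichever value actually failed (the raw match if no URL was reconstructed yet, otherwise the reconstructed one). The reconstruction steps can be exercised in isolation (example values are made up):

    import html
    from urllib.parse import urldefrag, urljoin

    source_url = "https://example.com/app/page.html"  # page the attribute came from
    raw_attr = "/search?q=1&amp;lang=en#top"           # attribute value as matched
    unescaped_url = html.unescape(raw_attr)            # -> /search?q=1&lang=en#top

    final_url = urldefrag(urljoin(source_url, unescaped_url)).url
    print(final_url)  # https://example.com/search?q=1&lang=en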
@@ -1114,7 +1121,10 @@ class excavate(BaseInternalModule, BaseInterceptModule):
 
         # Check if rule processing function exists
         if rule_name in self.yara_preprocess_dict:
-            await self.yara_preprocess_dict[rule_name](result, event, discovery_context)
+            try:
+                await self.yara_preprocess_dict[rule_name](result, event, discovery_context)
+            except ValidationError as e:
+                self.debug(f"ValidationError in rule {rule_name} for result {result}: {e}")
         else:
             self.hugewarning(f"YARA Rule {rule_name} not found in pre-compiled rules")
 
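With this change, a ValidationError raised inside a rule's preprocessing function is logged and swallowed at the dispatch layer instead of escaping YARA result handling. A self-contained sketch of the dispatch pattern (the rule name and callback here are hypothetical stand-ins, not bbot internals):

    import asyncio

    class ValidationError(ValueError):
        """Stand-in for bbot.errors.ValidationError."""

    async def process_urls(result, event, discovery_context):
        # Hypothetical preprocess callback that lets a validator failure propagate
        raise ValidationError(f"bad result: {result!r}")

    yara_preprocess_dict = {"excavate_url": process_urls}

    async def main():
        rule_name, result = "excavate_url", "https://x:9212952841/"
        if rule_name in yara_preprocess_dict:
            try:
                await yara_preprocess_dict[rule_name](result, None, "sample context")
            except ValidationError as e:
                print(f"ValidationError in rule {rule_name} for result {result}: {e}")

    asyncio.run(main())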
bbot/modules/lightfuzz/submodules/serial.py
CHANGED

@@ -22,7 +22,7 @@ class serial(BaseLightfuzz):
     CONTROL_PAYLOAD_PHP_RAW = "z:0:{}"
 
     BASE64_SERIALIZATION_PAYLOADS = {
-        "php_base64": "
+        "php_base64": "YToxOntpOjA7aToxO30=",
         "java_base64": "rO0ABXNyABFqYXZhLmxhbmcuQm9vbGVhbs0gcoDVnPruAgABWgAFdmFsdWV4cAA=",
         "java_base64_string_error": "rO0ABXQABHRlc3Q=",
         "java_base64_OptionalDataException": "rO0ABXcEAAAAAAEAAAABc3IAEGphdmEudXRpbC5IYXNoTWFwAAAAAAAAAAECAAJMAARrZXkxYgABAAAAAAAAAAJ4cHcBAAAAB3QABHRlc3Q=",
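The new php_base64 value decodes to a minimal serialized PHP array (one element, int key 0, int value 1), which can be verified with the standard library alone; this fits the submodule's pattern of sending a small valid serialized object per language:

    import base64

    payload = "YToxOntpOjA7aToxO30="
    print(base64.b64decode(payload))  # b'a:1:{i:0;i:1;}'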
bbot/test/test_step_2/module_tests/base.py
CHANGED

@@ -91,9 +91,9 @@ class ModuleTestBase:
     async def module_test(
         self, httpx_mock, bbot_httpserver, bbot_httpserver_ssl, monkeypatch, request, caplog, capsys
     ):
-        #
+        # If a test uses docker, we can't run it in the distro tests
         if os.getenv("BBOT_DISTRO_TESTS") and self.skip_distro_tests:
-            pytest.skip("Skipping
+            pytest.skip("Skipping test since it uses docker")
 
         self.log.info(f"Starting {self.name} module test")
         module_test = self.ModuleTest(
bbot/test/test_step_2/module_tests/test_module_excavate.py
CHANGED

@@ -1418,3 +1418,17 @@ class TestExcavateBadURLs(ModuleTestBase):
 
         url_events = [e for e in events if e.type == "URL_UNVERIFIED"]
         assert sorted([e.data for e in url_events]) == sorted(["https://ssl/", "http://127.0.0.1:8888/"])
+
+
+class TestExcavateURL_InvalidPort(TestExcavate):
+    modules_overrides = ["excavate", "httpx", "hunt"]
+
+    async def setup_before_prep(self, module_test):
+        # Test URL with invalid port (greater than 65535)
+        module_test.httpserver.expect_request("/").respond_with_data(
+            '<div><img loading="lazy" src="https://asdffoo.test.notreal:9212952841/whatever.jpg" width="576" height="382" alt="...." /></div>'
+        )
+
+    def check(self, module_test, events):
+        # Verify we got the hostname
+        assert any(e.data == "asdffoo.test.notreal" for e in events)
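The regression test feeds excavate an img src whose port (9212952841) is far above 65535. urllib.parse only reports an invalid port lazily, on attribute access, so the hostname is still recoverable from the otherwise-invalid URL — which is presumably why the test asserts that the bare DNS name still shows up as an event even though the URL itself is rejected:

    from urllib.parse import urlparse

    u = urlparse("https://asdffoo.test.notreal:9212952841/whatever.jpg")
    print(u.hostname)  # asdffoo.test.notreal -- extractable despite the bad port

    try:
        u.port  # the port is validated only when accessed
    except ValueError as e:
        print(e)  # Port out of range 0-65535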
{bbot-2.4.2.6596rc0.dist-info → bbot-2.4.2.6611rc0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: bbot
-Version: 2.4.2.6596rc0
+Version: 2.4.2.6611rc0
 Summary: OSINT automation for hackers.
 License: GPL-3.0
 Keywords: python,cli,automation,osint,threat-intel,intelligence,neo4j,scanner,python-library,hacking,recursion,pentesting,recon,command-line-tool,bugbounty,subdomains,security-tools,subdomain-scanner,osint-framework,attack-surface,subdomain-enumeration,osint-tool
{bbot-2.4.2.6596rc0.dist-info → bbot-2.4.2.6611rc0.dist-info}/RECORD
CHANGED

@@ -1,4 +1,4 @@
-bbot/__init__.py,sha256=
+bbot/__init__.py,sha256=Sj6Ww0s5mr9AjzAEu-u6gU8g5aBo7xw-OHJg-aVo95Q,163
 bbot/cli.py,sha256=1QJbANVw9Q3GFM92H2QRV2ds5756ulm08CDZwzwPpeI,11888
 bbot/core/__init__.py,sha256=l255GJE_DvUnWvrRb0J5lG-iMztJ8zVvoweDOfegGtI,46
 bbot/core/config/__init__.py,sha256=zYNw2Me6tsEr8hOOkLb4BQ97GB7Kis2k--G81S8vofU,342
@@ -83,7 +83,6 @@ bbot/modules/code_repository.py,sha256=x70Z45VnNNMF8BPkHfGWZXsZXw_fStGB3y0-8jbP1
 bbot/modules/credshed.py,sha256=HAF5wgRGKIIpdMAe4mIAtkZRLmFYjMFyXtjjst6RJ20,4203
 bbot/modules/crt.py,sha256=6Zm90VKXwYYN6Sab0gwwhTARrtnQIqALJTVtFWMMTGk,1369
 bbot/modules/crt_db.py,sha256=xaIm2457_xGJjnKss73l1HpPn7pLPHksVzejsimTfZA,2198
-bbot/modules/dastardly.py,sha256=lLKfd00UTSXlKd2XUdbv5SYI-oUuLfQcBJ_iuXoWsbU,5325
 bbot/modules/dehashed.py,sha256=iyzWHmJs6zC7FsRhw9_AdkckQKCf_0oNnL9RwG409r0,5071
 bbot/modules/digitorus.py,sha256=XQY0eAQrA7yo8S57tGncP1ARud-yG4LiWxx5VBYID34,1027
 bbot/modules/dnsbimi.py,sha256=A4cqhvhytmEEd-tY4CgFwMLbsVtMjkRY9238Aj8aVtU,6921
@@ -124,7 +123,7 @@ bbot/modules/internal/aggregate.py,sha256=HnnfTX2GYsOz8IFtfrRX1uXV6rvFx4uG9lmYJF
 bbot/modules/internal/base.py,sha256=BXO4Hc7XKaAOaLzolF3krJX1KibPxtek2GTQUgnCHk0,387
 bbot/modules/internal/cloudcheck.py,sha256=ay6MvZFbDvdhAlFPe_kEITM4wRsfRgQJf1DLBTcZ2jM,5138
 bbot/modules/internal/dnsresolve.py,sha256=1fwWChIGpSEIIkswueiIhEwIahQ7YngZ-njFK-RIsfU,15679
-bbot/modules/internal/excavate.py,sha256=
+bbot/modules/internal/excavate.py,sha256=L8tGdfdvxrvfskC1Ms9UtSy-gxudnQcW7Iv5tHNAbW4,63728
 bbot/modules/internal/speculate.py,sha256=ua35Da-f0-fnK0oXtx4DeGJAT19bfqnmLfetSUfJnIk,9262
 bbot/modules/internal/unarchive.py,sha256=sA6KYQnhkyHq0mHwhRESHy9wkaRE43PjPkShWW0mOvM,3763
 bbot/modules/ip2location.py,sha256=yGivX9fzvwvLpnqmYCP2a8SPjTarzrZxfRluog-nkME,2628
@@ -139,7 +138,7 @@ bbot/modules/lightfuzz/submodules/cmdi.py,sha256=-9pL7Yh7VVCObxuS6Qu2cKEJBstfk0o
 bbot/modules/lightfuzz/submodules/crypto.py,sha256=mLWsMbcox9oruNjfdOaLmT7ePMH15K8JN9K5AIB8f8o,22560
 bbot/modules/lightfuzz/submodules/nosqli.py,sha256=K0TlBtpfeBH72q01a3TCQnt9OsznA9kfRYVTe7Vmers,9399
 bbot/modules/lightfuzz/submodules/path.py,sha256=cvfna9P5Cicmc3p3BrzlY0PG1slcvJkeMzZu4i2nwO0,7744
-bbot/modules/lightfuzz/submodules/serial.py,sha256=
+bbot/modules/lightfuzz/submodules/serial.py,sha256=i3TdGV7M0G5thn1SFyKrod9nrm9UPV8kN3sd2-tvmEc,8528
 bbot/modules/lightfuzz/submodules/sqli.py,sha256=42TTB3UglMqnlxl7p2lUx14GWjbY9b6X7K9jWB5Mf9I,8486
 bbot/modules/lightfuzz/submodules/ssti.py,sha256=Pib49rXFuf567msnlec-A1Tnvolw4aILjqn7INLWQTY,1413
 bbot/modules/lightfuzz/submodules/xss.py,sha256=VP15TBeRjglIRjLvwmHJaOCNQOWS7R4WVAZ-VRNe198,9503
@@ -295,7 +294,7 @@ bbot/test/test_step_1/test_web.py,sha256=qzMb5v_1l6fK6SvJZoHpBI3Zb7iaHU_VnenQ8UQ
 bbot/test/test_step_1/test_web_envelopes.py,sha256=28cwm_HZvdGo__uiaShO2AwTJ728FTKwpESRB418AIc,18259
 bbot/test/test_step_2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 bbot/test/test_step_2/module_tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-bbot/test/test_step_2/module_tests/base.py,sha256=
+bbot/test/test_step_2/module_tests/base.py,sha256=tLaO3Csb4DPv4Nuu5xjEPdYhsj70f_vZVV0voTisjyM,5942
 bbot/test/test_step_2/module_tests/test_module_affiliates.py,sha256=d6uAzb_MF4oNGFEBG7Y6T2y0unWpf1gqNxUXRaYqOdk,673
 bbot/test/test_step_2/module_tests/test_module_aggregate.py,sha256=hjxbMxAEFhS7W8RamBrM1t6T-tsLHq95MmQVfrYsock,487
 bbot/test/test_step_2/module_tests/test_module_ajaxpro.py,sha256=S2pFV0TgOJ01SMHnIxcoBkGZ8SAaQVY9o32DOFoZ1u4,3857
@@ -330,7 +329,6 @@ bbot/test/test_step_2/module_tests/test_module_credshed.py,sha256=ipkCFL7YmZBLWW
 bbot/test/test_step_2/module_tests/test_module_crt.py,sha256=V15tE1jcXdXJEzEEdAJvSMRWhKBFtxBBUJ_eewvV3U4,717
 bbot/test/test_step_2/module_tests/test_module_crt_db.py,sha256=R0CvLnzhN5T7XNPVSDRYD9vSwYMTNED6w-EPGoWPVII,849
 bbot/test/test_step_2/module_tests/test_module_csv.py,sha256=UJqMqdiPjx-UjJw10OoVMAj378wu5mWIq0v04TCljTM,579
-bbot/test/test_step_2/module_tests/test_module_dastardly.py,sha256=jjpJD9mdCcbaJgnG63xE-J_Qqpjt9hpm_WvfHDLfTsc,2353
 bbot/test/test_step_2/module_tests/test_module_dehashed.py,sha256=BZ0LFO4xBwDsXzsqjZCYRMTbXdwwUEma2OeJh8YQIEs,3625
 bbot/test/test_step_2/module_tests/test_module_digitorus.py,sha256=1GwxQGny6TxHsV8Fx7cR-aaLU8ZZkcF065VM_XoG1Hs,1612
 bbot/test/test_step_2/module_tests/test_module_discord.py,sha256=Z66fGb-kkdZTQfUh6WZiM35Ad-gDyvwxlA7mUUB2vnQ,1838
@@ -347,7 +345,7 @@ bbot/test/test_step_2/module_tests/test_module_dockerhub.py,sha256=9T8CFcFP32MOp
 bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py,sha256=Q7M3hrbEwOuORZXPS-pIGFTRzB2-g4cEvGtsEcTp7t8,8049
 bbot/test/test_step_2/module_tests/test_module_emailformat.py,sha256=cKxBPnEQ4AiRKV_-hSYEE6756ypst3hi6MN0L5RTukY,461
 bbot/test/test_step_2/module_tests/test_module_emails.py,sha256=bZjtO8N3GG2_g6SUEYprAFLcsi7SlwNPJJ0nODfrWYU,944
-bbot/test/test_step_2/module_tests/test_module_excavate.py,sha256=
+bbot/test/test_step_2/module_tests/test_module_excavate.py,sha256=hoVQnZYb_tI1FlxXRsPaGhk1qj8hyu1GgBEb7ByE3Q0,61336
 bbot/test/test_step_2/module_tests/test_module_extractous.py,sha256=PuTE5rkEIFPwU9lhCYpTgNSkrVjcXm8PClbfOkfRS84,17973
 bbot/test/test_step_2/module_tests/test_module_ffuf.py,sha256=z8ihAM1WYss7QGXIjbi67cekg8iOemDjaM8YR9_qSEs,4100
 bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py,sha256=0-a9J-gq8bUtmxl_-QPVidwZ9KkCvgvoG30Ot3a8lqM,8406
@@ -450,8 +448,8 @@ bbot/wordlists/raft-small-extensions-lowercase_CLEANED.txt,sha256=ZSIVebs7ptMvHx
 bbot/wordlists/top_open_ports_nmap.txt,sha256=LmdFYkfapSxn1pVuQC2LkOIY2hMLgG-Xts7DVtYzweM,42727
 bbot/wordlists/valid_url_schemes.txt,sha256=0B_VAr9Dv7aYhwi6JSBDU-3M76vNtzN0qEC_RNLo7HE,3310
 bbot/wordlists/wordninja_dns.txt.gz,sha256=DYHvvfW0TvzrVwyprqODAk4tGOxv5ezNmCPSdPuDUnQ,570241
-bbot-2.4.2.
-bbot-2.4.2.
-bbot-2.4.2.
-bbot-2.4.2.
-bbot-2.4.2.
+bbot-2.4.2.6611rc0.dist-info/LICENSE,sha256=GzeCzK17hhQQDNow0_r0L8OfLpeTKQjFQwBQU7ZUymg,32473
+bbot-2.4.2.6611rc0.dist-info/METADATA,sha256=LDoYczD9e_LGiKrrwmi9WHdJDFoT_v57p5oNaJAFfIc,18308
+bbot-2.4.2.6611rc0.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
+bbot-2.4.2.6611rc0.dist-info/entry_points.txt,sha256=cWjvcU_lLrzzJgjcjF7yeGuRA_eDS8pQ-kmPUAyOBfo,38
+bbot-2.4.2.6611rc0.dist-info/RECORD,,
bbot/modules/dastardly.py
DELETED
@@ -1,137 +0,0 @@
-from lxml import etree
-from bbot.modules.base import BaseModule
-
-
-class dastardly(BaseModule):
-    watched_events = ["HTTP_RESPONSE"]
-    produced_events = ["FINDING", "VULNERABILITY"]
-    flags = ["active", "aggressive", "slow", "web-thorough", "deadly"]
-    meta = {
-        "description": "Lightweight web application security scanner",
-        "created_date": "2023-12-11",
-        "author": "@domwhewell-sage",
-    }
-
-    deps_pip = ["lxml~=5.3.0"]
-    deps_common = ["docker"]
-    per_hostport_only = True
-
-    default_discovery_context = "{module} performed a light web scan against {event.parent.data['url']} and discovered {event.data['description']} at {event.data['url']}"
-
-    async def setup(self):
-        await self.run_process("systemctl", "start", "docker", sudo=True)
-        await self.run_process("docker", "pull", "public.ecr.aws/portswigger/dastardly:latest", sudo=True)
-        self.output_dir = self.scan.home / "dastardly"
-        self.helpers.mkdir(self.output_dir)
-        return True
-
-    async def filter_event(self, event):
-        # Reject redirects. This helps to avoid scanning the same site twice.
-        is_redirect = str(event.data["status_code"]).startswith("30")
-        if is_redirect:
-            return False, "URL is a redirect"
-        return True
-
-    async def handle_event(self, event):
-        host = event.parsed_url._replace(path="/").geturl()
-        self.verbose(f"Running Dastardly scan against {host}")
-        command, output_file = self.construct_command(host)
-        finished_proc = await self.run_process(command, sudo=True)
-        self.debug(f"dastardly stdout: {getattr(finished_proc, 'stdout', '')}")
-        self.debug(f"dastardly stderr: {getattr(finished_proc, 'stderr', '')}")
-        for testsuite in self.parse_dastardly_xml(output_file):
-            url = testsuite.endpoint
-            for testcase in testsuite.testcases:
-                for failure in testcase.failures:
-                    if failure.severity == "Info":
-                        await self.emit_event(
-                            {
-                                "host": str(event.host),
-                                "url": url,
-                                "description": failure.instance,
-                            },
-                            "FINDING",
-                            event,
-                            context=f"{{module}} executed web scan against {host} and identified {{event.type}}: {failure.instance}",
-                        )
-                    else:
-                        await self.emit_event(
-                            {
-                                "severity": failure.severity,
-                                "host": str(event.host),
-                                "url": url,
-                                "description": failure.instance,
-                            },
-                            "VULNERABILITY",
-                            event,
-                            context=f"{{module}} executed web scan against {host} and identified {failure.severity.lower()} {{event.type}}: {failure.instance}",
-                        )
-
-    def construct_command(self, target):
-        date_time = self.helpers.make_date()
-        file_name = self.helpers.tagify(target)
-        temp_path = self.output_dir / f"{date_time}_{file_name}.xml"
-        command = [
-            "docker",
-            "run",
-            "--user",
-            "0",
-            "--rm",
-            "-v",
-            f"{self.output_dir}:/dastardly",
-            "-e",
-            f"BURP_START_URL={target}",
-            "-e",
-            f"BURP_REPORT_FILE_PATH=/dastardly/{temp_path.name}",
-            "public.ecr.aws/portswigger/dastardly:latest",
-        ]
-        return command, temp_path
-
-    def parse_dastardly_xml(self, xml_file):
-        try:
-            with open(xml_file, "rb") as f:
-                et = etree.parse(f, parser=etree.XMLParser(recover=True, resolve_entities=False))
-            for testsuite in et.iter("testsuite"):
-                yield TestSuite(testsuite)
-        except FileNotFoundError:
-            self.debug(f"Could not find Dastardly XML file at {xml_file}")
-        except OSError as e:
-            self.verbose(f"Error opening Dastardly XML file at {xml_file}: {e}")
-        except etree.ParseError as e:
-            self.warning(f"Error parsing Dastardly XML at {xml_file}: {e}")
-
-
-class Failure:
-    def __init__(self, xml):
-        self.etree = xml
-
-        # instance information
-        self.instance = self.etree.attrib.get("message", "")
-        self.severity = self.etree.attrib.get("type", "")
-        self.text = self.etree.text
-
-
-class TestCase:
-    def __init__(self, xml):
-        self.etree = xml
-
-        # title information
-        self.title = self.etree.attrib.get("name", "")
-
-        # findings / failures (as dastardly names them)
-        self.failures = []
-        for failure in self.etree.findall("failure"):
-            self.failures.append(Failure(failure))
-
-
-class TestSuite:
-    def __init__(self, xml):
-        self.etree = xml
-
-        # endpoint information
-        self.endpoint = self.etree.attrib.get("name", "")
-
-        # test cases
-        self.testcases = []
-        for testcase in self.etree.findall("testcase"):
-            self.testcases.append(TestCase(testcase))
bbot/test/test_step_2/module_tests/test_module_dastardly.py
DELETED

@@ -1,70 +0,0 @@
-import json
-from werkzeug import Response
-
-from .base import ModuleTestBase
-
-
-class TestDastardly(ModuleTestBase):
-    targets = ["http://127.0.0.1:5556/"]
-    modules_overrides = ["httpx", "dastardly"]
-    skip_distro_tests = True
-
-    web_response = """<!DOCTYPE html>
-    <html>
-    <body>
-    <a href="/test?test=yes">visit this<a/>
-    </body>
-    </html>"""
-
-    def xss_handler(self, request):
-        response = f"""<!DOCTYPE html>
-<html>
-<head>
-<title>Email Form</title>
-</head>
-<body>
-{request.args.get("test", "")}
-</body>
-</html>"""
-        return Response(response, content_type="text/html")
-
-    async def get_docker_ip(self, module_test):
-        docker_ip = "172.17.0.1"
-        try:
-            ip_output = await module_test.scan.helpers.run(["ip", "-j", "-4", "a", "show", "dev", "docker0"])
-            interface_json = json.loads(ip_output.stdout)
-            docker_ip = interface_json[0]["addr_info"][0]["local"]
-        except Exception:
-            pass
-        return docker_ip
-
-    async def setup_after_prep(self, module_test):
-        httpserver = module_test.request_fixture.getfixturevalue("bbot_httpserver_allinterfaces")
-        httpserver.expect_request("/").respond_with_data(self.web_response)
-        httpserver.expect_request("/test").respond_with_handler(self.xss_handler)
-
-        # get docker IP
-        docker_ip = await self.get_docker_ip(module_test)
-        module_test.scan.target.seeds.add(docker_ip)
-
-        # replace 127.0.0.1 with docker host IP to allow dastardly access to local http server
-        old_filter_event = module_test.module.filter_event
-
-        def new_filter_event(event):
-            self.new_url = f"http://{docker_ip}:5556/"
-            event.data["url"] = self.new_url
-            event.parsed_url = module_test.scan.helpers.urlparse(self.new_url)
-            return old_filter_event(event)
-
-        module_test.monkeypatch.setattr(module_test.module, "filter_event", new_filter_event)
-
-    def check(self, module_test, events):
-        assert 1 == len(
-            [
-                e
-                for e in events
-                if e.type == "VULNERABILITY"
-                and f"{self.new_url}test" in e.data["description"]
-                and "Cross-site scripting".lower() in e.data["description"].lower()
-            ]
-        )
{bbot-2.4.2.6596rc0.dist-info → bbot-2.4.2.6611rc0.dist-info}/LICENSE
File without changes

{bbot-2.4.2.6596rc0.dist-info → bbot-2.4.2.6611rc0.dist-info}/WHEEL
File without changes

{bbot-2.4.2.6596rc0.dist-info → bbot-2.4.2.6611rc0.dist-info}/entry_points.txt
File without changes