bbot 2.4.2.6608rc0__py3-none-any.whl → 2.4.2.6615rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of bbot has been flagged as potentially problematic; consult the package registry's advisory page for further details.

bbot/__init__.py CHANGED
@@ -1,5 +1,5 @@
1
1
  # version placeholder (replaced by poetry-dynamic-versioning)
2
- __version__ = "v2.4.2.6608rc"
2
+ __version__ = "v2.4.2.6615rc"
3
3
 
4
4
  from .scanner import Scanner, Preset
5
5
 
bbot/modules/dehashed.py CHANGED
@@ -13,23 +13,23 @@ class dehashed(subdomain_enum):
13
13
  "author": "@SpamFaux",
14
14
  "auth_required": True,
15
15
  }
16
- options = {"username": "", "api_key": ""}
17
- options_desc = {"username": "Email Address associated with your API key", "api_key": "DeHashed API Key"}
16
+ options = {"api_key": ""}
17
+ options_desc = {"api_key": "DeHashed API Key"}
18
18
  target_only = True
19
19
 
20
- base_url = "https://api.dehashed.com/search"
20
+ base_url = "https://api.dehashed.com/v2/search"
21
21
 
22
22
  async def setup(self):
23
- self.username = self.config.get("username", "")
24
23
  self.api_key = self.config.get("api_key", "")
25
- self.auth = (self.username, self.api_key)
26
24
  self.headers = {
27
25
  "Accept": "application/json",
26
+ "Content-Type": "application/json",
27
+ "Dehashed-Api-Key": self.api_key,
28
28
  }
29
29
 
30
30
  # soft-fail if we don't have the necessary information to make queries
31
- if not (self.username and self.api_key):
32
- return None, "No username / API key set"
31
+ if not self.api_key:
32
+ return None, "No API key set"
33
33
 
34
34
  return await super().setup()
35
35
 
@@ -38,29 +38,31 @@ class dehashed(subdomain_enum):
38
38
  async for entries in self.query(query):
39
39
  for entry in entries:
40
40
  # we have to clean up the email field because dehashed does a poor job of it
41
- email_str = entry.get("email", "").replace("\\", "")
42
- found_emails = list(await self.helpers.re.extract_emails(email_str))
43
- if not found_emails:
44
- self.debug(f"Invalid email from dehashed.com: {email_str}")
45
- continue
46
- email = found_emails[0]
41
+ emails = []
42
+ for email in entry.get("email", []):
43
+ email_str = email.replace("\\", "")
44
+ found_emails = list(await self.helpers.re.extract_emails(email_str))
45
+ if not found_emails:
46
+ self.debug(f"Invalid email from dehashed.com: {email_str}")
47
+ continue
48
+ emails += found_emails
47
49
 
48
- user = entry.get("username", "")
49
- pw = entry.get("password", "")
50
- h_pw = entry.get("hashed_password", "")
50
+ users = entry.get("username", [])
51
+ pws = entry.get("password", [])
52
+ h_pws = entry.get("hashed_password", [])
51
53
  db_name = entry.get("database_name", "")
52
54
 
53
55
  tags = []
54
56
  if db_name:
55
57
  tags = [f"db-{db_name}"]
56
- if email:
58
+ for email in emails:
57
59
  email_event = self.make_event(email, "EMAIL_ADDRESS", parent=event, tags=tags)
58
60
  if email_event is not None:
59
61
  await self.emit_event(
60
62
  email_event,
61
63
  context=f'{{module}} searched API for "{query}" and found {{event.type}}: {{event.data}}',
62
64
  )
63
- if user:
65
+ for user in users:
64
66
  await self.emit_event(
65
67
  f"{email}:{user}",
66
68
  "USERNAME",
@@ -68,7 +70,7 @@ class dehashed(subdomain_enum):
68
70
  tags=tags,
69
71
  context=f"{{module}} found {email} with {{event.type}}: {{event.data}}",
70
72
  )
71
- if pw:
73
+ for pw in pws:
72
74
  await self.emit_event(
73
75
  f"{email}:{pw}",
74
76
  "PASSWORD",
@@ -76,7 +78,7 @@ class dehashed(subdomain_enum):
76
78
  tags=tags,
77
79
  context=f"{{module}} found {email} with {{event.type}}: {{event.data}}",
78
80
  )
79
- if h_pw:
81
+ for h_pw in h_pws:
80
82
  await self.emit_event(
81
83
  f"{email}:{h_pw}",
82
84
  "HASHED_PASSWORD",
@@ -86,30 +88,33 @@ class dehashed(subdomain_enum):
86
88
  )
87
89
 
88
90
  async def query(self, domain):
89
- query = f"domain:{domain}"
90
- url = f"{self.base_url}?query={query}&size=10000&page=" + "{page}"
91
- page = 0
92
- num_entries = 0
93
- agen = self.api_page_iter(url=url, auth=self.auth, headers=self.headers, _json=False)
91
+ url = self.base_url
92
+ json = {
93
+ "query": "",
94
+ "page": 1,
95
+ "size": 10000, # The maximum permitted size and pagination.
96
+ }
97
+ json["query"] = f"domain:{domain}"
98
+ json["page"] = 1
99
+ max_pages = 1
100
+ agen = self.api_page_iter(url=url, headers=self.headers, _json=False, method="POST", json=json)
94
101
  async for result in agen:
95
102
  result_json = {}
96
103
  with suppress(Exception):
97
104
  result_json = result.json()
98
105
  total = result_json.get("total", 0)
99
106
  entries = result_json.get("entries", [])
100
- if entries is None:
101
- entries = []
102
- num_entries += len(entries)
103
- page += 1
104
- if (page >= 3) or (not entries):
105
- if result is not None and result.status_code != 200:
106
- self.warning(
107
- f"Error retrieving results from dehashed.com (status code {result.status_code}): {result.text}"
108
- )
109
- elif (page >= 3) and (total > num_entries):
110
- self.info(
111
- f"{domain} has {total:,} results in Dehashed. The API can only process the first 30,000 results. Please check dehashed.com to get the remaining results."
112
- )
107
+ json["page"] += 1
108
+ if result is not None and result.status_code != 200:
109
+ self.warning(
110
+ f"Error retrieving results from dehashed.com (status code {result.status_code}): {result.text}"
111
+ )
112
+ elif (json["page"] > max_pages) and (total > (json["size"] * max_pages)):
113
+ self.info(
114
+ f"{domain} has {total:,} results in Dehashed. The API can only process the first 10,000 results. Please check dehashed.com to get the remaining results."
115
+ )
116
+ if entries:
117
+ yield entries
118
+ if not entries or json["page"] > max_pages:
113
119
  await agen.aclose()
114
120
  break
115
- yield entries
@@ -91,9 +91,9 @@ class ModuleTestBase:
91
91
  async def module_test(
92
92
  self, httpx_mock, bbot_httpserver, bbot_httpserver_ssl, monkeypatch, request, caplog, capsys
93
93
  ):
94
- # Skip dastardly test if we're in the distro tests (because dastardly uses docker)
94
+ # If a test uses docker, we can't run it in the distro tests
95
95
  if os.getenv("BBOT_DISTRO_TESTS") and self.skip_distro_tests:
96
- pytest.skip("Skipping module_test for dastardly module due to BBOT_DISTRO_TESTS environment variable")
96
+ pytest.skip("Skipping test since it uses docker")
97
97
 
98
98
  self.log.info(f"Starting {self.name} module test")
99
99
  module_test = self.ModuleTest(
@@ -1,52 +1,44 @@
1
1
  from .base import ModuleTestBase
2
2
 
3
- dehashed_domain_response = {
4
- "balance": 10000,
5
- "entries": [
6
- {
7
- "id": "4363462346",
8
- "email": "bob@blacklanternsecurity.com",
9
- "ip_address": "",
10
- "username": "bob@bob.com",
11
- "password": "",
12
- "hashed_password": "$2a$12$pVmwJ7pXEr3mE.DmCCE4fOUDdeadbeefd2KuCy/tq1ZUFyEOH2bve",
13
- "name": "Bob Smith",
14
- "vin": "",
15
- "address": "",
16
- "phone": "+91283423839",
17
- "database_name": "eatstreet",
18
- },
19
- {
20
- "id": "234623453454",
21
- "email": "tim@blacklanternsecurity.com",
22
- "ip_address": "",
23
- "username": "timmy",
24
- "password": "TimTamSlam69",
25
- "hashed_password": "",
26
- "name": "Tim Tam",
27
- "vin": "",
28
- "address": "",
29
- "phone": "+123455667",
30
- "database_name": "eatstreet",
31
- },
32
- ],
33
- "success": True,
34
- "took": "61µs",
35
- "total": 2,
36
- }
37
-
38
3
 
39
4
  class TestDehashed(ModuleTestBase):
40
5
  modules_overrides = ["dehashed", "speculate"]
41
6
  config_overrides = {
42
7
  "scope": {"report_distance": 2},
43
- "modules": {"dehashed": {"username": "admin", "api_key": "deadbeef"}},
8
+ "modules": {"dehashed": {"api_key": "deadbeef"}},
44
9
  }
45
10
 
46
11
  async def setup_before_prep(self, module_test):
47
12
  module_test.httpx_mock.add_response(
48
- url="https://api.dehashed.com/search?query=domain:blacklanternsecurity.com&size=10000&page=1",
49
- json=dehashed_domain_response,
13
+ url="https://api.dehashed.com/v2/search",
14
+ method="POST",
15
+ json={
16
+ "balance": 10000,
17
+ "entries": [
18
+ {
19
+ "id": "4363462346",
20
+ "email": ["bob@blacklanternsecurity.com"],
21
+ "ip_address": ["127.0.0.9"],
22
+ "username": ["bob@bob.com"],
23
+ "hashed_password": ["$2a$12$pVmwJ7pXEr3mE.DmCCE4fOUDdeadbeefd2KuCy/tq1ZUFyEOH2bve"],
24
+ "name": ["Bob Smith"],
25
+ "phone": ["+91283423839"],
26
+ "database_name": "eatstreet",
27
+ "raw_record": {"le_only": True, "unstructured": True},
28
+ },
29
+ {
30
+ "id": "234623453454",
31
+ "email": ["tim@blacklanternsecurity.com"],
32
+ "username": ["timmy"],
33
+ "password": ["TimTamSlam69"],
34
+ "name": "Tim Tam",
35
+ "phone": ["+123455667"],
36
+ "database_name": "eatstreet",
37
+ },
38
+ ],
39
+ "took": "61ms",
40
+ "total": 2,
41
+ },
50
42
  )
51
43
  await module_test.mock_dns(
52
44
  {
@@ -100,3 +92,68 @@ class TestDehashed(ModuleTestBase):
100
92
  [e for e in events if e.type == "PASSWORD" and e.data == "tim@blacklanternsecurity.com:TimTamSlam69"]
101
93
  )
102
94
  assert 1 == len([e for e in events if e.type == "USERNAME" and e.data == "tim@blacklanternsecurity.com:timmy"])
95
+
96
+
97
+ class TestDehashedBadEmail(TestDehashed):
98
+ async def setup_before_prep(self, module_test):
99
+ module_test.httpx_mock.add_response(
100
+ url="https://api.dehashed.com/v2/search",
101
+ method="POST",
102
+ json={
103
+ "balance": 10000,
104
+ "entries": [
105
+ {
106
+ "id": "EZxg4Lz-INLUt6uRXZaV",
107
+ "email": ["foo.example.com"],
108
+ "database_name": "Collections",
109
+ },
110
+ ],
111
+ "took": "41ms",
112
+ "total": 1,
113
+ },
114
+ )
115
+
116
+ def check(self, module_test, events):
117
+ debug_log_content = open(module_test.scan.home / "debug.log").read()
118
+ assert "Invalid email from dehashed.com: foo.example.com" in debug_log_content
119
+
120
+
121
+ class TestDehashedHTTPError(TestDehashed):
122
+ async def setup_before_prep(self, module_test):
123
+ module_test.httpx_mock.add_response(
124
+ url="https://api.dehashed.com/v2/search",
125
+ method="POST",
126
+ json={"error": "issue with request body"},
127
+ status_code=400,
128
+ )
129
+
130
+ def check(self, module_test, events):
131
+ scan_log_content = open(module_test.scan.home / "scan.log").read()
132
+ assert (
133
+ 'Error retrieving results from dehashed.com (status code 400): {"error":"issue with request body"}'
134
+ in scan_log_content
135
+ )
136
+
137
+
138
+ class TestDehashedTooManyResults(TestDehashed):
139
+ async def setup_before_prep(self, module_test):
140
+ module_test.httpx_mock.add_response(
141
+ url="https://api.dehashed.com/v2/search",
142
+ method="POST",
143
+ json={
144
+ "balance": 10000,
145
+ "entries": [
146
+ {
147
+ "id": "VXhNxj46SGsW4Lworh-G",
148
+ "email": ["bob@bob.com"],
149
+ "database_name": "Collections",
150
+ },
151
+ ],
152
+ "took": "40ms",
153
+ "total": 10001,
154
+ },
155
+ )
156
+
157
+ def check(self, module_test, events):
158
+ scan_log_content = open(module_test.scan.home / "scan.log").read()
159
+ assert "has 10,001 results in Dehashed. The API can only process the first 10,000 results." in scan_log_content
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: bbot
3
- Version: 2.4.2.6608rc0
3
+ Version: 2.4.2.6615rc0
4
4
  Summary: OSINT automation for hackers.
5
5
  License: GPL-3.0
6
6
  Keywords: python,cli,automation,osint,threat-intel,intelligence,neo4j,scanner,python-library,hacking,recursion,pentesting,recon,command-line-tool,bugbounty,subdomains,security-tools,subdomain-scanner,osint-framework,attack-surface,subdomain-enumeration,osint-tool
@@ -1,4 +1,4 @@
1
- bbot/__init__.py,sha256=f4ki_KyS_oJVQYdFblwwJ7F8FE7Xj0I-rDoOOdUzd9E,163
1
+ bbot/__init__.py,sha256=WtQlPfgPAVNvoBoKj72lcIDazIAJx93c82MqqeYuhNU,163
2
2
  bbot/cli.py,sha256=1QJbANVw9Q3GFM92H2QRV2ds5756ulm08CDZwzwPpeI,11888
3
3
  bbot/core/__init__.py,sha256=l255GJE_DvUnWvrRb0J5lG-iMztJ8zVvoweDOfegGtI,46
4
4
  bbot/core/config/__init__.py,sha256=zYNw2Me6tsEr8hOOkLb4BQ97GB7Kis2k--G81S8vofU,342
@@ -83,8 +83,7 @@ bbot/modules/code_repository.py,sha256=x70Z45VnNNMF8BPkHfGWZXsZXw_fStGB3y0-8jbP1
83
83
  bbot/modules/credshed.py,sha256=HAF5wgRGKIIpdMAe4mIAtkZRLmFYjMFyXtjjst6RJ20,4203
84
84
  bbot/modules/crt.py,sha256=6Zm90VKXwYYN6Sab0gwwhTARrtnQIqALJTVtFWMMTGk,1369
85
85
  bbot/modules/crt_db.py,sha256=xaIm2457_xGJjnKss73l1HpPn7pLPHksVzejsimTfZA,2198
86
- bbot/modules/dastardly.py,sha256=lLKfd00UTSXlKd2XUdbv5SYI-oUuLfQcBJ_iuXoWsbU,5325
87
- bbot/modules/dehashed.py,sha256=iyzWHmJs6zC7FsRhw9_AdkckQKCf_0oNnL9RwG409r0,5071
86
+ bbot/modules/dehashed.py,sha256=0lzcqMEgwRmprwurZ2-8Y8aOO4KTueJgpY_vh0DWQwA,5155
88
87
  bbot/modules/digitorus.py,sha256=XQY0eAQrA7yo8S57tGncP1ARud-yG4LiWxx5VBYID34,1027
89
88
  bbot/modules/dnsbimi.py,sha256=A4cqhvhytmEEd-tY4CgFwMLbsVtMjkRY9238Aj8aVtU,6921
90
89
  bbot/modules/dnsbrute.py,sha256=Y2bSbG2IcwIJID1FSQ6Qe9fdpWwG7GIO-wVQw7MdQFM,2439
@@ -295,7 +294,7 @@ bbot/test/test_step_1/test_web.py,sha256=qzMb5v_1l6fK6SvJZoHpBI3Zb7iaHU_VnenQ8UQ
295
294
  bbot/test/test_step_1/test_web_envelopes.py,sha256=28cwm_HZvdGo__uiaShO2AwTJ728FTKwpESRB418AIc,18259
296
295
  bbot/test/test_step_2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
297
296
  bbot/test/test_step_2/module_tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
298
- bbot/test/test_step_2/module_tests/base.py,sha256=ZlmcRyiui3FrNIlErxDL6LjR1xg2pomyMVzmbROenq0,6017
297
+ bbot/test/test_step_2/module_tests/base.py,sha256=tLaO3Csb4DPv4Nuu5xjEPdYhsj70f_vZVV0voTisjyM,5942
299
298
  bbot/test/test_step_2/module_tests/test_module_affiliates.py,sha256=d6uAzb_MF4oNGFEBG7Y6T2y0unWpf1gqNxUXRaYqOdk,673
300
299
  bbot/test/test_step_2/module_tests/test_module_aggregate.py,sha256=hjxbMxAEFhS7W8RamBrM1t6T-tsLHq95MmQVfrYsock,487
301
300
  bbot/test/test_step_2/module_tests/test_module_ajaxpro.py,sha256=S2pFV0TgOJ01SMHnIxcoBkGZ8SAaQVY9o32DOFoZ1u4,3857
@@ -330,8 +329,7 @@ bbot/test/test_step_2/module_tests/test_module_credshed.py,sha256=ipkCFL7YmZBLWW
330
329
  bbot/test/test_step_2/module_tests/test_module_crt.py,sha256=V15tE1jcXdXJEzEEdAJvSMRWhKBFtxBBUJ_eewvV3U4,717
331
330
  bbot/test/test_step_2/module_tests/test_module_crt_db.py,sha256=R0CvLnzhN5T7XNPVSDRYD9vSwYMTNED6w-EPGoWPVII,849
332
331
  bbot/test/test_step_2/module_tests/test_module_csv.py,sha256=UJqMqdiPjx-UjJw10OoVMAj378wu5mWIq0v04TCljTM,579
333
- bbot/test/test_step_2/module_tests/test_module_dastardly.py,sha256=jjpJD9mdCcbaJgnG63xE-J_Qqpjt9hpm_WvfHDLfTsc,2353
334
- bbot/test/test_step_2/module_tests/test_module_dehashed.py,sha256=BZ0LFO4xBwDsXzsqjZCYRMTbXdwwUEma2OeJh8YQIEs,3625
332
+ bbot/test/test_step_2/module_tests/test_module_dehashed.py,sha256=Oi2McmPf873xWWUahYNh-CquDfLFamATyODjAZL33vU,6009
335
333
  bbot/test/test_step_2/module_tests/test_module_digitorus.py,sha256=1GwxQGny6TxHsV8Fx7cR-aaLU8ZZkcF065VM_XoG1Hs,1612
336
334
  bbot/test/test_step_2/module_tests/test_module_discord.py,sha256=Z66fGb-kkdZTQfUh6WZiM35Ad-gDyvwxlA7mUUB2vnQ,1838
337
335
  bbot/test/test_step_2/module_tests/test_module_dnsbimi.py,sha256=nAhNAuH2hS7r4KYzhuPMocae1ifoIRMESYio8L577lg,4393
@@ -450,8 +448,8 @@ bbot/wordlists/raft-small-extensions-lowercase_CLEANED.txt,sha256=ZSIVebs7ptMvHx
450
448
  bbot/wordlists/top_open_ports_nmap.txt,sha256=LmdFYkfapSxn1pVuQC2LkOIY2hMLgG-Xts7DVtYzweM,42727
451
449
  bbot/wordlists/valid_url_schemes.txt,sha256=0B_VAr9Dv7aYhwi6JSBDU-3M76vNtzN0qEC_RNLo7HE,3310
452
450
  bbot/wordlists/wordninja_dns.txt.gz,sha256=DYHvvfW0TvzrVwyprqODAk4tGOxv5ezNmCPSdPuDUnQ,570241
453
- bbot-2.4.2.6608rc0.dist-info/LICENSE,sha256=GzeCzK17hhQQDNow0_r0L8OfLpeTKQjFQwBQU7ZUymg,32473
454
- bbot-2.4.2.6608rc0.dist-info/METADATA,sha256=ZB2AKLHmK3TscBywNyHDAqDpd25QDc90iPu5X_NDksI,18308
455
- bbot-2.4.2.6608rc0.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
456
- bbot-2.4.2.6608rc0.dist-info/entry_points.txt,sha256=cWjvcU_lLrzzJgjcjF7yeGuRA_eDS8pQ-kmPUAyOBfo,38
457
- bbot-2.4.2.6608rc0.dist-info/RECORD,,
451
+ bbot-2.4.2.6615rc0.dist-info/LICENSE,sha256=GzeCzK17hhQQDNow0_r0L8OfLpeTKQjFQwBQU7ZUymg,32473
452
+ bbot-2.4.2.6615rc0.dist-info/METADATA,sha256=zMFFr-b2zwl7kcxtDOUqz8TPuk0-KGGywvBGXnFJL90,18308
453
+ bbot-2.4.2.6615rc0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
454
+ bbot-2.4.2.6615rc0.dist-info/entry_points.txt,sha256=cWjvcU_lLrzzJgjcjF7yeGuRA_eDS8pQ-kmPUAyOBfo,38
455
+ bbot-2.4.2.6615rc0.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: poetry-core 2.1.2
2
+ Generator: poetry-core 2.1.3
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
bbot/modules/dastardly.py DELETED
@@ -1,137 +0,0 @@
1
- from lxml import etree
2
- from bbot.modules.base import BaseModule
3
-
4
-
5
- class dastardly(BaseModule):
6
- watched_events = ["HTTP_RESPONSE"]
7
- produced_events = ["FINDING", "VULNERABILITY"]
8
- flags = ["active", "aggressive", "slow", "web-thorough", "deadly"]
9
- meta = {
10
- "description": "Lightweight web application security scanner",
11
- "created_date": "2023-12-11",
12
- "author": "@domwhewell-sage",
13
- }
14
-
15
- deps_pip = ["lxml~=5.3.0"]
16
- deps_common = ["docker"]
17
- per_hostport_only = True
18
-
19
- default_discovery_context = "{module} performed a light web scan against {event.parent.data['url']} and discovered {event.data['description']} at {event.data['url']}"
20
-
21
- async def setup(self):
22
- await self.run_process("systemctl", "start", "docker", sudo=True)
23
- await self.run_process("docker", "pull", "public.ecr.aws/portswigger/dastardly:latest", sudo=True)
24
- self.output_dir = self.scan.home / "dastardly"
25
- self.helpers.mkdir(self.output_dir)
26
- return True
27
-
28
- async def filter_event(self, event):
29
- # Reject redirects. This helps to avoid scanning the same site twice.
30
- is_redirect = str(event.data["status_code"]).startswith("30")
31
- if is_redirect:
32
- return False, "URL is a redirect"
33
- return True
34
-
35
- async def handle_event(self, event):
36
- host = event.parsed_url._replace(path="/").geturl()
37
- self.verbose(f"Running Dastardly scan against {host}")
38
- command, output_file = self.construct_command(host)
39
- finished_proc = await self.run_process(command, sudo=True)
40
- self.debug(f"dastardly stdout: {getattr(finished_proc, 'stdout', '')}")
41
- self.debug(f"dastardly stderr: {getattr(finished_proc, 'stderr', '')}")
42
- for testsuite in self.parse_dastardly_xml(output_file):
43
- url = testsuite.endpoint
44
- for testcase in testsuite.testcases:
45
- for failure in testcase.failures:
46
- if failure.severity == "Info":
47
- await self.emit_event(
48
- {
49
- "host": str(event.host),
50
- "url": url,
51
- "description": failure.instance,
52
- },
53
- "FINDING",
54
- event,
55
- context=f"{{module}} executed web scan against {host} and identified {{event.type}}: {failure.instance}",
56
- )
57
- else:
58
- await self.emit_event(
59
- {
60
- "severity": failure.severity,
61
- "host": str(event.host),
62
- "url": url,
63
- "description": failure.instance,
64
- },
65
- "VULNERABILITY",
66
- event,
67
- context=f"{{module}} executed web scan against {host} and identified {failure.severity.lower()} {{event.type}}: {failure.instance}",
68
- )
69
-
70
- def construct_command(self, target):
71
- date_time = self.helpers.make_date()
72
- file_name = self.helpers.tagify(target)
73
- temp_path = self.output_dir / f"{date_time}_{file_name}.xml"
74
- command = [
75
- "docker",
76
- "run",
77
- "--user",
78
- "0",
79
- "--rm",
80
- "-v",
81
- f"{self.output_dir}:/dastardly",
82
- "-e",
83
- f"BURP_START_URL={target}",
84
- "-e",
85
- f"BURP_REPORT_FILE_PATH=/dastardly/{temp_path.name}",
86
- "public.ecr.aws/portswigger/dastardly:latest",
87
- ]
88
- return command, temp_path
89
-
90
- def parse_dastardly_xml(self, xml_file):
91
- try:
92
- with open(xml_file, "rb") as f:
93
- et = etree.parse(f, parser=etree.XMLParser(recover=True, resolve_entities=False))
94
- for testsuite in et.iter("testsuite"):
95
- yield TestSuite(testsuite)
96
- except FileNotFoundError:
97
- self.debug(f"Could not find Dastardly XML file at {xml_file}")
98
- except OSError as e:
99
- self.verbose(f"Error opening Dastardly XML file at {xml_file}: {e}")
100
- except etree.ParseError as e:
101
- self.warning(f"Error parsing Dastardly XML at {xml_file}: {e}")
102
-
103
-
104
- class Failure:
105
- def __init__(self, xml):
106
- self.etree = xml
107
-
108
- # instance information
109
- self.instance = self.etree.attrib.get("message", "")
110
- self.severity = self.etree.attrib.get("type", "")
111
- self.text = self.etree.text
112
-
113
-
114
- class TestCase:
115
- def __init__(self, xml):
116
- self.etree = xml
117
-
118
- # title information
119
- self.title = self.etree.attrib.get("name", "")
120
-
121
- # findings / failures(as dastardly names them)
122
- self.failures = []
123
- for failure in self.etree.findall("failure"):
124
- self.failures.append(Failure(failure))
125
-
126
-
127
- class TestSuite:
128
- def __init__(self, xml):
129
- self.etree = xml
130
-
131
- # endpoint information
132
- self.endpoint = self.etree.attrib.get("name", "")
133
-
134
- # test cases
135
- self.testcases = []
136
- for testcase in self.etree.findall("testcase"):
137
- self.testcases.append(TestCase(testcase))
@@ -1,70 +0,0 @@
1
- import json
2
- from werkzeug import Response
3
-
4
- from .base import ModuleTestBase
5
-
6
-
7
- class TestDastardly(ModuleTestBase):
8
- targets = ["http://127.0.0.1:5556/"]
9
- modules_overrides = ["httpx", "dastardly"]
10
- skip_distro_tests = True
11
-
12
- web_response = """<!DOCTYPE html>
13
- <html>
14
- <body>
15
- <a href="/test?test=yes">visit this<a/>
16
- </body>
17
- </html>"""
18
-
19
- def xss_handler(self, request):
20
- response = f"""<!DOCTYPE html>
21
- <html>
22
- <head>
23
- <title>Email Form</title>
24
- </head>
25
- <body>
26
- {request.args.get("test", "")}
27
- </body>
28
- </html>"""
29
- return Response(response, content_type="text/html")
30
-
31
- async def get_docker_ip(self, module_test):
32
- docker_ip = "172.17.0.1"
33
- try:
34
- ip_output = await module_test.scan.helpers.run(["ip", "-j", "-4", "a", "show", "dev", "docker0"])
35
- interface_json = json.loads(ip_output.stdout)
36
- docker_ip = interface_json[0]["addr_info"][0]["local"]
37
- except Exception:
38
- pass
39
- return docker_ip
40
-
41
- async def setup_after_prep(self, module_test):
42
- httpserver = module_test.request_fixture.getfixturevalue("bbot_httpserver_allinterfaces")
43
- httpserver.expect_request("/").respond_with_data(self.web_response)
44
- httpserver.expect_request("/test").respond_with_handler(self.xss_handler)
45
-
46
- # get docker IP
47
- docker_ip = await self.get_docker_ip(module_test)
48
- module_test.scan.target.seeds.add(docker_ip)
49
-
50
- # replace 127.0.0.1 with docker host IP to allow dastardly access to local http server
51
- old_filter_event = module_test.module.filter_event
52
-
53
- def new_filter_event(event):
54
- self.new_url = f"http://{docker_ip}:5556/"
55
- event.data["url"] = self.new_url
56
- event.parsed_url = module_test.scan.helpers.urlparse(self.new_url)
57
- return old_filter_event(event)
58
-
59
- module_test.monkeypatch.setattr(module_test.module, "filter_event", new_filter_event)
60
-
61
- def check(self, module_test, events):
62
- assert 1 == len(
63
- [
64
- e
65
- for e in events
66
- if e.type == "VULNERABILITY"
67
- and f"{self.new_url}test" in e.data["description"]
68
- and "Cross-site scripting".lower() in e.data["description"].lower()
69
- ]
70
- )