bbot: 2.3.0.5546rc0-py3-none-any.whl → 2.3.1.5815rc0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of bbot might be problematic.

Files changed (116)
  1. bbot/__init__.py +1 -1
  2. bbot/cli.py +1 -1
  3. bbot/core/engine.py +1 -1
  4. bbot/core/event/base.py +7 -5
  5. bbot/core/helpers/async_helpers.py +7 -1
  6. bbot/core/helpers/depsinstaller/installer.py +7 -2
  7. bbot/core/helpers/diff.py +13 -4
  8. bbot/core/helpers/dns/brute.py +8 -2
  9. bbot/core/helpers/dns/engine.py +3 -2
  10. bbot/core/helpers/ratelimiter.py +8 -2
  11. bbot/core/helpers/regexes.py +5 -2
  12. bbot/core/helpers/web/engine.py +1 -1
  13. bbot/core/helpers/web/web.py +1 -1
  14. bbot/core/shared_deps.py +14 -0
  15. bbot/defaults.yml +44 -0
  16. bbot/modules/ajaxpro.py +64 -37
  17. bbot/modules/baddns.py +23 -15
  18. bbot/modules/baddns_direct.py +2 -2
  19. bbot/modules/badsecrets.py +2 -2
  20. bbot/modules/base.py +49 -15
  21. bbot/modules/censys.py +1 -1
  22. bbot/modules/deadly/dastardly.py +3 -3
  23. bbot/modules/deadly/nuclei.py +1 -1
  24. bbot/modules/dehashed.py +2 -2
  25. bbot/modules/dnsbrute_mutations.py +3 -1
  26. bbot/modules/docker_pull.py +1 -1
  27. bbot/modules/dockerhub.py +2 -2
  28. bbot/modules/dotnetnuke.py +12 -12
  29. bbot/modules/extractous.py +1 -1
  30. bbot/modules/ffuf_shortnames.py +107 -48
  31. bbot/modules/filedownload.py +6 -0
  32. bbot/modules/generic_ssrf.py +54 -40
  33. bbot/modules/github_codesearch.py +2 -2
  34. bbot/modules/github_org.py +16 -20
  35. bbot/modules/github_workflows.py +6 -2
  36. bbot/modules/gowitness.py +6 -0
  37. bbot/modules/hunt.py +1 -1
  38. bbot/modules/hunterio.py +1 -1
  39. bbot/modules/iis_shortnames.py +23 -7
  40. bbot/modules/internal/excavate.py +5 -3
  41. bbot/modules/internal/unarchive.py +82 -0
  42. bbot/modules/jadx.py +2 -2
  43. bbot/modules/output/asset_inventory.py +1 -1
  44. bbot/modules/output/base.py +1 -1
  45. bbot/modules/output/discord.py +2 -1
  46. bbot/modules/output/slack.py +2 -1
  47. bbot/modules/output/teams.py +10 -25
  48. bbot/modules/output/web_parameters.py +55 -0
  49. bbot/modules/paramminer_headers.py +15 -10
  50. bbot/modules/portfilter.py +41 -0
  51. bbot/modules/portscan.py +1 -22
  52. bbot/modules/postman.py +61 -43
  53. bbot/modules/postman_download.py +10 -147
  54. bbot/modules/sitedossier.py +1 -1
  55. bbot/modules/skymem.py +1 -1
  56. bbot/modules/templates/postman.py +163 -1
  57. bbot/modules/templates/subdomain_enum.py +1 -1
  58. bbot/modules/templates/webhook.py +17 -26
  59. bbot/modules/trufflehog.py +3 -3
  60. bbot/modules/wappalyzer.py +1 -1
  61. bbot/modules/zoomeye.py +1 -1
  62. bbot/presets/kitchen-sink.yml +1 -1
  63. bbot/presets/nuclei/nuclei-budget.yml +19 -0
  64. bbot/presets/nuclei/nuclei-intense.yml +28 -0
  65. bbot/presets/nuclei/nuclei-technology.yml +23 -0
  66. bbot/presets/nuclei/nuclei.yml +34 -0
  67. bbot/presets/spider-intense.yml +13 -0
  68. bbot/scanner/preset/args.py +29 -3
  69. bbot/scanner/preset/preset.py +43 -24
  70. bbot/scanner/scanner.py +17 -7
  71. bbot/test/bbot_fixtures.py +7 -7
  72. bbot/test/test_step_1/test_bloom_filter.py +2 -2
  73. bbot/test/test_step_1/test_cli.py +5 -5
  74. bbot/test/test_step_1/test_dns.py +33 -0
  75. bbot/test/test_step_1/test_events.py +15 -5
  76. bbot/test/test_step_1/test_modules_basic.py +21 -21
  77. bbot/test/test_step_1/test_presets.py +94 -4
  78. bbot/test/test_step_1/test_regexes.py +13 -13
  79. bbot/test/test_step_1/test_scan.py +78 -0
  80. bbot/test/test_step_1/test_web.py +4 -4
  81. bbot/test/test_step_2/module_tests/test_module_ajaxpro.py +43 -23
  82. bbot/test/test_step_2/module_tests/test_module_azure_realm.py +3 -3
  83. bbot/test/test_step_2/module_tests/test_module_baddns.py +3 -3
  84. bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +6 -6
  85. bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +3 -3
  86. bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +3 -3
  87. bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +3 -3
  88. bbot/test/test_step_2/module_tests/test_module_dnscaa.py +6 -6
  89. bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +9 -9
  90. bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +12 -12
  91. bbot/test/test_step_2/module_tests/test_module_excavate.py +15 -15
  92. bbot/test/test_step_2/module_tests/test_module_extractous.py +3 -3
  93. bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +8 -8
  94. bbot/test/test_step_2/module_tests/test_module_generic_ssrf.py +3 -1
  95. bbot/test/test_step_2/module_tests/test_module_github_codesearch.py +3 -3
  96. bbot/test/test_step_2/module_tests/test_module_gowitness.py +9 -9
  97. bbot/test/test_step_2/module_tests/test_module_iis_shortnames.py +1 -1
  98. bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +35 -1
  99. bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +3 -3
  100. bbot/test/test_step_2/module_tests/test_module_portfilter.py +48 -0
  101. bbot/test/test_step_2/module_tests/test_module_postman.py +338 -3
  102. bbot/test/test_step_2/module_tests/test_module_postman_download.py +4 -161
  103. bbot/test/test_step_2/module_tests/test_module_securitytxt.py +12 -12
  104. bbot/test/test_step_2/module_tests/test_module_teams.py +10 -1
  105. bbot/test/test_step_2/module_tests/test_module_trufflehog.py +1 -1
  106. bbot/test/test_step_2/module_tests/test_module_unarchive.py +229 -0
  107. bbot/test/test_step_2/module_tests/test_module_viewdns.py +3 -3
  108. bbot/test/test_step_2/module_tests/test_module_web_parameters.py +59 -0
  109. bbot/test/test_step_2/module_tests/test_module_websocket.py +5 -4
  110. {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.5815rc0.dist-info}/METADATA +7 -7
  111. {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.5815rc0.dist-info}/RECORD +115 -105
  112. {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.5815rc0.dist-info}/WHEEL +1 -1
  113. bbot/wordlists/ffuf_shortname_candidates.txt +0 -107982
  114. /bbot/presets/{baddns-thorough.yml → baddns-intense.yml} +0 -0
  115. {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.5815rc0.dist-info}/LICENSE +0 -0
  116. {bbot-2.3.0.5546rc0.dist-info → bbot-2.3.1.5815rc0.dist-info}/entry_points.txt +0 -0
bbot/modules/base.py CHANGED
@@ -160,8 +160,7 @@ class BaseModule:
         self._api_request_failures = 0
 
         self._tasks = []
-        self._event_received = asyncio.Condition()
-        self._event_queued = asyncio.Condition()
+        self._event_received = None
 
         # used for optional "per host" tracking
         self._per_host_tracker = set()
@@ -409,6 +408,12 @@ class BaseModule:
         """
         return getattr(self, "api_key", "")
 
+    @property
+    def event_received(self):
+        if self._event_received is None:
+            self._event_received = asyncio.Condition()
+        return self._event_received
+
     def get_watched_events(self):
         """Retrieve the set of events that the module is interested in observing.
 
@@ -658,11 +663,12 @@ class BaseModule:
                     await asyncio.sleep(0.1)
                     continue
 
+            # if batch wasn't big enough, we wait for the next event before continuing
             if self.batch_size > 1:
                 submitted = await self._handle_batch()
                 if not submitted:
-                    async with self._event_received:
-                        await self._event_received.wait()
+                    async with self.event_received:
+                        await self.event_received.wait()
 
             else:
                 try:
@@ -874,8 +880,8 @@ class BaseModule:
             self.debug(f"Queueing {event} because {reason}")
             try:
                 self.incoming_event_queue.put_nowait(event)
-                async with self._event_received:
-                    self._event_received.notify()
+                async with self.event_received:
+                    self.event_received.notify()
                 if event.type != "FINISHED":
                     self.scan._new_activity = True
             except AttributeError:
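
Taken together, the three Condition-related hunks above replace eagerly-constructed asyncio.Condition objects with a single lazily-created one: the worker loop waits on it when a batch comes up empty, and queue_event() notifies it when new work arrives. A minimal standalone sketch of the same wait/notify handshake (names are illustrative, not BBOT's API):

import asyncio


class Worker:
    def __init__(self):
        self.queue = asyncio.Queue()
        self._event_received = None  # create lazily, once a loop is running

    @property
    def event_received(self):
        # Lazy init avoids binding the Condition to whatever event loop
        # (if any) happens to be current when the object is constructed.
        if self._event_received is None:
            self._event_received = asyncio.Condition()
        return self._event_received

    async def put(self, item):
        self.queue.put_nowait(item)
        async with self.event_received:
            self.event_received.notify()  # wake the worker if it's waiting

    async def run(self):
        while True:
            async with self.event_received:
                while self.queue.empty():
                    await self.event_received.wait()  # sleep until put() notifies
            print("got:", await self.queue.get())


async def main():
    worker = Worker()
    task = asyncio.create_task(worker.run())
    await worker.put("hello")
    await asyncio.sleep(0.1)
    task.cancel()


asyncio.run(main())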
@@ -1148,7 +1154,7 @@ class BaseModule:
                 kwargs["url"] = new_url
 
             r = await self.helpers.request(**kwargs)
-            success = False if r is None else r.is_success
+            success = r is not None and self._api_response_is_success(r)
 
             if success:
                 self._api_request_failures = 0
@@ -1163,11 +1169,13 @@ class BaseModule:
                     )
                 else:
                     # sleep for a bit if we're being rate limited
-                    if status_code == 429:
+                    retry_after = self._get_retry_after(r)
+                    if retry_after or status_code == 429:
+                        sleep_interval = int(retry_after) if retry_after is not None else self._429_sleep_interval
                         self.verbose(
-                            f"Sleeping for {self._429_sleep_interval:,} seconds due to rate limit (HTTP status: 429)"
+                            f"Sleeping for {sleep_interval:,} seconds due to rate limit (HTTP status: {status_code})"
                         )
-                        await asyncio.sleep(self._429_sleep_interval)
+                        await asyncio.sleep(sleep_interval)
                     elif self._api_keys:
                         # if request failed, cycle API keys and try again
                         self.cycle_api_key()
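
The rewritten rate-limit branch consults a new `_get_retry_after` helper (defined in the next hunk), which prefers the standard `Retry-After` header and falls back to a `retry_after` field in a JSON body. One caveat: the diff calls `float()` on the header value unguarded, so an HTTP-date form of `Retry-After` would raise. A self-contained sketch of the same idea against a stub response, with that case guarded:

from contextlib import suppress


def get_retry_after(response, default=None):
    """Return the server-suggested backoff in seconds, or `default`."""
    # The standard Retry-After header takes priority...
    retry_after = getattr(response, "headers", {}).get("Retry-After")
    if retry_after is None:
        # ...then fall back to a retry_after field in a JSON body,
        # which some APIs use instead of (or alongside) the header
        with suppress(Exception):
            body = response.json()
            if isinstance(body, dict):
                retry_after = body.get("retry_after")
    with suppress(TypeError, ValueError):
        return float(retry_after)
    return default


class StubResponse:
    def __init__(self, headers=None, body=None):
        self.headers = headers or {}
        self._body = body

    def json(self):
        return self._body


assert get_retry_after(StubResponse(headers={"Retry-After": "30"})) == 30.0
assert get_retry_after(StubResponse(body={"retry_after": 5})) == 5.0
assert get_retry_after(StubResponse(), default=60) == 60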
@@ -1176,7 +1184,30 @@ class BaseModule:
 
         return r
 
-    async def api_page_iter(self, url, page_size=100, json=True, next_key=None, **requests_kwargs):
+    def _get_retry_after(self, r):
+        # try to get retry_after from headers first
+        headers = getattr(r, "headers", {})
+        retry_after = headers.get("Retry-After", None)
+        if retry_after is None:
+            # then look in body json
+            with suppress(Exception):
+                body_json = r.json()
+                if isinstance(body_json, dict):
+                    retry_after = body_json.get("retry_after", None)
+        if retry_after is not None:
+            return float(retry_after)
+
+    def _prepare_api_iter_req(self, url, page, page_size, offset, **requests_kwargs):
+        """
+        Default function for preparing an API request for iterating through paginated data.
+        """
+        url = self.helpers.safe_format(url, page=page, page_size=page_size, offset=offset)
+        return url, requests_kwargs
+
+    def _api_response_is_success(self, r):
+        return r.is_success
+
+    async def api_page_iter(self, url, page_size=100, _json=True, next_key=None, iter_key=None, **requests_kwargs):
         """
         An asynchronous generator function for iterating through paginated API data.
 
@@ -1189,6 +1220,7 @@ class BaseModule:
             page_size (int, optional): The number of items per page. Defaults to 100.
             json (bool, optional): If True, attempts to deserialize the response content to a JSON object. Defaults to True.
             next_key (callable, optional): A function that takes the last page's data and returns the URL for the next page. Defaults to None.
+            iter_key (callable, optional): A function that builds each new request based on the page number, page size, and offset. Defaults to a simple implementation that autoreplaces {page} and {page_size} in the url.
             **requests_kwargs: Arbitrary keyword arguments that will be forwarded to the HTTP request function.
 
         Yields:
@@ -1206,11 +1238,13 @@ class BaseModule:
             >>>         if not subdomains:
             >>>             break
             >>> finally:
-            >>>     agen.aclose()
+            >>>     await agen.aclose()
         """
         page = 1
         offset = 0
         result = None
+        if iter_key is None:
+            iter_key = self._prepare_api_iter_req
         while 1:
             if result and callable(next_key):
                 try:
@@ -1219,13 +1253,13 @@ class BaseModule:
                     self.debug(f"Failed to extract next page of results from {url}: {e}")
                     self.debug(traceback.format_exc())
             else:
-                new_url = self.helpers.safe_format(url, page=page, page_size=page_size, offset=offset)
-                result = await self.api_request(new_url, **requests_kwargs)
+                new_url, new_kwargs = iter_key(url, page, page_size, offset, **requests_kwargs)
+                result = await self.api_request(new_url, **new_kwargs)
             if result is None:
                 self.verbose(f"api_page_iter() got no response for {url}")
                 break
             try:
-                if json:
+                if _json:
                     result = result.json()
                 yield result
             except Exception:
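
The net effect of the `api_page_iter` changes: the `json` parameter is renamed to `_json` (so it can no longer collide with a `json=` kwarg meant for the HTTP request itself), and a pluggable `iter_key` callable now builds each page's request, defaulting to the old `{page}`/`{page_size}` URL substitution. A runnable sketch of the pattern, with a custom `iter_key` that paginates via a JSON POST body (the API shape here is made up for illustration):

import asyncio


async def fake_api_request(url, **kwargs):
    # Stand-in for self.api_request(); serves three pages, then an empty one
    page = kwargs.get("json", {}).get("page", 1)
    results = [f"item-{page}-{i}" for i in range(2)] if page <= 3 else []
    return {"results": results}


def post_body_iter_key(url, page, page_size, offset, **requests_kwargs):
    # Custom iter_key: put pagination in a POST body instead of the URL
    requests_kwargs["json"] = {"page": page, "limit": page_size}
    return url, requests_kwargs


async def page_iter(url, page_size=100, iter_key=post_body_iter_key, **requests_kwargs):
    page = 1
    offset = 0
    while True:
        new_url, new_kwargs = iter_key(url, page, page_size, offset, **requests_kwargs)
        yield await fake_api_request(new_url, **new_kwargs)
        page += 1
        offset += page_size


async def main():
    agen = page_iter("https://api.example.com/search", page_size=2)
    try:
        async for result in agen:
            if not result["results"]:
                break
            print(result["results"])
    finally:
        await agen.aclose()  # note the await, matching the docstring fix above


asyncio.run(main())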
bbot/modules/censys.py CHANGED
@@ -72,7 +72,7 @@ class censys(subdomain_enum_apikey):
                 error = d.get("error", "")
                 if error:
                     self.warning(error)
-                self.verbose(f'Non-200 Status code: {resp.status_code} for query "{query}", page #{i+1}')
+                self.verbose(f'Non-200 Status code: {resp.status_code} for query "{query}", page #{i + 1}')
                 self.debug(f"Response: {resp.text}")
                 break
             else:
bbot/modules/deadly/dastardly.py CHANGED
@@ -12,7 +12,7 @@ class dastardly(BaseModule):
         "author": "@domwhewell-sage",
     }
 
-    deps_pip = ["lxml~=4.9.2"]
+    deps_pip = ["lxml~=5.3.0"]
     deps_common = ["docker"]
     per_hostport_only = True
 
@@ -37,8 +37,8 @@ class dastardly(BaseModule):
         self.verbose(f"Running Dastardly scan against {host}")
         command, output_file = self.construct_command(host)
         finished_proc = await self.run_process(command, sudo=True)
-        self.debug(f'dastardly stdout: {getattr(finished_proc, "stdout", "")}')
-        self.debug(f'dastardly stderr: {getattr(finished_proc, "stderr", "")}')
+        self.debug(f"dastardly stdout: {getattr(finished_proc, 'stdout', '')}")
+        self.debug(f"dastardly stderr: {getattr(finished_proc, 'stderr', '')}")
         for testsuite in self.parse_dastardly_xml(output_file):
             url = testsuite.endpoint
             for testcase in testsuite.testcases:
bbot/modules/deadly/nuclei.py CHANGED
@@ -15,7 +15,7 @@ class nuclei(BaseModule):
     }
 
     options = {
-        "version": "3.3.7",
+        "version": "3.3.8",
         "tags": "",
         "templates": "",
         "severity": "",
bbot/modules/dehashed.py CHANGED
@@ -90,7 +90,7 @@ class dehashed(subdomain_enum):
         url = f"{self.base_url}?query={query}&size=10000&page=" + "{page}"
         page = 0
         num_entries = 0
-        agen = self.api_page_iter(url=url, auth=self.auth, headers=self.headers, json=False)
+        agen = self.api_page_iter(url=url, auth=self.auth, headers=self.headers, _json=False)
         async for result in agen:
             result_json = {}
             with suppress(Exception):
@@ -110,6 +110,6 @@ class dehashed(subdomain_enum):
                 self.info(
                     f"{domain} has {total:,} results in Dehashed. The API can only process the first 30,000 results. Please check dehashed.com to get the remaining results."
                 )
-                agen.aclose()
+                await agen.aclose()
                 break
         yield entries
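
The `await` added before `agen.aclose()` (here and in dockerhub below) fixes a subtle bug: on an async generator, `aclose()` merely returns a coroutine, so calling it without `await` closes nothing at that point; cleanup is deferred until event-loop shutdown, and Python emits a `RuntimeWarning` about a never-awaited coroutine. A small demonstration:

import asyncio


async def gen():
    try:
        while True:
            yield 1
    finally:
        print("generator cleaned up")


async def main():
    g1 = gen()
    await g1.__anext__()
    g1.aclose()  # BUG: creates a coroutine but never runs it; no cleanup happens here

    g2 = gen()
    await g2.__anext__()
    await g2.aclose()  # correct: the generator's finally block runs immediately
    print("done")


asyncio.run(main())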
bbot/modules/dnsbrute_mutations.py CHANGED
@@ -121,7 +121,9 @@ class dnsbrute_mutations(BaseModule):
                 break
 
             if mutations:
-                self.info(f"Trying {len(mutations):,} mutations against {domain} ({i+1}/{len(trimmed_found)})")
+                self.info(
+                    f"Trying {len(mutations):,} mutations against {domain} ({i + 1}/{len(trimmed_found)})"
+                )
                 results = await self.helpers.dns.brute(self, query, mutations)
                 try:
                     mutation_run = self._mutation_run_counter[domain]
bbot/modules/docker_pull.py CHANGED
@@ -191,7 +191,7 @@ class docker_pull(BaseModule):
         layer_filenames = []
         layer_digests = await self.get_layers(manifest)
         for i, layer_digest in enumerate(layer_digests):
-            self.verbose(f"Downloading layer {i+1}/{len(layer_digests)} from {repository}:{tag}")
+            self.verbose(f"Downloading layer {i + 1}/{len(layer_digests)} from {repository}:{tag}")
             blob, layer_filename = await self.download_and_get_filename(registry, repository, layer_digest)
             layer_filenames.append(layer_filename)
             await self.write_file_to_tar(tar, layer_filename, blob)
bbot/modules/dockerhub.py CHANGED
@@ -64,7 +64,7 @@ class dockerhub(BaseModule):
     async def get_repos(self, username):
        repos = []
        url = f"{self.api_url}/repositories/{username}?page_size=25&page=" + "{page}"
-        agen = self.api_page_iter(url, json=False)
+        agen = self.api_page_iter(url, _json=False)
        try:
            async for r in agen:
                if r is None:
@@ -85,5 +85,5 @@ class dockerhub(BaseModule):
                if image_name and namespace:
                    repos.append("https://hub.docker.com/r/" + namespace + "/" + image_name)
        finally:
-            agen.aclose()
+            await agen.aclose()
        return repos
bbot/modules/dotnetnuke.py CHANGED
@@ -95,7 +95,7 @@ class dotnetnuke(BaseModule):
 
         if detected is True:
             # DNNPersonalization Deserialization Detection
-            for probe_url in [f'{event.data["url"]}/__', f'{event.data["url"]}/', f'{event.data["url"]}']:
+            for probe_url in [f"{event.data['url']}/__", f"{event.data['url']}/", f"{event.data['url']}"]:
                 result = await self.helpers.request(probe_url, cookies=self.exploit_probe)
                 if result:
                     if "for 16-bit app support" in result.text and "[extensions]" in result.text:
@@ -115,7 +115,7 @@ class dotnetnuke(BaseModule):
             if "endpoint" not in event.tags:
                 # NewsArticlesSlider ImageHandler.ashx File Read
                 result = await self.helpers.request(
-                    f'{event.data["url"]}/DesktopModules/dnnUI_NewsArticlesSlider/ImageHandler.ashx?img=~/web.config'
+                    f"{event.data['url']}/DesktopModules/dnnUI_NewsArticlesSlider/ImageHandler.ashx?img=~/web.config"
                 )
                 if result:
                     if "<configuration>" in result.text:
@@ -125,16 +125,16 @@ class dotnetnuke(BaseModule):
                                 "severity": "CRITICAL",
                                 "description": description,
                                 "host": str(event.host),
-                                "url": f'{event.data["url"]}/DesktopModules/dnnUI_NewsArticlesSlider/ImageHandler.ashx',
+                                "url": f"{event.data['url']}/DesktopModules/dnnUI_NewsArticlesSlider/ImageHandler.ashx",
                             },
                             "VULNERABILITY",
                             event,
-                            context=f'{{module}} scanned {event.data["url"]} and found critical {{event.type}}: {description}',
+                            context=f"{{module}} scanned {event.data['url']} and found critical {{event.type}}: {description}",
                         )
 
                 # DNNArticle GetCSS.ashx File Read
                 result = await self.helpers.request(
-                    f'{event.data["url"]}/DesktopModules/DNNArticle/getcss.ashx?CP=%2fweb.config&smid=512&portalid=3'
+                    f"{event.data['url']}/DesktopModules/DNNArticle/getcss.ashx?CP=%2fweb.config&smid=512&portalid=3"
                 )
                 if result:
                     if "<configuration>" in result.text:
@@ -144,19 +144,19 @@ class dotnetnuke(BaseModule):
                                 "severity": "CRITICAL",
                                 "description": description,
                                 "host": str(event.host),
-                                "url": f'{event.data["url"]}/Desktopmodules/DNNArticle/GetCSS.ashx/?CP=%2fweb.config',
+                                "url": f"{event.data['url']}/Desktopmodules/DNNArticle/GetCSS.ashx/?CP=%2fweb.config",
                             },
                             "VULNERABILITY",
                             event,
-                            context=f'{{module}} scanned {event.data["url"]} and found critical {{event.type}}: {description}',
+                            context=f"{{module}} scanned {event.data['url']} and found critical {{event.type}}: {description}",
                         )
 
                 # InstallWizard SuperUser Privilege Escalation
-                result = await self.helpers.request(f'{event.data["url"]}/Install/InstallWizard.aspx')
+                result = await self.helpers.request(f"{event.data['url']}/Install/InstallWizard.aspx")
                 if result:
                     if result.status_code == 200:
                         result_confirm = await self.helpers.request(
-                            f'{event.data["url"]}/Install/InstallWizard.aspx?__viewstate=1'
+                            f"{event.data['url']}/Install/InstallWizard.aspx?__viewstate=1"
                         )
                         if result_confirm.status_code == 500:
                             description = "DotNetNuke InstallWizard SuperUser Privilege Escalation"
@@ -165,11 +165,11 @@ class dotnetnuke(BaseModule):
                                 "severity": "CRITICAL",
                                 "description": description,
                                 "host": str(event.host),
-                                "url": f'{event.data["url"]}/Install/InstallWizard.aspx',
+                                "url": f"{event.data['url']}/Install/InstallWizard.aspx",
                             },
                             "VULNERABILITY",
                             event,
-                            context=f'{{module}} scanned {event.data["url"]} and found critical {{event.type}}: {description}',
+                            context=f"{{module}} scanned {event.data['url']} and found critical {{event.type}}: {description}",
                         )
         return
 
@@ -180,7 +180,7 @@ class dotnetnuke(BaseModule):
             self.interactsh_subdomain_tags[subdomain_tag] = event
 
             await self.helpers.request(
-                f'{event.data["url"]}/DnnImageHandler.ashx?mode=file&url=http://{subdomain_tag}.{self.interactsh_domain}'
+                f"{event.data['url']}/DnnImageHandler.ashx?mode=file&url=http://{subdomain_tag}.{self.interactsh_domain}"
             )
         else:
             self.debug(
bbot/modules/extractous.py CHANGED
@@ -65,7 +65,7 @@ class extractous(BaseModule):
         "extensions": "File extensions to parse",
     }
 
-    deps_pip = ["extractous"]
+    deps_pip = ["extractous~=0.3.0"]
     scope_distance_modifier = 1
 
     async def setup(self):
bbot/modules/ffuf_shortnames.py CHANGED
@@ -1,3 +1,4 @@
+import pickle
 import re
 import random
 import string
@@ -5,32 +6,10 @@ import string
 
 from bbot.modules.deadly.ffuf import ffuf
 
 
-def find_common_prefixes(strings, minimum_set_length=4):
-    prefix_candidates = [s[:i] for s in strings if len(s) == 6 for i in range(3, 6)]
-    frequency_dict = {item: prefix_candidates.count(item) for item in prefix_candidates}
-    frequency_dict = {k: v for k, v in frequency_dict.items() if v >= minimum_set_length}
-    prefix_list = list(set(frequency_dict.keys()))
-
-    found_prefixes = set()
-    for prefix in prefix_list:
-        prefix_frequency = frequency_dict[prefix]
-        is_substring = False
-
-        for k, v in frequency_dict.items():
-            if prefix != k:
-                if prefix in k:
-                    is_substring = True
-        if not is_substring:
-            found_prefixes.add(prefix)
-        else:
-            if prefix_frequency > v and (len(k) - len(prefix) == 1):
-                found_prefixes.add(prefix)
-    return list(found_prefixes)
-
-
 class ffuf_shortnames(ffuf):
     watched_events = ["URL_HINT"]
     produced_events = ["URL_UNVERIFIED"]
+    deps_pip = ["numpy"]
     flags = ["aggressive", "active", "iis-shortnames", "web-thorough"]
     meta = {
         "description": "Use ffuf in combination IIS shortnames",
@@ -41,54 +20,118 @@ class ffuf_shortnames(ffuf):
     options = {
         "wordlist": "",  # default is defined within setup function
         "wordlist_extensions": "",  # default is defined within setup function
-        "lines": 1000000,
         "max_depth": 1,
         "version": "2.0.0",
         "extensions": "",
         "ignore_redirects": True,
         "find_common_prefixes": False,
         "find_delimiters": True,
+        "max_predictions": 250,
     }
 
     options_desc = {
         "wordlist": "Specify wordlist to use when finding directories",
         "wordlist_extensions": "Specify wordlist to use when making extension lists",
-        "lines": "take only the first N lines from the wordlist when finding directories",
         "max_depth": "the maximum directory depth to attempt to solve",
         "version": "ffuf version",
         "extensions": "Optionally include a list of extensions to extend the keyword with (comma separated)",
         "ignore_redirects": "Explicitly ignore redirects (301,302)",
         "find_common_prefixes": "Attempt to automatically detect common prefixes and make additional ffuf runs against them",
         "find_delimiters": "Attempt to detect common delimiters and make additional ffuf runs against them",
+        "max_predictions": "The maximum number of predictions to generate per shortname prefix",
     }
 
     deps_common = ["ffuf"]
 
     in_scope_only = True
 
+    def generate_templist(self, prefix, shortname_type):
+        virtual_file = []
+
+        for prediction, score in self.predict(prefix, self.max_predictions, model=shortname_type):
+            self.debug(f"Got prediction: [{prediction}] from prefix [{prefix}] with score [{score}]")
+            virtual_file.append(prediction)
+        virtual_file.append(self.canary)
+        return self.helpers.tempfile(virtual_file, pipe=False), len(virtual_file)
+
+    def predict(self, prefix, n=25, model="endpoint"):
+        predictor_name = f"{model}_predictor"
+        predictor = getattr(self, predictor_name)
+        return predictor.predict(prefix, n)
+
+    @staticmethod
+    def find_common_prefixes(strings, minimum_set_length=4):
+        prefix_candidates = [s[:i] for s in strings if len(s) == 6 for i in range(3, 6)]
+        frequency_dict = {item: prefix_candidates.count(item) for item in prefix_candidates}
+        frequency_dict = {k: v for k, v in frequency_dict.items() if v >= minimum_set_length}
+        prefix_list = list(set(frequency_dict.keys()))
+
+        found_prefixes = set()
+        for prefix in prefix_list:
+            prefix_frequency = frequency_dict[prefix]
+            is_substring = False
+
+            for k, v in frequency_dict.items():
+                if prefix != k:
+                    if prefix in k:
+                        is_substring = True
+            if not is_substring:
+                found_prefixes.add(prefix)
+            else:
+                if prefix_frequency > v and (len(k) - len(prefix) == 1):
+                    found_prefixes.add(prefix)
+        return list(found_prefixes)
+
     async def setup(self):
         self.proxy = self.scan.web_config.get("http_proxy", "")
         self.canary = "".join(random.choice(string.ascii_lowercase) for i in range(10))
-        wordlist = self.config.get("wordlist", "")
-        if not wordlist:
-            wordlist = f"{self.helpers.wordlist_dir}/ffuf_shortname_candidates.txt"
-        self.debug(f"Using [{wordlist}] for shortname candidate list")
-        self.wordlist = await self.helpers.wordlist(wordlist)
-        self.wordlist_lines = self.generate_wordlist(self.wordlist)
-
         wordlist_extensions = self.config.get("wordlist_extensions", "")
         if not wordlist_extensions:
             wordlist_extensions = f"{self.helpers.wordlist_dir}/raft-small-extensions-lowercase_CLEANED.txt"
         self.debug(f"Using [{wordlist_extensions}] for shortname candidate extension list")
         self.wordlist_extensions = await self.helpers.wordlist(wordlist_extensions)
+        self.ignore_redirects = self.config.get("ignore_redirects")
+        self.max_predictions = self.config.get("max_predictions")
 
-        try:
-            self.extensions = self.helpers.chain_lists(self.config.get("extensions", ""), validate=True)
-            self.debug(f"Using custom extensions: [{','.join(self.extensions)}]")
-        except ValueError as e:
-            return False, f"Error parsing extensions: {e}"
+        class MinimalWordPredictor:
+            def __init__(self):
+                self.word_frequencies = {}
 
-        self.ignore_redirects = self.config.get("ignore_redirects")
+            def predict(self, prefix, top_n):
+                prefix = prefix.lower()
+                matches = [(word, freq) for word, freq in self.word_frequencies.items() if word.startswith(prefix)]
+
+                if not matches:
+                    return []
+
+                matches.sort(key=lambda x: x[1], reverse=True)
+                matches = matches[:top_n]
+
+                max_freq = matches[0][1]
+                return [(word, freq / max_freq) for word, freq in matches]
+
+        class CustomUnpickler(pickle.Unpickler):
+            def find_class(self, module, name):
+                if name == "MinimalWordPredictor":
+                    return MinimalWordPredictor
+                return super().find_class(module, name)
+
+        endpoint_model = await self.helpers.download(
+            "https://raw.githubusercontent.com/blacklanternsecurity/wordpredictor/refs/heads/main/trained_models/endpoints.bin"
+        )
+        directory_model = await self.helpers.download(
+            "https://raw.githubusercontent.com/blacklanternsecurity/wordpredictor/refs/heads/main/trained_models/directories.bin"
+        )
+
+        self.debug(f"Loading endpoint model from: {endpoint_model}")
+        with open(endpoint_model, "rb") as f:
+            unpickler = CustomUnpickler(f)
+            self.endpoint_predictor = unpickler.load()
+
+        self.debug(f"Loading directory model from: {directory_model}")
+        with open(directory_model, "rb") as f:
+            unpickler = CustomUnpickler(f)
+            self.directory_predictor = unpickler.load()
 
         self.per_host_collection = {}
         self.shortname_to_event = {}
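
A note on the model loading above: deserializing downloaded files with `pickle` is normally a code-execution risk, and the `CustomUnpickler` mitigates it by intercepting `find_class` and remapping `MinimalWordPredictor` to the local class. As written it still falls through to `super().find_class()` for other names, so it remaps rather than strictly restricts; a harder variant of the same `pickle.Unpickler` pattern rejects everything outside an allow-list (a sketch, not BBOT's code):

import io
import pickle


class MinimalWordPredictor:
    def __init__(self):
        self.word_frequencies = {}


class AllowListUnpickler(pickle.Unpickler):
    ALLOWED = {"MinimalWordPredictor": MinimalWordPredictor}

    def find_class(self, module, name):
        # Resolve only allow-listed classes; refuse everything else outright
        try:
            return self.ALLOWED[name]
        except KeyError:
            raise pickle.UnpicklingError(f"blocked unpickling of {module}.{name}")


# Round-trip demo: a legitimate model loads, arbitrary classes do not
model = MinimalWordPredictor()
model.word_frequencies = {"admin": 10, "administrator": 3}
data = pickle.dumps(model)
loaded = AllowListUnpickler(io.BytesIO(data)).load()
print(loaded.word_frequencies)  # {'admin': 10, 'administrator': 3}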
@@ -123,6 +166,14 @@ class ffuf_shortnames(ffuf):
     async def handle_event(self, event):
         filename_hint = re.sub(r"~\d", "", event.parsed_url.path.rsplit(".", 1)[0].split("/")[-1]).lower()
 
+        if "shortname-endpoint" in event.tags:
+            shortname_type = "endpoint"
+        elif "shortname-directory" in event.tags:
+            shortname_type = "directory"
+        else:
+            self.error("ffuf_shortnames received URL_HINT without proper 'shortname-' tag")
+            return
+
         host = f"{event.parent.parsed_url.scheme}://{event.parent.parsed_url.netloc}/"
         if host not in self.per_host_collection.keys():
             self.per_host_collection[host] = [(filename_hint, event.parent.data)]
@@ -135,11 +186,11 @@ class ffuf_shortnames(ffuf):
         root_stub = "/".join(event.parsed_url.path.split("/")[:-1])
         root_url = f"{event.parsed_url.scheme}://{event.parsed_url.netloc}{root_stub}/"
 
-        if "shortname-file" in event.tags:
+        if shortname_type == "endpoint":
             used_extensions = self.build_extension_list(event)
 
         if len(filename_hint) == 6:
-            tempfile, tempfile_len = self.generate_templist(prefix=filename_hint)
+            tempfile, tempfile_len = self.generate_templist(filename_hint, shortname_type)
             self.verbose(
                 f"generated temp word list of size [{str(tempfile_len)}] for filename hint: [{filename_hint}]"
             )
@@ -149,7 +200,7 @@ class ffuf_shortnames(ffuf):
             tempfile_len = 1
 
         if tempfile_len > 0:
-            if "shortname-file" in event.tags:
+            if shortname_type == "endpoint":
                 for ext in used_extensions:
                     async for r in self.execute_ffuf(tempfile, root_url, suffix=f".{ext}"):
                         await self.emit_event(
@@ -160,7 +211,7 @@ class ffuf_shortnames(ffuf):
                             context=f"{{module}} brute-forced {ext.upper()} files at {root_url} and found {{event.type}}: {{event.data}}",
                         )
 
-            elif "shortname-directory" in event.tags:
+            elif shortname_type == "directory":
                 async for r in self.execute_ffuf(tempfile, root_url, exts=["/"]):
                     r_url = f"{r['url'].rstrip('/')}/"
                     await self.emit_event(
@@ -177,7 +228,7 @@ class ffuf_shortnames(ffuf):
                 if delimiter_r:
                     delimiter, prefix, partial_hint = delimiter_r
                     self.verbose(f"Detected delimiter [{delimiter}] in hint [{filename_hint}]")
-                    tempfile, tempfile_len = self.generate_templist(prefix=partial_hint)
+                    tempfile, tempfile_len = self.generate_templist(partial_hint, "directory")
                     ffuf_prefix = f"{prefix}{delimiter}"
                     async for r in self.execute_ffuf(tempfile, root_url, prefix=ffuf_prefix, exts=["/"]):
                         await self.emit_event(
@@ -188,13 +239,13 @@ class ffuf_shortnames(ffuf):
                             context=f'{{module}} brute-forced directories with detected prefix "{ffuf_prefix}" and found {{event.type}}: {{event.data}}',
                         )
 
-            elif "shortname-file" in event.tags:
+            elif "shortname-endpoint" in event.tags:
                 for ext in used_extensions:
                     delimiter_r = self.find_delimiter(filename_hint)
                     if delimiter_r:
                         delimiter, prefix, partial_hint = delimiter_r
                         self.verbose(f"Detected delimiter [{delimiter}] in hint [{filename_hint}]")
-                        tempfile, tempfile_len = self.generate_templist(prefix=partial_hint)
+                        tempfile, tempfile_len = self.generate_templist(partial_hint, "endpoint")
                         ffuf_prefix = f"{prefix}{delimiter}"
                         async for r in self.execute_ffuf(tempfile, root_url, prefix=ffuf_prefix, suffix=f".{ext}"):
                             await self.emit_event(
@@ -213,17 +264,25 @@ class ffuf_shortnames(ffuf):
         for host, hint_tuple_list in per_host_collection.items():
             hint_list = [x[0] for x in hint_tuple_list]
 
-            common_prefixes = find_common_prefixes(hint_list)
+            common_prefixes = self.find_common_prefixes(hint_list)
             for prefix in common_prefixes:
                 self.verbose(f"Found common prefix: [{prefix}] for host [{host}]")
                 for hint_tuple in hint_tuple_list:
                     hint, url = hint_tuple
                     if hint.startswith(prefix):
+                        if "shortname-endpoint" in self.shortname_to_event[hint].tags:
+                            shortname_type = "endpoint"
+                        elif "shortname-directory" in self.shortname_to_event[hint].tags:
+                            shortname_type = "directory"
+                        else:
+                            self.error("ffuf_shortnames received URL_HINT without proper 'shortname-' tag")
+                            continue
+
                         partial_hint = hint[len(prefix) :]
 
                         # safeguard to prevent loading the entire wordlist
                         if len(partial_hint) > 0:
-                            tempfile, tempfile_len = self.generate_templist(prefix=partial_hint)
+                            tempfile, tempfile_len = self.generate_templist(partial_hint, shortname_type)
 
                             if "shortname-directory" in self.shortname_to_event[hint].tags:
                                 self.verbose(
@@ -238,7 +297,7 @@ class ffuf_shortnames(ffuf):
                                         tags=[f"status-{r['status']}"],
                                         context=f'{{module}} brute-forced directories with common prefix "{prefix}" and found {{event.type}}: {{event.data}}',
                                     )
-                            elif "shortname-file" in self.shortname_to_event[hint].tags:
+                            elif shortname_type == "endpoint":
                                 used_extensions = self.build_extension_list(self.shortname_to_event[hint])
 
                                 for ext in used_extensions:
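
For a concrete sense of what the per-host prefix pass above feeds on: `find_common_prefixes` (now a staticmethod, defined in the setup hunk) takes the 6-character shortname hints collected for a host and returns 3-to-5-character prefixes shared by at least `minimum_set_length` of them, keeping only the longest non-redundant form. Assuming that function is in scope as defined above:

# Hypothetical hints, e.g. derived from IIS 8.3 shortnames like REPOR~1.ASP
hints = ["report", "repor1", "repor2", "reporx", "webcfg"]
print(find_common_prefixes(hints))
# ['repor'] -- shared by 4 of the hints; 'rep' and 'repo' are dropped
# as redundant substrings of the longer prefix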
bbot/modules/filedownload.py CHANGED
@@ -65,6 +65,7 @@ class filedownload(BaseModule):
             "swp",  # Swap File (temporary file, often Vim)
             "sxw",  # OpenOffice.org Writer document
             "tar.gz",  # Gzip-Compressed Tar Archive
+            "tgz",  # Gzip-Compressed Tar Archive
             "tar",  # Tar Archive
             "txt",  # Plain Text Document
             "vbs",  # Visual Basic Script
@@ -76,6 +77,11 @@ class filedownload(BaseModule):
             "yaml",  # YAML Ain't Markup Language
             "yml",  # YAML Ain't Markup Language
             "zip",  # Zip Archive
+            "lzma",  # LZMA Compressed File
+            "rar",  # RAR Compressed File
+            "7z",  # 7-Zip Compressed File
+            "xz",  # XZ Compressed File
+            "bz2",  # Bzip2 Compressed File
         ],
         "max_filesize": "10MB",
         "base_64_encoded_file": "false",