bbot 2.0.1.4720rc0__py3-none-any.whl → 2.3.0.5397rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.
Files changed (267)
  1. bbot/__init__.py +1 -1
  2. bbot/cli.py +3 -7
  3. bbot/core/config/files.py +0 -1
  4. bbot/core/config/logger.py +34 -4
  5. bbot/core/core.py +21 -4
  6. bbot/core/engine.py +9 -8
  7. bbot/core/event/base.py +131 -52
  8. bbot/core/helpers/bloom.py +10 -3
  9. bbot/core/helpers/command.py +8 -7
  10. bbot/core/helpers/depsinstaller/installer.py +31 -13
  11. bbot/core/helpers/diff.py +10 -10
  12. bbot/core/helpers/dns/brute.py +7 -4
  13. bbot/core/helpers/dns/dns.py +1 -2
  14. bbot/core/helpers/dns/engine.py +4 -6
  15. bbot/core/helpers/dns/helpers.py +2 -2
  16. bbot/core/helpers/dns/mock.py +0 -1
  17. bbot/core/helpers/files.py +1 -1
  18. bbot/core/helpers/helper.py +7 -4
  19. bbot/core/helpers/interactsh.py +3 -3
  20. bbot/core/helpers/libmagic.py +65 -0
  21. bbot/core/helpers/misc.py +65 -22
  22. bbot/core/helpers/names_generator.py +17 -3
  23. bbot/core/helpers/process.py +0 -20
  24. bbot/core/helpers/regex.py +1 -1
  25. bbot/core/helpers/regexes.py +12 -6
  26. bbot/core/helpers/validators.py +1 -2
  27. bbot/core/helpers/web/client.py +1 -1
  28. bbot/core/helpers/web/engine.py +1 -2
  29. bbot/core/helpers/web/web.py +4 -114
  30. bbot/core/helpers/wordcloud.py +5 -5
  31. bbot/core/modules.py +36 -27
  32. bbot/core/multiprocess.py +58 -0
  33. bbot/core/shared_deps.py +46 -3
  34. bbot/db/sql/models.py +147 -0
  35. bbot/defaults.yml +11 -9
  36. bbot/modules/anubisdb.py +2 -2
  37. bbot/modules/apkpure.py +63 -0
  38. bbot/modules/azure_tenant.py +2 -2
  39. bbot/modules/baddns.py +35 -19
  40. bbot/modules/baddns_direct.py +92 -0
  41. bbot/modules/baddns_zone.py +3 -8
  42. bbot/modules/badsecrets.py +4 -3
  43. bbot/modules/base.py +195 -51
  44. bbot/modules/bevigil.py +7 -7
  45. bbot/modules/binaryedge.py +7 -4
  46. bbot/modules/bufferoverrun.py +47 -0
  47. bbot/modules/builtwith.py +6 -10
  48. bbot/modules/bypass403.py +5 -5
  49. bbot/modules/c99.py +10 -7
  50. bbot/modules/censys.py +9 -13
  51. bbot/modules/certspotter.py +5 -3
  52. bbot/modules/chaos.py +9 -7
  53. bbot/modules/code_repository.py +1 -0
  54. bbot/modules/columbus.py +3 -3
  55. bbot/modules/crt.py +5 -3
  56. bbot/modules/deadly/dastardly.py +1 -1
  57. bbot/modules/deadly/ffuf.py +9 -9
  58. bbot/modules/deadly/nuclei.py +3 -3
  59. bbot/modules/deadly/vhost.py +4 -3
  60. bbot/modules/dehashed.py +1 -1
  61. bbot/modules/digitorus.py +1 -1
  62. bbot/modules/dnsbimi.py +145 -0
  63. bbot/modules/dnscaa.py +3 -3
  64. bbot/modules/dnsdumpster.py +4 -4
  65. bbot/modules/dnstlsrpt.py +144 -0
  66. bbot/modules/docker_pull.py +7 -5
  67. bbot/modules/dockerhub.py +2 -2
  68. bbot/modules/dotnetnuke.py +18 -19
  69. bbot/modules/emailformat.py +1 -1
  70. bbot/modules/extractous.py +122 -0
  71. bbot/modules/filedownload.py +9 -7
  72. bbot/modules/fullhunt.py +7 -4
  73. bbot/modules/generic_ssrf.py +5 -5
  74. bbot/modules/github_codesearch.py +3 -2
  75. bbot/modules/github_org.py +4 -4
  76. bbot/modules/github_workflows.py +4 -4
  77. bbot/modules/gitlab.py +2 -5
  78. bbot/modules/google_playstore.py +93 -0
  79. bbot/modules/gowitness.py +48 -50
  80. bbot/modules/hackertarget.py +5 -3
  81. bbot/modules/host_header.py +5 -5
  82. bbot/modules/httpx.py +1 -4
  83. bbot/modules/hunterio.py +3 -9
  84. bbot/modules/iis_shortnames.py +19 -30
  85. bbot/modules/internal/cloudcheck.py +27 -12
  86. bbot/modules/internal/dnsresolve.py +22 -20
  87. bbot/modules/internal/excavate.py +85 -48
  88. bbot/modules/internal/speculate.py +41 -32
  89. bbot/modules/internetdb.py +4 -2
  90. bbot/modules/ip2location.py +3 -5
  91. bbot/modules/ipneighbor.py +1 -1
  92. bbot/modules/ipstack.py +3 -8
  93. bbot/modules/jadx.py +87 -0
  94. bbot/modules/leakix.py +11 -10
  95. bbot/modules/myssl.py +2 -2
  96. bbot/modules/newsletters.py +2 -2
  97. bbot/modules/otx.py +5 -3
  98. bbot/modules/output/asset_inventory.py +7 -7
  99. bbot/modules/output/base.py +1 -1
  100. bbot/modules/output/csv.py +1 -1
  101. bbot/modules/output/http.py +20 -14
  102. bbot/modules/output/mysql.py +51 -0
  103. bbot/modules/output/neo4j.py +7 -2
  104. bbot/modules/output/postgres.py +49 -0
  105. bbot/modules/output/slack.py +0 -1
  106. bbot/modules/output/sqlite.py +29 -0
  107. bbot/modules/output/stdout.py +2 -2
  108. bbot/modules/output/teams.py +107 -6
  109. bbot/modules/paramminer_headers.py +5 -8
  110. bbot/modules/passivetotal.py +13 -13
  111. bbot/modules/portscan.py +32 -6
  112. bbot/modules/postman.py +50 -126
  113. bbot/modules/postman_download.py +220 -0
  114. bbot/modules/rapiddns.py +3 -8
  115. bbot/modules/report/asn.py +11 -11
  116. bbot/modules/robots.py +3 -3
  117. bbot/modules/securitytrails.py +7 -10
  118. bbot/modules/securitytxt.py +1 -1
  119. bbot/modules/shodan_dns.py +7 -9
  120. bbot/modules/sitedossier.py +1 -1
  121. bbot/modules/skymem.py +2 -2
  122. bbot/modules/social.py +2 -1
  123. bbot/modules/subdomaincenter.py +1 -1
  124. bbot/modules/subdomainradar.py +160 -0
  125. bbot/modules/telerik.py +8 -8
  126. bbot/modules/templates/bucket.py +1 -1
  127. bbot/modules/templates/github.py +22 -14
  128. bbot/modules/templates/postman.py +21 -0
  129. bbot/modules/templates/shodan.py +14 -13
  130. bbot/modules/templates/sql.py +95 -0
  131. bbot/modules/templates/subdomain_enum.py +51 -16
  132. bbot/modules/templates/webhook.py +2 -4
  133. bbot/modules/trickest.py +8 -37
  134. bbot/modules/trufflehog.py +10 -12
  135. bbot/modules/url_manipulation.py +3 -3
  136. bbot/modules/urlscan.py +1 -1
  137. bbot/modules/viewdns.py +1 -1
  138. bbot/modules/virustotal.py +8 -30
  139. bbot/modules/wafw00f.py +1 -1
  140. bbot/modules/wayback.py +1 -1
  141. bbot/modules/wpscan.py +17 -11
  142. bbot/modules/zoomeye.py +11 -6
  143. bbot/presets/baddns-thorough.yml +12 -0
  144. bbot/presets/fast.yml +16 -0
  145. bbot/presets/kitchen-sink.yml +1 -0
  146. bbot/presets/spider.yml +4 -0
  147. bbot/presets/subdomain-enum.yml +7 -7
  148. bbot/scanner/manager.py +5 -16
  149. bbot/scanner/preset/args.py +44 -26
  150. bbot/scanner/preset/environ.py +7 -2
  151. bbot/scanner/preset/path.py +7 -4
  152. bbot/scanner/preset/preset.py +36 -23
  153. bbot/scanner/scanner.py +172 -62
  154. bbot/scanner/target.py +236 -434
  155. bbot/scripts/docs.py +1 -1
  156. bbot/test/bbot_fixtures.py +13 -3
  157. bbot/test/conftest.py +132 -100
  158. bbot/test/fastapi_test.py +17 -0
  159. bbot/test/owasp_mastg.apk +0 -0
  160. bbot/test/run_tests.sh +4 -4
  161. bbot/test/test.conf +2 -0
  162. bbot/test/test_step_1/test_bbot_fastapi.py +82 -0
  163. bbot/test/test_step_1/test_bloom_filter.py +2 -0
  164. bbot/test/test_step_1/test_cli.py +138 -64
  165. bbot/test/test_step_1/test_dns.py +62 -25
  166. bbot/test/test_step_1/test_engine.py +17 -17
  167. bbot/test/test_step_1/test_events.py +183 -28
  168. bbot/test/test_step_1/test_helpers.py +64 -28
  169. bbot/test/test_step_1/test_manager_deduplication.py +1 -1
  170. bbot/test/test_step_1/test_manager_scope_accuracy.py +333 -330
  171. bbot/test/test_step_1/test_modules_basic.py +68 -70
  172. bbot/test/test_step_1/test_presets.py +184 -96
  173. bbot/test/test_step_1/test_python_api.py +7 -2
  174. bbot/test/test_step_1/test_regexes.py +35 -5
  175. bbot/test/test_step_1/test_scan.py +39 -5
  176. bbot/test/test_step_1/test_scope.py +4 -3
  177. bbot/test/test_step_1/test_target.py +243 -145
  178. bbot/test/test_step_1/test_web.py +14 -8
  179. bbot/test/test_step_2/module_tests/base.py +15 -7
  180. bbot/test/test_step_2/module_tests/test_module_anubisdb.py +1 -1
  181. bbot/test/test_step_2/module_tests/test_module_apkpure.py +71 -0
  182. bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +0 -1
  183. bbot/test/test_step_2/module_tests/test_module_azure_realm.py +1 -1
  184. bbot/test/test_step_2/module_tests/test_module_baddns.py +6 -6
  185. bbot/test/test_step_2/module_tests/test_module_baddns_direct.py +62 -0
  186. bbot/test/test_step_2/module_tests/test_module_bevigil.py +29 -2
  187. bbot/test/test_step_2/module_tests/test_module_binaryedge.py +4 -2
  188. bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +2 -2
  189. bbot/test/test_step_2/module_tests/test_module_bucket_azure.py +1 -1
  190. bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +35 -0
  191. bbot/test/test_step_2/module_tests/test_module_builtwith.py +2 -2
  192. bbot/test/test_step_2/module_tests/test_module_bypass403.py +1 -1
  193. bbot/test/test_step_2/module_tests/test_module_c99.py +126 -0
  194. bbot/test/test_step_2/module_tests/test_module_censys.py +4 -1
  195. bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +4 -0
  196. bbot/test/test_step_2/module_tests/test_module_code_repository.py +11 -1
  197. bbot/test/test_step_2/module_tests/test_module_columbus.py +1 -1
  198. bbot/test/test_step_2/module_tests/test_module_credshed.py +3 -3
  199. bbot/test/test_step_2/module_tests/test_module_dastardly.py +2 -1
  200. bbot/test/test_step_2/module_tests/test_module_dehashed.py +2 -2
  201. bbot/test/test_step_2/module_tests/test_module_digitorus.py +1 -1
  202. bbot/test/test_step_2/module_tests/test_module_discord.py +1 -1
  203. bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +103 -0
  204. bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +9 -10
  205. bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py +1 -2
  206. bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +1 -2
  207. bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +4 -4
  208. bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +64 -0
  209. bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py +0 -8
  210. bbot/test/test_step_2/module_tests/test_module_excavate.py +17 -37
  211. bbot/test/test_step_2/module_tests/test_module_extractous.py +54 -0
  212. bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +1 -1
  213. bbot/test/test_step_2/module_tests/test_module_filedownload.py +14 -14
  214. bbot/test/test_step_2/module_tests/test_module_git_clone.py +2 -2
  215. bbot/test/test_step_2/module_tests/test_module_github_org.py +19 -8
  216. bbot/test/test_step_2/module_tests/test_module_github_workflows.py +1 -1
  217. bbot/test/test_step_2/module_tests/test_module_gitlab.py +9 -4
  218. bbot/test/test_step_2/module_tests/test_module_google_playstore.py +83 -0
  219. bbot/test/test_step_2/module_tests/test_module_gowitness.py +4 -4
  220. bbot/test/test_step_2/module_tests/test_module_host_header.py +1 -1
  221. bbot/test/test_step_2/module_tests/test_module_http.py +4 -4
  222. bbot/test/test_step_2/module_tests/test_module_httpx.py +10 -8
  223. bbot/test/test_step_2/module_tests/test_module_hunterio.py +68 -4
  224. bbot/test/test_step_2/module_tests/test_module_jadx.py +55 -0
  225. bbot/test/test_step_2/module_tests/test_module_json.py +22 -9
  226. bbot/test/test_step_2/module_tests/test_module_leakix.py +7 -3
  227. bbot/test/test_step_2/module_tests/test_module_mysql.py +76 -0
  228. bbot/test/test_step_2/module_tests/test_module_myssl.py +1 -1
  229. bbot/test/test_step_2/module_tests/test_module_neo4j.py +1 -1
  230. bbot/test/test_step_2/module_tests/test_module_newsletters.py +6 -6
  231. bbot/test/test_step_2/module_tests/test_module_ntlm.py +7 -7
  232. bbot/test/test_step_2/module_tests/test_module_oauth.py +1 -1
  233. bbot/test/test_step_2/module_tests/test_module_otx.py +1 -1
  234. bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py +1 -2
  235. bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +0 -6
  236. bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +2 -9
  237. bbot/test/test_step_2/module_tests/test_module_passivetotal.py +3 -1
  238. bbot/test/test_step_2/module_tests/test_module_portscan.py +9 -8
  239. bbot/test/test_step_2/module_tests/test_module_postgres.py +74 -0
  240. bbot/test/test_step_2/module_tests/test_module_postman.py +84 -253
  241. bbot/test/test_step_2/module_tests/test_module_postman_download.py +439 -0
  242. bbot/test/test_step_2/module_tests/test_module_rapiddns.py +93 -1
  243. bbot/test/test_step_2/module_tests/test_module_shodan_dns.py +20 -1
  244. bbot/test/test_step_2/module_tests/test_module_sitedossier.py +2 -2
  245. bbot/test/test_step_2/module_tests/test_module_smuggler.py +1 -1
  246. bbot/test/test_step_2/module_tests/test_module_social.py +11 -1
  247. bbot/test/test_step_2/module_tests/test_module_speculate.py +2 -6
  248. bbot/test/test_step_2/module_tests/test_module_splunk.py +4 -4
  249. bbot/test/test_step_2/module_tests/test_module_sqlite.py +18 -0
  250. bbot/test/test_step_2/module_tests/test_module_sslcert.py +1 -1
  251. bbot/test/test_step_2/module_tests/test_module_stdout.py +5 -3
  252. bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py +1 -1
  253. bbot/test/test_step_2/module_tests/test_module_subdomainradar.py +208 -0
  254. bbot/test/test_step_2/module_tests/test_module_subdomains.py +1 -1
  255. bbot/test/test_step_2/module_tests/test_module_teams.py +8 -6
  256. bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
  257. bbot/test/test_step_2/module_tests/test_module_trufflehog.py +317 -14
  258. bbot/test/test_step_2/module_tests/test_module_wayback.py +1 -1
  259. bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py +2 -2
  260. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/METADATA +48 -18
  261. bbot-2.3.0.5397rc0.dist-info/RECORD +421 -0
  262. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/WHEEL +1 -1
  263. bbot/modules/unstructured.py +0 -163
  264. bbot/test/test_step_2/module_tests/test_module_unstructured.py +0 -102
  265. bbot-2.0.1.4720rc0.dist-info/RECORD +0 -387
  266. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/LICENSE +0 -0
  267. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/entry_points.txt +0 -0
bbot/modules/censys.py CHANGED
@@ -15,25 +15,21 @@ class censys(subdomain_enum_apikey):
         "author": "@TheTechromancer",
         "auth_required": True,
     }
-    options = {"api_id": "", "api_secret": "", "max_pages": 5}
+    options = {"api_key": "", "max_pages": 5}
     options_desc = {
-        "api_id": "Censys.io API ID",
-        "api_secret": "Censys.io API Secret",
+        "api_key": "Censys.io API Key in the format of 'key:secret'",
         "max_pages": "Maximum number of pages to fetch (100 results per page)",
     }

     base_url = "https://search.censys.io/api"

     async def setup(self):
-        self.api_id = self.config.get("api_id", "")
-        self.api_secret = self.config.get("api_secret", "")
-        self.auth = (self.api_id, self.api_secret)
         self.max_pages = self.config.get("max_pages", 5)
         return await super().setup()

     async def ping(self):
         url = f"{self.base_url}/v1/account"
-        resp = await self.helpers.request(url, auth=self.auth)
+        resp = await self.api_request(url)
         d = resp.json()
         assert isinstance(d, dict), f"Invalid response from {url}: {resp}"
         quota = d.get("quota", {})
@@ -41,6 +37,11 @@ class censys(subdomain_enum_apikey):
         allowance = int(quota.get("allowance", 0))
         assert used < allowance, "No quota remaining"

+    def prepare_api_request(self, url, kwargs):
+        api_id, api_secret = self.api_key.split(":", 1)
+        kwargs["auth"] = (api_id, api_secret)
+        return url, kwargs
+
     async def query(self, query):
         results = set()
         cursor = ""
@@ -52,11 +53,10 @@ class censys(subdomain_enum_apikey):
             }
             if cursor:
                 json_data.update({"cursor": cursor})
-            resp = await self.helpers.request(
+            resp = await self.api_request(
                 url,
                 method="POST",
                 json=json_data,
-                auth=self.auth,
             )

             if resp is None:
@@ -96,7 +96,3 @@ class censys(subdomain_enum_apikey):
                     break

         return results
-
-    @property
-    def auth_secret(self):
-        return self.api_id and self.api_secret
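
With this change the censys module authenticates through a single `api_key` option in `key:secret` form, which `prepare_api_request` splits into an HTTP basic-auth tuple. A minimal standalone sketch of that splitting logic (the free function below is illustrative only, not BBOT's actual API):

def prepare_basic_auth(api_key: str, url: str, kwargs: dict) -> tuple[str, dict]:
    # Split only on the first ":" so secrets that themselves contain ":" stay intact.
    api_id, api_secret = api_key.split(":", 1)
    # Basic-auth tuple, in the form accepted by httpx/requests-style clients.
    kwargs["auth"] = (api_id, api_secret)
    return url, kwargs

if __name__ == "__main__":
    url, kwargs = prepare_basic_auth("my-id:my-secret", "https://search.censys.io/api/v1/account", {})
    print(url, kwargs)  # -> https://search.censys.io/api/v1/account {'auth': ('my-id', 'my-secret')}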
bbot/modules/certspotter.py CHANGED
@@ -15,11 +15,13 @@ class certspotter(subdomain_enum):

     def request_url(self, query):
         url = f"{self.base_url}/issuances?domain={self.helpers.quote(query)}&include_subdomains=true&expand=dns_names"
-        return self.request_with_fail_count(url, timeout=self.http_timeout + 30)
+        return self.api_request(url, timeout=self.http_timeout + 30)

-    def parse_results(self, r, query):
+    async def parse_results(self, r, query):
+        results = set()
         json = r.json()
         if json:
             for r in json:
                 for dns_name in r.get("dns_names", []):
-                    yield dns_name.lstrip(".*").rstrip(".")
+                    results.add(dns_name.lstrip(".*").rstrip("."))
+        return results
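
The same refactor recurs across the subdomain-enumeration modules in this release (certspotter above; chaos, columbus, crt and digitorus below): `parse_results` changes from a synchronous generator into an `async def` that collects results into a set and returns it. A minimal sketch of the new shape, using a hypothetical response object rather than BBOT's own helpers:

import asyncio

class FakeResponse:
    """Stand-in for an HTTP response with a .json() method (illustrative only)."""

    def __init__(self, data):
        self._data = data

    def json(self):
        return self._data

async def parse_results(r, query):
    # New style: accumulate into a set and return it, instead of yielding one result at a time.
    results = set()
    for dns_name in r.json():
        results.add(dns_name.lstrip(".*").rstrip("."))
    return results

if __name__ == "__main__":
    resp = FakeResponse(["*.www.example.com.", "mail.example.com"])
    print(asyncio.run(parse_results(resp, "example.com")))
    # -> {'www.example.com', 'mail.example.com'}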
bbot/modules/chaos.py CHANGED
@@ -15,18 +15,19 @@ class chaos(subdomain_enum_apikey):
     options_desc = {"api_key": "Chaos API key"}

     base_url = "https://dns.projectdiscovery.io/dns"
+    ping_url = f"{base_url}/example.com"

-    async def ping(self):
-        url = f"{self.base_url}/example.com"
-        response = await self.request_with_fail_count(url, headers={"Authorization": self.api_key})
-        assert response.json()["domain"] == "example.com"
+    def prepare_api_request(self, url, kwargs):
+        kwargs["headers"]["Authorization"] = self.api_key
+        return url, kwargs

     async def request_url(self, query):
         _, domain = self.helpers.split_domain(query)
         url = f"{self.base_url}/{domain}/subdomains"
-        return await self.request_with_fail_count(url, headers={"Authorization": self.api_key})
+        return await self.api_request(url)

-    def parse_results(self, r, query):
+    async def parse_results(self, r, query):
+        results = set()
         j = r.json()
         subdomains_set = set()
         if isinstance(j, dict):
@@ -39,4 +40,5 @@ class chaos(subdomain_enum_apikey):
         for s in subdomains_set:
             full_subdomain = f"{s}.{domain}"
             if full_subdomain and full_subdomain.endswith(f".{query}"):
-                yield full_subdomain
+                results.add(full_subdomain)
+        return results
bbot/modules/code_repository.py CHANGED
@@ -19,6 +19,7 @@ class code_repository(BaseModule):
             (r"gitlab.(?:com|org)/[a-zA-Z0-9_-]+/[a-zA-Z0-9_-]+", False),
         ],
         "docker": (r"hub.docker.com/r/[a-zA-Z0-9_-]+/[a-zA-Z0-9_-]+", False),
+        "postman": (r"www.postman.com/[a-zA-Z0-9_-]+/[a-zA-Z0-9_-]+", False),
     }

     scope_distance_modifier = 1
bbot/modules/columbus.py CHANGED
@@ -15,11 +15,11 @@ class columbus(subdomain_enum):

     async def request_url(self, query):
         url = f"{self.base_url}/{self.helpers.quote(query)}?days=365"
-        return await self.request_with_fail_count(url)
+        return await self.api_request(url)

-    def parse_results(self, r, query):
+    async def parse_results(self, r, query):
         results = set()
         json = r.json()
         if json and isinstance(json, list):
-            return set([f"{s.lower()}.{query}" for s in json])
+            return {f"{s.lower()}.{query}" for s in json}
         return results
bbot/modules/crt.py CHANGED
@@ -21,9 +21,10 @@ class crt(subdomain_enum):
     async def request_url(self, query):
         params = {"q": f"%.{query}", "output": "json"}
         url = self.helpers.add_get_params(self.base_url, params).geturl()
-        return await self.request_with_fail_count(url, timeout=self.http_timeout + 30)
+        return await self.api_request(url, timeout=self.http_timeout + 30)

-    def parse_results(self, r, query):
+    async def parse_results(self, r, query):
+        results = set()
         j = r.json()
         for cert_info in j:
             if not type(cert_info) == dict:
@@ -35,4 +36,5 @@ class crt(subdomain_enum):
             domain = cert_info.get("name_value")
             if domain:
                 for d in domain.splitlines():
-                    yield d.lower()
+                    results.add(d.lower())
+        return results
bbot/modules/deadly/dastardly.py CHANGED
@@ -90,7 +90,7 @@ class dastardly(BaseModule):
     def parse_dastardly_xml(self, xml_file):
         try:
             with open(xml_file, "rb") as f:
-                et = etree.parse(f, parser=etree.XMLParser(recover=True))
+                et = etree.parse(f, parser=etree.XMLParser(recover=True, resolve_entities=False))
             for testsuite in et.iter("testsuite"):
                 yield TestSuite(testsuite)
         except FileNotFoundError:
bbot/modules/deadly/ffuf.py CHANGED
@@ -10,7 +10,7 @@ class ffuf(BaseModule):
     watched_events = ["URL"]
     produced_events = ["URL_UNVERIFIED"]
     flags = ["aggressive", "active"]
-    meta = {"description": "A fast web fuzzer written in Go", "created_date": "2022-04-10", "author": "@pmueller"}
+    meta = {"description": "A fast web fuzzer written in Go", "created_date": "2022-04-10", "author": "@liquidsec"}

     options = {
         "wordlist": "https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/Web-Content/raft-small-directories.txt",
@@ -28,7 +28,7 @@ class ffuf(BaseModule):

     deps_common = ["ffuf"]

-    banned_characters = set([" "])
+    banned_characters = {" "}
     blacklist = ["images", "css", "image"]

     in_scope_only = True
@@ -52,7 +52,7 @@ class ffuf(BaseModule):

     async def handle_event(self, event):
         if self.helpers.url_depth(event.data) > self.config.get("max_depth"):
-            self.debug(f"Exceeded max depth, aborting event")
+            self.debug("Exceeded max depth, aborting event")
             return

         # only FFUF against a directory
@@ -122,7 +122,7 @@ class ffuf(BaseModule):
                 continue

             # if the codes are different, we should abort, this should also be a warning, as it is highly unusual behavior
-            if len(set(d["status"] for d in canary_results)) != 1:
+            if len({d["status"] for d in canary_results}) != 1:
                 self.warning("Got different codes for each baseline. This could indicate load balancing")
                 filters[ext] = ["ABORT", "BASELINE_CHANGED_CODES"]
                 continue
@@ -148,7 +148,7 @@ class ffuf(BaseModule):
                 continue

             # we start by seeing if all of the baselines have the same character count
-            if len(set(d["length"] for d in canary_results)) == 1:
+            if len({d["length"] for d in canary_results}) == 1:
                 self.debug("All baseline results had the same char count, we can make a filter on that")
                 filters[ext] = [
                     "-fc",
@@ -161,7 +161,7 @@ class ffuf(BaseModule):
                 continue

             # if that doesn't work we can try words
-            if len(set(d["words"] for d in canary_results)) == 1:
+            if len({d["words"] for d in canary_results}) == 1:
                 self.debug("All baseline results had the same word count, we can make a filter on that")
                 filters[ext] = [
                     "-fc",
@@ -174,7 +174,7 @@ class ffuf(BaseModule):
                 continue

             # as a last resort we will try lines
-            if len(set(d["lines"] for d in canary_results)) == 1:
+            if len({d["lines"] for d in canary_results}) == 1:
                 self.debug("All baseline results had the same word count, we can make a filter on that")
                 filters[ext] = [
                     "-fc",
@@ -252,7 +252,7 @@ class ffuf(BaseModule):
                     self.warning(f"Exiting from FFUF run early, received an ABORT filter: [{filters[ext][1]}]")
                     continue

-                elif filters[ext] == None:
+                elif filters[ext] is None:
                     pass

                 else:
@@ -282,7 +282,7 @@ class ffuf(BaseModule):
             else:
                 if mode == "normal":
                     # before emitting, we are going to send another baseline. This will immediately catch things like a WAF flipping blocking on us mid-scan
-                    if baseline == False:
+                    if baseline is False:
                         pre_emit_temp_canary = [
                             f
                             async for f in self.execute_ffuf(
bbot/modules/deadly/nuclei.py CHANGED
@@ -15,7 +15,7 @@ class nuclei(BaseModule):
     }

     options = {
-        "version": "3.3.2",
+        "version": "3.3.6",
         "tags": "",
         "templates": "",
         "severity": "",
@@ -226,8 +226,8 @@ class nuclei(BaseModule):
                 command.append(f"-{cli_option}")
                 command.append(option)

-        if self.scan.config.get("interactsh_disable") == True:
-            self.info("Disbling interactsh in accordance with global settings")
+        if self.scan.config.get("interactsh_disable") is True:
+            self.info("Disabling interactsh in accordance with global settings")
             command.append("-no-interactsh")

         if self.mode == "technology":
bbot/modules/deadly/vhost.py CHANGED
@@ -23,6 +23,7 @@ class vhost(ffuf):
     }

     deps_common = ["ffuf"]
+    banned_characters = {" ", "."}

     in_scope_only = True

@@ -72,7 +73,7 @@ class vhost(ffuf):

     async def ffuf_vhost(self, host, basehost, event, wordlist=None, skip_dns_host=False):
         filters = await self.baseline_ffuf(f"{host}/", exts=[""], suffix=basehost, mode="hostheader")
-        self.debug(f"Baseline completed and returned these filters:")
+        self.debug("Baseline completed and returned these filters:")
         self.debug(filters)
         if not wordlist:
             wordlist = self.tempfile
@@ -89,7 +90,7 @@ class vhost(ffuf):
                 parent=event,
                 context=f"{{module}} brute-forced virtual hosts for {event.data} and found {{event.type}}: {vhost_str}",
             )
-            if skip_dns_host == False:
+            if skip_dns_host is False:
                 await self.emit_event(
                     f"{vhost_dict['vhost']}{basehost}",
                     "DNS_NAME",
@@ -103,7 +104,7 @@ class vhost(ffuf):
     def mutations_check(self, vhost):
         mutations_list = []
         for mutation in self.helpers.word_cloud.mutations(vhost):
-            for i in ["", ".", "-"]:
+            for i in ["", "-"]:
                 mutations_list.append(i.join(mutation))
         mutations_list_file = self.helpers.tempfile(mutations_list, pipe=False)
         return mutations_list_file
bbot/modules/dehashed.py CHANGED
@@ -90,7 +90,7 @@ class dehashed(subdomain_enum):
         url = f"{self.base_url}?query={query}&size=10000&page=" + "{page}"
         page = 0
         num_entries = 0
-        agen = self.helpers.api_page_iter(url=url, auth=self.auth, headers=self.headers, json=False)
+        agen = self.api_page_iter(url=url, auth=self.auth, headers=self.headers, json=False)
         async for result in agen:
             result_json = {}
             with suppress(Exception):
bbot/modules/digitorus.py CHANGED
@@ -19,7 +19,7 @@ class digitorus(subdomain_enum):
         url = f"{self.base_url}/{self.helpers.quote(query)}"
         return await self.helpers.request(url)

-    def parse_results(self, r, query):
+    async def parse_results(self, r, query):
         results = set()
         content = getattr(r, "text", "")
         extract_regex = re.compile(r"[\w.-]+\." + query, re.I)
bbot/modules/dnsbimi.py ADDED
@@ -0,0 +1,145 @@
+# bimi.py
+#
+# Checks for and parses common BIMI DNS TXT records, e.g. default._bimi.target.domain
+#
+# Example TXT record: "v=BIMI1; l=https://example.com/brand/logo.svg; a=https://example.com/brand/certificate.pem"
+#
+# BIMI records may contain a link to an SVG format brand authorised image, which may be useful for:
+# 1. Sub-domain or otherwise unknown content hosting locations
+# 2. Brand impersonation
+# 3. May not be formatted/stripped of metadata correctly leading to some (low value probably) information exposure
+#
+# BIMI records may also contain a link to a PEM format X.509 VMC certificate, which may be similarly useful.
+#
+# We simply extract any URL's as URL_UNVERIFIED, no further parsing or download is done by this module in order to remain passive.
+#
+# The domain portion of any URL's is also passively checked and added as appropriate, for additional inspection by other modules.
+#
+# Files may be downloaded by other modules which respond to URL_UNVERIFIED events, if you have configured bbot to do so.
+#
+# NOTE: .svg file extensions are filtered from inclusion by default, modify "url_extension_blacklist" appropriately if you want the .svg image to be considered for download.
+#
+# NOTE: use the "filedownload" module if you to download .svg and .pem files. .pem will be downloaded by default, .svg will require a customised configuration for that module.
+#
+# The domain portion of any URL_UNVERIFIED's will be extracted by the various internal modules if .svg is not filtered.
+#
+
+from bbot.modules.base import BaseModule
+from bbot.core.helpers.dns.helpers import service_record
+
+import re
+
+# Handle "v=BIMI1; l=; a=;" == RFC conformant explicit declination to publish, e.g. useful on a sub-domain if you don't want the sub-domain to have a BIMI logo, yet your registered domain does?
+# Handle "v=BIMI1; l=; a=" == RFC non-conformant explicit declination to publish
+# Handle "v=BIMI1; l=;" == RFC non-conformant explicit declination to publish
+# Handle "v=BIMI1; l=" == RFC non-conformant explicit declination to publish
+# Handle "v=BIMI1;" == RFC non-conformant explicit declination to publish
+# Handle "v=BIMI1" == RFC non-conformant explicit declination to publish
+# Handle "v=BIMI1;l=https://bimi.entrust.net/example.com/logo.svg;"
+# Handle "v=BIMI1; l=https://bimi.entrust.net/example.com/logo.svg;"
+# Handle "v=BIMI1;l=https://bimi.entrust.net/example.com/logo.svg;a=https://bimi.entrust.net/example.com/certchain.pem"
+# Handle "v=BIMI1; l=https://bimi.entrust.net/example.com/logo.svg;a=https://bimi.entrust.net/example.com/certchain.pem;"
+_bimi_regex = r"^v=(?P<v>BIMI1);* *(l=(?P<l>https*://[^;]*|)|);*( *a=((?P<a>https://[^;]*|)|);*)*$"
+bimi_regex = re.compile(_bimi_regex, re.I)
+
+
+class dnsbimi(BaseModule):
+    watched_events = ["DNS_NAME"]
+    produced_events = ["URL_UNVERIFIED", "RAW_DNS_RECORD"]
+    flags = ["subdomain-enum", "cloud-enum", "passive", "safe"]
+    meta = {
+        "description": "Check DNS_NAME's for BIMI records to find image and certificate hosting URL's",
+        "author": "@colin-stubbs",
+        "created_date": "2024-11-15",
+    }
+    options = {
+        "emit_raw_dns_records": False,
+        "emit_urls": True,
+        "selectors": "default,email,mail,bimi",
+    }
+    options_desc = {
+        "emit_raw_dns_records": "Emit RAW_DNS_RECORD events",
+        "emit_urls": "Emit URL_UNVERIFIED events",
+        "selectors": "CSV list of BIMI selectors to check",
+    }
+
+    async def setup(self):
+        self.emit_raw_dns_records = self.config.get("emit_raw_dns_records", False)
+        self.emit_urls = self.config.get("emit_urls", True)
+        self._selectors = self.config.get("selectors", "").replace(", ", ",").split(",")
+
+        return await super().setup()
+
+    def _incoming_dedup_hash(self, event):
+        # dedupe by parent
+        parent_domain = self.helpers.parent_domain(event.data)
+        return hash(parent_domain), "already processed parent domain"
+
+    async def filter_event(self, event):
+        if "_wildcard" in str(event.host).split("."):
+            return False, "event is wildcard"
+
+        # there's no value in inspecting service records
+        if service_record(event.host) is True:
+            return False, "service record detected"
+
+        return True
+
+    async def inspectBIMI(self, event, domain):
+        parent_domain = self.helpers.parent_domain(event.data)
+        rdtype = "TXT"
+
+        for selector in self._selectors:
+            tags = ["bimi-record", f"bimi-{selector}"]
+            hostname = f"{selector}._bimi.{parent_domain}"
+
+            r = await self.helpers.resolve_raw(hostname, type=rdtype)
+
+            if r:
+                raw_results, errors = r
+
+                for answer in raw_results:
+                    if self.emit_raw_dns_records:
+                        await self.emit_event(
+                            {
+                                "host": hostname,
+                                "type": rdtype,
+                                "answer": answer.to_text(),
+                            },
+                            "RAW_DNS_RECORD",
+                            parent=event,
+                            tags=tags.append(f"{rdtype.lower()}-record"),
+                            context=f"{rdtype} lookup on {hostname} produced {{event.type}}",
+                        )
+
+                    # we need to strip surrounding quotes and whitespace, as well as fix TXT data that may have been split across two different rdata's
+                    # e.g. we will get a single string, but within that string we may have two parts such as:
+                    # answer = '"part 1 that was really long" "part 2 that did not fit in part 1"'
+                    s = answer.to_text().strip('"').strip().replace('" "', "")
+
+                    bimi_match = bimi_regex.search(s)
+
+                    if bimi_match and bimi_match.group("v") and "bimi" in bimi_match.group("v").lower():
+                        if bimi_match.group("l") and bimi_match.group("l") != "":
+                            if self.emit_urls:
+                                await self.emit_event(
+                                    bimi_match.group("l"),
+                                    "URL_UNVERIFIED",
+                                    parent=event,
+                                    tags=tags.append("bimi-location"),
+                                )
+
+                        if bimi_match.group("a") and bimi_match.group("a") != "":
+                            if self.emit_urls:
+                                await self.emit_event(
+                                    bimi_match.group("a"),
+                                    "URL_UNVERIFIED",
+                                    parent=event,
+                                    tags=tags.append("bimi-authority"),
+                                )
+
+    async def handle_event(self, event):
+        await self.inspectBIMI(event, event.host)
+
+
+# EOF
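
For reference, the `bimi_regex` added above can be exercised on its own against a sample TXT record; the record value below is the example from the module's own header comment, and running the regex outside BBOT is purely illustrative:

import re

# Same pattern as the new dnsbimi module.
_bimi_regex = r"^v=(?P<v>BIMI1);* *(l=(?P<l>https*://[^;]*|)|);*( *a=((?P<a>https://[^;]*|)|);*)*$"
bimi_regex = re.compile(_bimi_regex, re.I)

# Example TXT record taken from the module's header comment.
record = "v=BIMI1; l=https://example.com/brand/logo.svg; a=https://example.com/brand/certificate.pem"

m = bimi_regex.search(record)
if m:
    print("logo URL:", m.group("l"))         # -> https://example.com/brand/logo.svg
    print("certificate URL:", m.group("a"))  # -> https://example.com/brand/certificate.pem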
bbot/modules/dnscaa.py CHANGED
@@ -2,7 +2,7 @@
 #
 # Checks for and parses CAA DNS TXT records for IODEF reporting destination email addresses and/or URL's.
 #
-# NOTE: when the target domain is initially resolved basic "dns_name_regex" matched targets will be extracted so we do not perform that again here.
+# NOTE: when the target domain is initially resolved basic "dns_name_extraction_regex" matched targets will be extracted so we do not perform that again here.
 #
 # Example CAA records,
 # 0 iodef "mailto:dnsadmin@example.com"
@@ -23,7 +23,7 @@ from bbot.modules.base import BaseModule

 import re

-from bbot.core.helpers.regexes import dns_name_regex, email_regex, url_regexes
+from bbot.core.helpers.regexes import dns_name_extraction_regex, email_regex, url_regexes

 # Handle '0 iodef "mailto:support@hcaptcha.com"'
 # Handle '1 iodef "https://some.host.tld/caa;"'
@@ -109,7 +109,7 @@ class dnscaa(BaseModule):

                 elif caa_match.group("property").lower().startswith("issue"):
                     if self._dns_names:
-                        for match in dns_name_regex.finditer(caa_match.group("text")):
+                        for match in dns_name_extraction_regex.finditer(caa_match.group("text")):
                             start, end = match.span()
                             name = caa_match.group("text")[start:end]

bbot/modules/dnsdumpster.py CHANGED
@@ -18,7 +18,7 @@ class dnsdumpster(subdomain_enum):
     async def query(self, domain):
         ret = []
         # first, get the CSRF tokens
-        res1 = await self.request_with_fail_count(self.base_url)
+        res1 = await self.api_request(self.base_url)
         status_code = getattr(res1, "status_code", 0)
         if status_code in [429]:
             self.verbose(f'Too many requests "{status_code}"')
@@ -31,7 +31,7 @@ class dnsdumpster(subdomain_enum):

         html = self.helpers.beautifulsoup(res1.content, "html.parser")
         if html is False:
-            self.verbose(f"BeautifulSoup returned False")
+            self.verbose("BeautifulSoup returned False")
             return ret

         csrftoken = None
@@ -62,7 +62,7 @@ class dnsdumpster(subdomain_enum):

         # Otherwise, do the needful
         subdomains = set()
-        res2 = await self.request_with_fail_count(
+        res2 = await self.api_request(
             f"{self.base_url}/",
             method="POST",
             cookies={"csrftoken": csrftoken},
@@ -82,7 +82,7 @@ class dnsdumpster(subdomain_enum):
             return ret
         html = self.helpers.beautifulsoup(res2.content, "html.parser")
         if html is False:
-            self.verbose(f"BeautifulSoup returned False")
+            self.verbose("BeautifulSoup returned False")
             return ret
         escaped_domain = re.escape(domain)
         match_pattern = re.compile(r"^[\w\.-]+\." + escaped_domain + r"$")