bbot 2.0.1.4720rc0__py3-none-any.whl → 2.3.0.5401rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of bbot might be problematic.

Files changed (278)
  1. bbot/__init__.py +1 -1
  2. bbot/cli.py +3 -7
  3. bbot/core/config/files.py +0 -1
  4. bbot/core/config/logger.py +34 -4
  5. bbot/core/core.py +21 -4
  6. bbot/core/engine.py +9 -8
  7. bbot/core/event/base.py +131 -52
  8. bbot/core/helpers/bloom.py +10 -3
  9. bbot/core/helpers/command.py +8 -7
  10. bbot/core/helpers/depsinstaller/installer.py +31 -13
  11. bbot/core/helpers/diff.py +10 -10
  12. bbot/core/helpers/dns/brute.py +7 -4
  13. bbot/core/helpers/dns/dns.py +1 -2
  14. bbot/core/helpers/dns/engine.py +4 -6
  15. bbot/core/helpers/dns/helpers.py +2 -2
  16. bbot/core/helpers/dns/mock.py +0 -1
  17. bbot/core/helpers/files.py +1 -1
  18. bbot/core/helpers/helper.py +7 -4
  19. bbot/core/helpers/interactsh.py +3 -3
  20. bbot/core/helpers/libmagic.py +65 -0
  21. bbot/core/helpers/misc.py +65 -22
  22. bbot/core/helpers/names_generator.py +17 -3
  23. bbot/core/helpers/process.py +0 -20
  24. bbot/core/helpers/regex.py +1 -1
  25. bbot/core/helpers/regexes.py +12 -6
  26. bbot/core/helpers/validators.py +1 -2
  27. bbot/core/helpers/web/client.py +1 -1
  28. bbot/core/helpers/web/engine.py +1 -2
  29. bbot/core/helpers/web/web.py +4 -114
  30. bbot/core/helpers/wordcloud.py +5 -5
  31. bbot/core/modules.py +36 -27
  32. bbot/core/multiprocess.py +58 -0
  33. bbot/core/shared_deps.py +46 -3
  34. bbot/db/sql/models.py +147 -0
  35. bbot/defaults.yml +12 -10
  36. bbot/modules/anubisdb.py +2 -2
  37. bbot/modules/apkpure.py +63 -0
  38. bbot/modules/azure_tenant.py +2 -2
  39. bbot/modules/baddns.py +35 -19
  40. bbot/modules/baddns_direct.py +92 -0
  41. bbot/modules/baddns_zone.py +3 -8
  42. bbot/modules/badsecrets.py +4 -3
  43. bbot/modules/base.py +195 -51
  44. bbot/modules/bevigil.py +7 -7
  45. bbot/modules/binaryedge.py +7 -4
  46. bbot/modules/bufferoverrun.py +47 -0
  47. bbot/modules/builtwith.py +6 -10
  48. bbot/modules/bypass403.py +5 -5
  49. bbot/modules/c99.py +10 -7
  50. bbot/modules/censys.py +9 -13
  51. bbot/modules/certspotter.py +5 -3
  52. bbot/modules/chaos.py +9 -7
  53. bbot/modules/code_repository.py +1 -0
  54. bbot/modules/columbus.py +3 -3
  55. bbot/modules/crt.py +5 -3
  56. bbot/modules/deadly/dastardly.py +1 -1
  57. bbot/modules/deadly/ffuf.py +9 -9
  58. bbot/modules/deadly/nuclei.py +3 -3
  59. bbot/modules/deadly/vhost.py +4 -3
  60. bbot/modules/dehashed.py +1 -1
  61. bbot/modules/digitorus.py +1 -1
  62. bbot/modules/dnsbimi.py +145 -0
  63. bbot/modules/dnscaa.py +3 -3
  64. bbot/modules/dnsdumpster.py +4 -4
  65. bbot/modules/dnstlsrpt.py +144 -0
  66. bbot/modules/docker_pull.py +7 -5
  67. bbot/modules/dockerhub.py +2 -2
  68. bbot/modules/dotnetnuke.py +20 -21
  69. bbot/modules/emailformat.py +1 -1
  70. bbot/modules/extractous.py +122 -0
  71. bbot/modules/filedownload.py +9 -7
  72. bbot/modules/fullhunt.py +7 -4
  73. bbot/modules/generic_ssrf.py +5 -5
  74. bbot/modules/github_codesearch.py +3 -2
  75. bbot/modules/github_org.py +4 -4
  76. bbot/modules/github_workflows.py +4 -4
  77. bbot/modules/gitlab.py +2 -5
  78. bbot/modules/google_playstore.py +93 -0
  79. bbot/modules/gowitness.py +48 -50
  80. bbot/modules/hackertarget.py +5 -3
  81. bbot/modules/host_header.py +5 -5
  82. bbot/modules/httpx.py +1 -4
  83. bbot/modules/hunterio.py +3 -9
  84. bbot/modules/iis_shortnames.py +19 -30
  85. bbot/modules/internal/cloudcheck.py +29 -12
  86. bbot/modules/internal/dnsresolve.py +22 -22
  87. bbot/modules/internal/excavate.py +97 -59
  88. bbot/modules/internal/speculate.py +41 -32
  89. bbot/modules/internetdb.py +4 -2
  90. bbot/modules/ip2location.py +3 -5
  91. bbot/modules/ipneighbor.py +1 -1
  92. bbot/modules/ipstack.py +3 -8
  93. bbot/modules/jadx.py +87 -0
  94. bbot/modules/leakix.py +11 -10
  95. bbot/modules/myssl.py +2 -2
  96. bbot/modules/newsletters.py +2 -2
  97. bbot/modules/otx.py +5 -3
  98. bbot/modules/output/asset_inventory.py +7 -7
  99. bbot/modules/output/base.py +1 -1
  100. bbot/modules/output/csv.py +1 -1
  101. bbot/modules/output/http.py +20 -14
  102. bbot/modules/output/mysql.py +51 -0
  103. bbot/modules/output/neo4j.py +7 -2
  104. bbot/modules/output/postgres.py +49 -0
  105. bbot/modules/output/slack.py +0 -1
  106. bbot/modules/output/sqlite.py +29 -0
  107. bbot/modules/output/stdout.py +2 -2
  108. bbot/modules/output/teams.py +107 -6
  109. bbot/modules/paramminer_headers.py +8 -11
  110. bbot/modules/passivetotal.py +13 -13
  111. bbot/modules/portscan.py +32 -6
  112. bbot/modules/postman.py +50 -126
  113. bbot/modules/postman_download.py +220 -0
  114. bbot/modules/rapiddns.py +3 -8
  115. bbot/modules/report/asn.py +18 -11
  116. bbot/modules/robots.py +3 -3
  117. bbot/modules/securitytrails.py +7 -10
  118. bbot/modules/securitytxt.py +1 -1
  119. bbot/modules/shodan_dns.py +7 -9
  120. bbot/modules/sitedossier.py +1 -1
  121. bbot/modules/skymem.py +2 -2
  122. bbot/modules/social.py +2 -1
  123. bbot/modules/subdomaincenter.py +1 -1
  124. bbot/modules/subdomainradar.py +160 -0
  125. bbot/modules/telerik.py +8 -8
  126. bbot/modules/templates/bucket.py +1 -1
  127. bbot/modules/templates/github.py +22 -14
  128. bbot/modules/templates/postman.py +21 -0
  129. bbot/modules/templates/shodan.py +14 -13
  130. bbot/modules/templates/sql.py +95 -0
  131. bbot/modules/templates/subdomain_enum.py +51 -16
  132. bbot/modules/templates/webhook.py +2 -4
  133. bbot/modules/trickest.py +8 -37
  134. bbot/modules/trufflehog.py +10 -12
  135. bbot/modules/url_manipulation.py +3 -3
  136. bbot/modules/urlscan.py +1 -1
  137. bbot/modules/viewdns.py +1 -1
  138. bbot/modules/virustotal.py +8 -30
  139. bbot/modules/wafw00f.py +1 -1
  140. bbot/modules/wayback.py +1 -1
  141. bbot/modules/wpscan.py +17 -11
  142. bbot/modules/zoomeye.py +11 -6
  143. bbot/presets/baddns-thorough.yml +12 -0
  144. bbot/presets/fast.yml +16 -0
  145. bbot/presets/kitchen-sink.yml +1 -2
  146. bbot/presets/spider.yml +4 -0
  147. bbot/presets/subdomain-enum.yml +7 -7
  148. bbot/presets/web/dotnet-audit.yml +0 -1
  149. bbot/scanner/manager.py +5 -16
  150. bbot/scanner/preset/args.py +46 -26
  151. bbot/scanner/preset/environ.py +7 -2
  152. bbot/scanner/preset/path.py +7 -4
  153. bbot/scanner/preset/preset.py +36 -23
  154. bbot/scanner/scanner.py +172 -62
  155. bbot/scanner/target.py +236 -434
  156. bbot/scripts/docs.py +1 -1
  157. bbot/test/bbot_fixtures.py +13 -3
  158. bbot/test/conftest.py +132 -100
  159. bbot/test/fastapi_test.py +17 -0
  160. bbot/test/owasp_mastg.apk +0 -0
  161. bbot/test/run_tests.sh +4 -4
  162. bbot/test/test.conf +2 -0
  163. bbot/test/test_step_1/test__module__tests.py +0 -1
  164. bbot/test/test_step_1/test_bbot_fastapi.py +79 -0
  165. bbot/test/test_step_1/test_bloom_filter.py +2 -1
  166. bbot/test/test_step_1/test_cli.py +138 -64
  167. bbot/test/test_step_1/test_dns.py +61 -27
  168. bbot/test/test_step_1/test_engine.py +17 -19
  169. bbot/test/test_step_1/test_events.py +183 -30
  170. bbot/test/test_step_1/test_helpers.py +64 -29
  171. bbot/test/test_step_1/test_manager_deduplication.py +1 -1
  172. bbot/test/test_step_1/test_manager_scope_accuracy.py +333 -330
  173. bbot/test/test_step_1/test_modules_basic.py +68 -70
  174. bbot/test/test_step_1/test_presets.py +183 -100
  175. bbot/test/test_step_1/test_python_api.py +7 -2
  176. bbot/test/test_step_1/test_regexes.py +35 -5
  177. bbot/test/test_step_1/test_scan.py +39 -5
  178. bbot/test/test_step_1/test_scope.py +4 -3
  179. bbot/test/test_step_1/test_target.py +242 -145
  180. bbot/test/test_step_1/test_web.py +14 -10
  181. bbot/test/test_step_2/module_tests/base.py +15 -7
  182. bbot/test/test_step_2/module_tests/test_module_anubisdb.py +1 -1
  183. bbot/test/test_step_2/module_tests/test_module_apkpure.py +71 -0
  184. bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +0 -1
  185. bbot/test/test_step_2/module_tests/test_module_azure_realm.py +1 -1
  186. bbot/test/test_step_2/module_tests/test_module_baddns.py +6 -6
  187. bbot/test/test_step_2/module_tests/test_module_baddns_direct.py +62 -0
  188. bbot/test/test_step_2/module_tests/test_module_bevigil.py +29 -2
  189. bbot/test/test_step_2/module_tests/test_module_binaryedge.py +4 -2
  190. bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +2 -2
  191. bbot/test/test_step_2/module_tests/test_module_bucket_azure.py +1 -1
  192. bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +35 -0
  193. bbot/test/test_step_2/module_tests/test_module_builtwith.py +2 -2
  194. bbot/test/test_step_2/module_tests/test_module_bypass403.py +1 -1
  195. bbot/test/test_step_2/module_tests/test_module_c99.py +126 -0
  196. bbot/test/test_step_2/module_tests/test_module_censys.py +4 -1
  197. bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +4 -0
  198. bbot/test/test_step_2/module_tests/test_module_code_repository.py +11 -1
  199. bbot/test/test_step_2/module_tests/test_module_columbus.py +1 -1
  200. bbot/test/test_step_2/module_tests/test_module_credshed.py +3 -3
  201. bbot/test/test_step_2/module_tests/test_module_dastardly.py +2 -1
  202. bbot/test/test_step_2/module_tests/test_module_dehashed.py +2 -2
  203. bbot/test/test_step_2/module_tests/test_module_digitorus.py +1 -1
  204. bbot/test/test_step_2/module_tests/test_module_discord.py +1 -1
  205. bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +103 -0
  206. bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +9 -10
  207. bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py +1 -2
  208. bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +1 -2
  209. bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +4 -4
  210. bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +64 -0
  211. bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py +0 -8
  212. bbot/test/test_step_2/module_tests/test_module_excavate.py +28 -48
  213. bbot/test/test_step_2/module_tests/test_module_extractous.py +54 -0
  214. bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +1 -1
  215. bbot/test/test_step_2/module_tests/test_module_filedownload.py +14 -14
  216. bbot/test/test_step_2/module_tests/test_module_git_clone.py +2 -2
  217. bbot/test/test_step_2/module_tests/test_module_github_org.py +19 -8
  218. bbot/test/test_step_2/module_tests/test_module_github_workflows.py +1 -1
  219. bbot/test/test_step_2/module_tests/test_module_gitlab.py +9 -4
  220. bbot/test/test_step_2/module_tests/test_module_google_playstore.py +83 -0
  221. bbot/test/test_step_2/module_tests/test_module_gowitness.py +4 -6
  222. bbot/test/test_step_2/module_tests/test_module_host_header.py +1 -1
  223. bbot/test/test_step_2/module_tests/test_module_http.py +4 -4
  224. bbot/test/test_step_2/module_tests/test_module_httpx.py +10 -8
  225. bbot/test/test_step_2/module_tests/test_module_hunterio.py +68 -4
  226. bbot/test/test_step_2/module_tests/test_module_jadx.py +55 -0
  227. bbot/test/test_step_2/module_tests/test_module_json.py +22 -9
  228. bbot/test/test_step_2/module_tests/test_module_leakix.py +7 -3
  229. bbot/test/test_step_2/module_tests/test_module_mysql.py +76 -0
  230. bbot/test/test_step_2/module_tests/test_module_myssl.py +1 -1
  231. bbot/test/test_step_2/module_tests/test_module_neo4j.py +1 -1
  232. bbot/test/test_step_2/module_tests/test_module_newsletters.py +16 -16
  233. bbot/test/test_step_2/module_tests/test_module_ntlm.py +8 -7
  234. bbot/test/test_step_2/module_tests/test_module_oauth.py +1 -1
  235. bbot/test/test_step_2/module_tests/test_module_otx.py +1 -1
  236. bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py +1 -2
  237. bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +0 -6
  238. bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +2 -9
  239. bbot/test/test_step_2/module_tests/test_module_passivetotal.py +3 -1
  240. bbot/test/test_step_2/module_tests/test_module_pgp.py +2 -2
  241. bbot/test/test_step_2/module_tests/test_module_portscan.py +9 -8
  242. bbot/test/test_step_2/module_tests/test_module_postgres.py +74 -0
  243. bbot/test/test_step_2/module_tests/test_module_postman.py +84 -253
  244. bbot/test/test_step_2/module_tests/test_module_postman_download.py +439 -0
  245. bbot/test/test_step_2/module_tests/test_module_rapiddns.py +93 -1
  246. bbot/test/test_step_2/module_tests/test_module_shodan_dns.py +20 -1
  247. bbot/test/test_step_2/module_tests/test_module_sitedossier.py +2 -2
  248. bbot/test/test_step_2/module_tests/test_module_smuggler.py +14 -14
  249. bbot/test/test_step_2/module_tests/test_module_social.py +11 -1
  250. bbot/test/test_step_2/module_tests/test_module_speculate.py +4 -8
  251. bbot/test/test_step_2/module_tests/test_module_splunk.py +4 -4
  252. bbot/test/test_step_2/module_tests/test_module_sqlite.py +18 -0
  253. bbot/test/test_step_2/module_tests/test_module_sslcert.py +1 -1
  254. bbot/test/test_step_2/module_tests/test_module_stdout.py +5 -3
  255. bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py +1 -1
  256. bbot/test/test_step_2/module_tests/test_module_subdomainradar.py +208 -0
  257. bbot/test/test_step_2/module_tests/test_module_subdomains.py +1 -1
  258. bbot/test/test_step_2/module_tests/test_module_teams.py +8 -6
  259. bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
  260. bbot/test/test_step_2/module_tests/test_module_trufflehog.py +317 -14
  261. bbot/test/test_step_2/module_tests/test_module_viewdns.py +1 -1
  262. bbot/test/test_step_2/module_tests/test_module_wayback.py +1 -1
  263. bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py +2 -2
  264. bbot/wordlists/devops_mutations.txt +1 -1
  265. bbot/wordlists/ffuf_shortname_candidates.txt +1 -1
  266. bbot/wordlists/nameservers.txt +1 -1
  267. bbot/wordlists/paramminer_headers.txt +1 -1
  268. bbot/wordlists/paramminer_parameters.txt +1 -1
  269. bbot/wordlists/raft-small-extensions-lowercase_CLEANED.txt +1 -1
  270. bbot/wordlists/valid_url_schemes.txt +1 -1
  271. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/METADATA +48 -18
  272. bbot-2.3.0.5401rc0.dist-info/RECORD +421 -0
  273. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/WHEEL +1 -1
  274. bbot/modules/unstructured.py +0 -163
  275. bbot/test/test_step_2/module_tests/test_module_unstructured.py +0 -102
  276. bbot-2.0.1.4720rc0.dist-info/RECORD +0 -387
  277. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/LICENSE +0 -0
  278. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/entry_points.txt +0 -0
@@ -32,10 +32,11 @@ class speculate(BaseInternalModule):
         "author": "@liquidsec",
     }
 
-    options = {"max_hosts": 65536, "ports": "80,443"}
+    options = {"max_hosts": 65536, "ports": "80,443", "essential_only": False}
     options_desc = {
         "max_hosts": "Max number of IP_RANGE hosts to convert into IP_ADDRESS events",
         "ports": "The set of ports to speculate on",
+        "essential_only": "Only enable essential speculate features (no extra discovery)",
     }
     scope_distance_modifier = 1
     _priority = 4
@@ -44,14 +45,15 @@ class speculate(BaseInternalModule):
 
     async def setup(self):
         scan_modules = [m for m in self.scan.modules.values() if m._type == "scan"]
-        self.open_port_consumers = any(["OPEN_TCP_PORT" in m.watched_events for m in scan_modules])
+        self.open_port_consumers = any("OPEN_TCP_PORT" in m.watched_events for m in scan_modules)
         # only consider active portscanners (still speculate if only passive ones are enabled)
         self.portscanner_enabled = any(
-            ["portscan" in m.flags and "active" in m.flags for m in self.scan.modules.values()]
+            "portscan" in m.flags and "active" in m.flags for m in self.scan.modules.values()
         )
         self.emit_open_ports = self.open_port_consumers and not self.portscanner_enabled
         self.range_to_ip = True
         self.dns_disable = self.scan.config.get("dns", {}).get("disable", False)
+        self.essential_only = self.config.get("essential_only", False)
         self.org_stubs_seen = set()
 
         port_string = self.config.get("ports", "80,443")
@@ -63,18 +65,26 @@ class speculate(BaseInternalModule):
         if not self.portscanner_enabled:
             self.info(f"No portscanner enabled. Assuming open ports: {', '.join(str(x) for x in self.ports)}")
 
-        target_len = len(self.scan.target)
+        target_len = len(self.scan.target.seeds)
         if target_len > self.config.get("max_hosts", 65536):
             if not self.portscanner_enabled:
                 self.hugewarning(
                     f"Selected target ({target_len:,} hosts) is too large, skipping IP_RANGE --> IP_ADDRESS speculation"
                 )
-                self.hugewarning(f'Enabling the "portscan" module is highly recommended')
+                self.hugewarning('Enabling the "portscan" module is highly recommended')
             self.range_to_ip = False
 
         return True
 
     async def handle_event(self, event):
+        ### BEGIN ESSENTIAL SPECULATION ###
+        # These features are required for smooth operation of bbot
+        # I.e. they are not "osinty" or intended to discover anything, they only compliment other modules
+
+        # we speculate on distance-1 stuff too, because distance-1 open ports are needed by certain modules like sslcert
+        event_in_scope_distance = event.scope_distance <= (self.scan.scope_search_distance + 1)
+        speculate_open_ports = self.emit_open_ports and event_in_scope_distance
+
         # generate individual IP addresses from IP range
         if event.type == "IP_RANGE" and self.range_to_ip:
             net = ipaddress.ip_network(event.data)
@@ -89,28 +99,46 @@ class speculate(BaseInternalModule):
                     context=f"speculate converted range into individual IP_ADDRESS: {ip}",
                 )
 
+        # IP_ADDRESS / DNS_NAME --> OPEN_TCP_PORT
+        if speculate_open_ports:
+            # don't act on unresolved DNS_NAMEs
+            usable_dns = False
+            if event.type == "DNS_NAME":
+                if self.dns_disable or ("a-record" in event.tags or "aaaa-record" in event.tags):
+                    usable_dns = True
+
+            if event.type == "IP_ADDRESS" or usable_dns:
+                for port in self.ports:
+                    await self.emit_event(
+                        self.helpers.make_netloc(event.data, port),
+                        "OPEN_TCP_PORT",
+                        parent=event,
+                        internal=True,
+                        context="speculated {event.type}: {event.data}",
+                    )
+
+        ### END ESSENTIAL SPECULATION ###
+        if self.essential_only:
+            return
+
         # parent domains
         if event.type.startswith("DNS_NAME"):
             parent = self.helpers.parent_domain(event.host_original)
             if parent != event.data:
                 await self.emit_event(
-                    parent, "DNS_NAME", parent=event, context=f"speculated parent {{event.type}}: {{event.data}}"
+                    parent, "DNS_NAME", parent=event, context="speculated parent {event.type}: {event.data}"
                 )
 
-        # we speculate on distance-1 stuff too, because distance-1 open ports are needed by certain modules like sslcert
-        event_in_scope_distance = event.scope_distance <= (self.scan.scope_search_distance + 1)
-        speculate_open_ports = self.emit_open_ports and event_in_scope_distance
-
         # URL --> OPEN_TCP_PORT
-        if event.type == "URL" or (event.type == "URL_UNVERIFIED" and self.open_port_consumers):
+        event_is_url = event.type == "URL"
+        if event_is_url or (event.type == "URL_UNVERIFIED" and self.open_port_consumers):
             # only speculate port from a URL if it wouldn't be speculated naturally from the host
             if event.host and (event.port not in self.ports or not speculate_open_ports):
                 await self.emit_event(
                     self.helpers.make_netloc(event.host, event.port),
                     "OPEN_TCP_PORT",
                     parent=event,
-                    internal=True,
-                    quick=(event.type == "URL"),
+                    internal=not event_is_url,  # if the URL is verified, the port is definitely open
                     context=f"speculated {{event.type}} from {event.type}: {{event.data}}",
                 )
 
@@ -144,25 +172,6 @@ class speculate(BaseInternalModule):
                 context="speculated {event.type}: {event.data}",
             )
 
-        # IP_ADDRESS / DNS_NAME --> OPEN_TCP_PORT
-        if speculate_open_ports:
-            # don't act on unresolved DNS_NAMEs
-            usable_dns = False
-            if event.type == "DNS_NAME":
-                if self.dns_disable or ("a-record" in event.tags or "aaaa-record" in event.tags):
-                    usable_dns = True
-
-            if event.type == "IP_ADDRESS" or usable_dns:
-                for port in self.ports:
-                    await self.emit_event(
-                        self.helpers.make_netloc(event.data, port),
-                        "OPEN_TCP_PORT",
-                        parent=event,
-                        internal=True,
-                        quick=True,
-                        context="speculated {event.type}: {event.data}",
-                    )
-
         # ORG_STUB from TLD, SOCIAL, AZURE_TENANT
         org_stubs = set()
         if event.type == "DNS_NAME" and event.scope_distance == 0:
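For context, the new "essential_only" option confines speculate to the inferences other modules depend on (host and port derivation) and skips the extra discovery logic shown above. A minimal sketch of enabling it through the Python API, assuming the Scanner config keyword behaves as in other 2.x releases (the target is hypothetical):

    from bbot.scanner import Scanner

    # hypothetical target; "essential_only" suppresses extras such as
    # parent-domain and ORG_STUB speculation while keeping port speculation
    scan = Scanner(
        "evilcorp.com",
        config={"modules": {"speculate": {"essential_only": True}}},
    )
    for event in scan.start():
        print(event)
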
@@ -48,6 +48,9 @@ class internetdb(BaseModule):
         "show_open_ports": "Display OPEN_TCP_PORT events in output, even if they didn't lead to an interesting discovery"
     }
 
+    # we get lots of 404s, that's normal
+    _api_failure_abort_threshold = 9999999999
+
     _qsize = 500
 
     base_url = "https://internetdb.shodan.io"
@@ -64,7 +67,7 @@
         if ip is None:
             return
         url = f"{self.base_url}/{ip}"
-        r = await self.request_with_fail_count(url)
+        r = await self.api_request(url)
         if r is None:
             self.debug(f"No response for {event.data}")
             return
@@ -113,7 +116,6 @@
                 "OPEN_TCP_PORT",
                 parent=event,
                 internal=(not self.show_open_ports),
-                quick=True,
                 context=f'{{module}} queried Shodan\'s InternetDB API for "{query_host}" and found {{event.type}}: {{event.data}}',
             )
         vulns = data.get("vulns", [])
@@ -32,12 +32,10 @@ class IP2Location(BaseModule):
 
     async def ping(self):
         url = self.build_url("8.8.8.8")
-        r = await self.request_with_fail_count(url)
-        resp_content = getattr(r, "text", "")
-        assert getattr(r, "status_code", 0) == 200, resp_content
+        await super().ping(url)
 
     def build_url(self, data):
-        url = f"{self.base_url}/?key={self.api_key}&ip={data}&format=json&source=bbot"
+        url = f"{self.base_url}/?key={{api_key}}&ip={data}&format=json&source=bbot"
         if self.lang:
             url = f"{url}&lang={self.lang}"
         return url
@@ -45,7 +43,7 @@ class IP2Location(BaseModule):
     async def handle_event(self, event):
         try:
             url = self.build_url(event.data)
-            result = await self.request_with_fail_count(url)
+            result = await self.api_request(url)
             if result:
                 geo_data = result.json()
                 if not geo_data:
@@ -31,7 +31,7 @@ class ipneighbor(BaseModule):
         netmask = main_ip.max_prefixlen - min(main_ip.max_prefixlen, self.num_bits)
         network = ipaddress.ip_network(f"{main_ip}/{netmask}", strict=False)
         subnet_hash = hash(network)
-        if not subnet_hash in self.processed:
+        if subnet_hash not in self.processed:
             self.processed.add(subnet_hash)
             for ip in network:
                 if ip != main_ip:
bbot/modules/ipstack.py CHANGED
@@ -23,20 +23,15 @@ class Ipstack(BaseModule):
     suppress_dupes = False
 
     base_url = "http://api.ipstack.com"
+    ping_url = f"{base_url}/check?access_key={{api_key}}"
 
     async def setup(self):
         return await self.require_api_key()
 
-    async def ping(self):
-        url = f"{self.base_url}/check?access_key={self.api_key}"
-        r = await self.request_with_fail_count(url)
-        resp_content = getattr(r, "text", "")
-        assert getattr(r, "status_code", 0) == 200, resp_content
-
     async def handle_event(self, event):
         try:
-            url = f"{self.base_url}/{event.data}?access_key={self.api_key}"
-            result = await self.request_with_fail_count(url)
+            url = f"{self.base_url}/{event.data}?access_key={{api_key}}"
+            result = await self.api_request(url)
             if result:
                 geo_data = result.json()
                 if not geo_data:
bbot/modules/jadx.py ADDED
@@ -0,0 +1,87 @@
+from pathlib import Path
+from subprocess import CalledProcessError
+from bbot.modules.internal.base import BaseModule
+
+
+class jadx(BaseModule):
+    watched_events = ["FILESYSTEM"]
+    produced_events = ["FILESYSTEM"]
+    flags = ["passive", "safe"]
+    meta = {
+        "description": "Decompile APKs and XAPKs using JADX",
+        "created_date": "2024-11-04",
+        "author": "@domwhewell-sage",
+    }
+    options = {
+        "threads": 4,
+    }
+    options_desc = {
+        "threads": "Maximum jadx threads for extracting apk's, default: 4",
+    }
+    deps_common = ["java"]
+    deps_ansible = [
+        {
+            "name": "Create jadx directory",
+            "file": {"path": "#{BBOT_TOOLS}/jadx", "state": "directory", "mode": "0755"},
+        },
+        {
+            "name": "Download jadx",
+            "unarchive": {
+                "src": "https://github.com/skylot/jadx/releases/download/v1.5.0/jadx-1.5.0.zip",
+                "include": ["lib/jadx-1.5.0-all.jar", "bin/jadx"],
+                "dest": "#{BBOT_TOOLS}/jadx",
+                "remote_src": True,
+            },
+        },
+    ]
+
+    allowed_file_types = ["java archive", "android application package"]
+
+    async def setup(self):
+        self.threads = self.config.get("threads", 4)
+        return True
+
+    async def filter_event(self, event):
+        if "file" in event.tags:
+            if event.data["magic_description"].lower() not in self.allowed_file_types:
+                return False, f"Jadx is not able to decompile this file type: {event.data['magic_description']}"
+        else:
+            return False, "Event is not a file"
+        return True
+
+    async def handle_event(self, event):
+        path = Path(event.data["path"])
+        output_dir = path.parent / path.name.replace(".", "_")
+        self.helpers.mkdir(output_dir)
+        success = await self.decompile_apk(path, output_dir)
+
+        # If jadx was able to decompile the java archive, emit an event
+        if success:
+            await self.emit_event(
+                {"path": str(output_dir)},
+                "FILESYSTEM",
+                tags="folder",
+                parent=event,
+                context=f'extracted "{path}" to: {output_dir}',
+            )
+        else:
+            output_dir.rmdir()
+
+    async def decompile_apk(self, path, output_dir):
+        command = [
+            f"{self.scan.helpers.tools_dir}/jadx/bin/jadx",
+            "--threads-count",
+            self.threads,
+            "--output-dir",
+            str(output_dir),
+            str(path),
+        ]
+        try:
+            output = await self.run_process(command, check=True)
+        except CalledProcessError as e:
+            self.warning(f"Error decompiling {path}. STDOUT: {e.stdout} STDERR: {repr(e.stderr)}")
+            return False
+        if not (output_dir / "resources").exists() and not (output_dir / "sources").exists():
+            self.warning(f"JADX was unable to decompile {path}: (STDOUT: {output.stdout} STDERR: {output.stderr})")
+            return False
+        return True
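The new jadx module consumes FILESYSTEM events for Java archives and Android packages (as produced by modules such as apkpure or postman_download) and emits a FILESYSTEM event pointing at the decompiled folder. A rough sketch of chaining it from the Python API; the target and module combination are illustrative, not prescribed by the package:

    from bbot.scanner import Scanner

    # hypothetical target; google_playstore/apkpure fetch APKs, jadx decompiles them,
    # and downstream modules (e.g. trufflehog) can then inspect the extracted sources
    scan = Scanner("evilcorp.com", modules=["google_playstore", "apkpure", "jadx"])
    for event in scan.start():
        if event.type == "FILESYSTEM":
            print(event.data)
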
bbot/modules/leakix.py CHANGED
@@ -15,31 +15,32 @@ class leakix(subdomain_enum_apikey):
     }
 
     base_url = "https://leakix.net"
+    ping_url = f"{base_url}/host/1.1.1.1"
 
     async def setup(self):
         ret = await super(subdomain_enum_apikey, self).setup()
-        self.headers = {"Accept": "application/json"}
         self.api_key = self.config.get("api_key", "")
         if self.api_key:
-            self.headers["api-key"] = self.api_key
             return await self.require_api_key()
         return ret
 
-    async def ping(self):
-        url = f"{self.base_url}/host/1.2.3.4.5"
-        r = await self.helpers.request(url, headers=self.headers)
-        resp_content = getattr(r, "text", "")
-        assert getattr(r, "status_code", 0) != 401, resp_content
+    def prepare_api_request(self, url, kwargs):
+        if self.api_key:
+            kwargs["headers"]["api-key"] = self.api_key
+        kwargs["headers"]["Accept"] = "application/json"
+        return url, kwargs
 
     async def request_url(self, query):
         url = f"{self.base_url}/api/subdomains/{self.helpers.quote(query)}"
-        response = await self.request_with_fail_count(url, headers=self.headers)
+        response = await self.api_request(url)
        return response
 
-    def parse_results(self, r, query=None):
+    async def parse_results(self, r, query=None):
+        results = set()
         json = r.json()
         if json:
             for entry in json:
                 subdomain = entry.get("subdomain", "")
                 if subdomain:
-                    yield subdomain
+                    results.add(subdomain)
+        return results
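Across these modules, request_with_fail_count is replaced by api_request, which centralizes retries and failure counting, and per-module authentication moves into a prepare_api_request hook, as leakix shows above; parse_results also becomes async and returns a collection instead of yielding. A hedged sketch of the same pattern for a hypothetical API module (the class name and endpoint are made up; method names follow the diff):

    from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey

    class example_api(subdomain_enum_apikey):
        base_url = "https://api.example.com"
        ping_url = f"{base_url}/ping?key={{api_key}}"

        def prepare_api_request(self, url, kwargs):
            # inject the key as a header rather than a query parameter
            kwargs["headers"]["Authorization"] = f"Bearer {self.api_key}"
            return url, kwargs

        async def request_url(self, query):
            return await self.api_request(f"{self.base_url}/subdomains/{self.helpers.quote(query)}")

        async def parse_results(self, r, query=None):
            # parse_results is now async and returns a set instead of yielding
            return {d for d in r.json().get("subdomains", []) if d}
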
bbot/modules/myssl.py CHANGED
@@ -15,9 +15,9 @@ class myssl(subdomain_enum):
 
     async def request_url(self, query):
         url = f"{self.base_url}?domain={self.helpers.quote(query)}"
-        return await self.request_with_fail_count(url)
+        return await self.api_request(url)
 
-    def parse_results(self, r, query):
+    async def parse_results(self, r, query):
         results = set()
         json = r.json()
         if json and isinstance(json, dict):
@@ -46,11 +46,11 @@ class newsletters(BaseModule):
         body = _event.data["body"]
         soup = self.helpers.beautifulsoup(body, "html.parser")
         if soup is False:
-            self.debug(f"BeautifulSoup returned False")
+            self.debug("BeautifulSoup returned False")
             return
         result = self.find_type(soup)
         if result:
-            description = f"Found a Newsletter Submission Form that could be used for email bombing attacks"
+            description = "Found a Newsletter Submission Form that could be used for email bombing attacks"
             data = {"host": str(_event.host), "description": description, "url": _event.data["url"]}
             await self.emit_event(
                 data,
bbot/modules/otx.py CHANGED
@@ -15,12 +15,14 @@ class otx(subdomain_enum):
 
     def request_url(self, query):
         url = f"{self.base_url}/api/v1/indicators/domain/{self.helpers.quote(query)}/passive_dns"
-        return self.request_with_fail_count(url)
+        return self.api_request(url)
 
-    def parse_results(self, r, query):
+    async def parse_results(self, r, query):
+        results = set()
         j = r.json()
         if isinstance(j, dict):
             for entry in j.get("passive_dns", []):
                 subdomain = entry.get("hostname", "")
                 if subdomain:
-                    yield subdomain
+                    results.add(subdomain)
+        return results
@@ -91,15 +91,15 @@ class asset_inventory(CSV):
         self.assets[hostkey].absorb_event(event)
 
     async def report(self):
-        stats = dict()
-        totals = dict()
+        stats = {}
+        totals = {}
 
         def increment_stat(stat, value):
             try:
                 totals[stat] += 1
             except KeyError:
                 totals[stat] = 1
-            if not stat in stats:
+            if stat not in stats:
                 stats[stat] = {}
             try:
                 stats[stat][value] += 1
@@ -259,17 +259,17 @@ class Asset:
         # ips
         self.ip_addresses = set(_make_ip_list(row.get("IP (External)", "")))
         self.ip_addresses.update(set(_make_ip_list(row.get("IP (Internal)", ""))))
-        # If user reqests a recheck dont import the following fields to force them to be rechecked
+        # If user requests a recheck dont import the following fields to force them to be rechecked
         if not self.recheck:
             # ports
             ports = [i.strip() for i in row.get("Open Ports", "").split(",")]
-            self.ports.update(set(i for i in ports if i and is_port(i)))
+            self.ports.update({i for i in ports if i and is_port(i)})
             # findings
             findings = [i.strip() for i in row.get("Findings", "").splitlines()]
-            self.findings.update(set(i for i in findings if i))
+            self.findings.update({i for i in findings if i})
             # technologies
             technologies = [i.strip() for i in row.get("Technologies", "").splitlines()]
-            self.technologies.update(set(i for i in technologies if i))
+            self.technologies.update({i for i in technologies if i})
             # risk rating
             risk_rating = row.get("Risk Rating", "").strip()
             if risk_rating and risk_rating.isdigit() and int(risk_rating) > self.risk_rating:
@@ -24,7 +24,7 @@ class BaseOutputModule(BaseModule):
         if event.type in ("FINISHED",):
             return True, "its type is FINISHED"
         if self.errored:
-            return False, f"module is in error state"
+            return False, "module is in error state"
         # exclude non-watched types
         if not any(t in self.get_watched_events() for t in ("*", event.type)):
             return False, "its type is not in watched_events"
@@ -64,7 +64,7 @@ class CSV(BaseOutputModule):
                 ),
                 "Source Module": str(getattr(event, "module_sequence", "")),
                 "Scope Distance": str(getattr(event, "scope_distance", "")),
-                "Event Tags": ",".join(sorted(list(getattr(event, "tags", [])))),
+                "Event Tags": ",".join(sorted(getattr(event, "tags", []))),
                 "Discovery Path": " --> ".join(discovery_path),
             }
         )
@@ -1,4 +1,3 @@
1
- from bbot.errors import WebError
2
1
  from bbot.modules.output.base import BaseOutputModule
3
2
 
4
3
 
@@ -52,16 +51,23 @@ class HTTP(BaseOutputModule):
52
51
 
53
52
  async def handle_event(self, event):
54
53
  while 1:
55
- try:
56
- await self.helpers.request(
57
- url=self.url,
58
- method=self.method,
59
- auth=self.auth,
60
- headers=self.headers,
61
- json=event.json(siem_friendly=self.siem_friendly),
62
- raise_error=True,
63
- )
64
- break
65
- except WebError as e:
66
- self.warning(f"Error sending {event}: {e}, retrying...")
67
- await self.helpers.sleep(1)
54
+ response = await self.helpers.request(
55
+ url=self.url,
56
+ method=self.method,
57
+ auth=self.auth,
58
+ headers=self.headers,
59
+ json=event.json(siem_friendly=self.siem_friendly),
60
+ )
61
+ is_success = False if response is None else response.is_success
62
+ if not is_success:
63
+ status_code = getattr(response, "status_code", 0)
64
+ self.warning(f"Error sending {event} (HTTP status code: {status_code}), retrying...")
65
+ body = getattr(response, "text", "")
66
+ self.debug(body)
67
+ if status_code == 429:
68
+ sleep_interval = 10
69
+ else:
70
+ sleep_interval = 1
71
+ await self.helpers.sleep(sleep_interval)
72
+ continue
73
+ break
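With this change the HTTP output module retries any unsuccessful delivery instead of only caught WebErrors, sleeping 10 seconds after an HTTP 429 and 1 second otherwise. A small sketch of pointing it at a collector endpoint (the URL is a placeholder and the config layout is assumed to match other output modules):

    from bbot.scanner import Scanner

    # placeholder collector URL; failed POSTs are retried with the backoff described above
    scan = Scanner(
        "evilcorp.com",
        output_modules=["http"],
        config={"modules": {"http": {"url": "https://collector.example.com/events"}}},
    )
    for event in scan.start():
        pass  # events are also shipped to the HTTP endpoint
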
bbot/modules/output/mysql.py ADDED
@@ -0,0 +1,51 @@
+from bbot.modules.templates.sql import SQLTemplate
+
+
+class MySQL(SQLTemplate):
+    watched_events = ["*"]
+    meta = {"description": "Output scan data to a MySQL database"}
+    options = {
+        "username": "root",
+        "password": "bbotislife",
+        "host": "localhost",
+        "port": 3306,
+        "database": "bbot",
+    }
+    options_desc = {
+        "username": "The username to connect to MySQL",
+        "password": "The password to connect to MySQL",
+        "host": "The server running MySQL",
+        "port": "The port to connect to MySQL",
+        "database": "The database name to connect to",
+    }
+    deps_pip = ["sqlmodel", "aiomysql"]
+    protocol = "mysql+aiomysql"
+
+    async def create_database(self):
+        from sqlalchemy import text
+        from sqlalchemy.ext.asyncio import create_async_engine
+
+        # Create the engine for the initial connection to the server
+        initial_engine = create_async_engine(self.connection_string().rsplit("/", 1)[0])
+
+        async with initial_engine.connect() as conn:
+            # Check if the database exists
+            result = await conn.execute(text(f"SHOW DATABASES LIKE '{self.database}'"))
+            database_exists = result.scalar() is not None
+
+            # Create the database if it does not exist
+            if not database_exists:
+                # Use aiomysql directly to create the database
+                import aiomysql
+
+                raw_conn = await aiomysql.connect(
+                    user=self.username,
+                    password=self.password,
+                    host=self.host,
+                    port=self.port,
+                )
+                try:
+                    async with raw_conn.cursor() as cursor:
+                        await cursor.execute(f"CREATE DATABASE {self.database}")
+                finally:
+                    await raw_conn.ensure_closed()
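Both new SQL outputs derive their connection string from the SQLTemplate options and create the target database on first run if it is missing. A rough illustration of how the defaults above would combine into a SQLAlchemy URL (the actual assembly lives in the sql template, which is not shown in this diff):

    # illustrative only: the sql template is expected to assemble something like this
    protocol = "mysql+aiomysql"
    username, password = "root", "bbotislife"
    host, port, database = "localhost", 3306, "bbot"
    connection_string = f"{protocol}://{username}:{password}@{host}:{port}/{database}"
    print(connection_string)  # mysql+aiomysql://root:bbotislife@localhost:3306/bbot
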
@@ -1,10 +1,15 @@
 import json
+import logging
 from contextlib import suppress
 from neo4j import AsyncGraphDatabase
 
 from bbot.modules.output.base import BaseOutputModule
 
 
+# silence annoying neo4j logger
+logging.getLogger("neo4j").setLevel(logging.CRITICAL)
+
+
 class neo4j(BaseOutputModule):
     """
     # start Neo4j in the background with docker
@@ -48,7 +53,7 @@ class neo4j(BaseOutputModule):
                 ),
             )
             self.session = self.driver.session()
-            await self.handle_event(self.scan.root_event)
+            await self.session.run("Match () Return 1 Limit 1")
         except Exception as e:
             return False, f"Error setting up Neo4j: {e}"
         return True
@@ -110,7 +115,7 @@ class neo4j(BaseOutputModule):
 
         cypher = f"""UNWIND $events AS event
         MERGE (_:{event_type} {{ id: event.id }})
-        SET _ += event
+        SET _ += properties(event)
         RETURN event.data as event_data, event.id as event_id, elementId(_) as neo4j_id"""
         neo4j_ids = {}
         # insert events
bbot/modules/output/postgres.py ADDED
@@ -0,0 +1,49 @@
+from bbot.modules.templates.sql import SQLTemplate
+
+
+class Postgres(SQLTemplate):
+    watched_events = ["*"]
+    meta = {"description": "Output scan data to a SQLite database"}
+    options = {
+        "username": "postgres",
+        "password": "bbotislife",
+        "host": "localhost",
+        "port": 5432,
+        "database": "bbot",
+    }
+    options_desc = {
+        "username": "The username to connect to Postgres",
+        "password": "The password to connect to Postgres",
+        "host": "The server running Postgres",
+        "port": "The port to connect to Postgres",
+        "database": "The database name to connect to",
+    }
+    deps_pip = ["sqlmodel", "asyncpg"]
+    protocol = "postgresql+asyncpg"
+
+    async def create_database(self):
+        import asyncpg
+        from sqlalchemy import text
+        from sqlalchemy.ext.asyncio import create_async_engine
+
+        # Create the engine for the initial connection to the server
+        initial_engine = create_async_engine(self.connection_string().rsplit("/", 1)[0])
+
+        async with initial_engine.connect() as conn:
+            # Check if the database exists
+            result = await conn.execute(text(f"SELECT 1 FROM pg_database WHERE datname = '{self.database}'"))
+            database_exists = result.scalar() is not None
+
+            # Create the database if it does not exist
+            if not database_exists:
+                # Use asyncpg directly to create the database
+                raw_conn = await asyncpg.connect(
+                    user=self.username,
+                    password=self.password,
+                    host=self.host,
+                    port=self.port,
+                )
+                try:
+                    await raw_conn.execute(f"CREATE DATABASE {self.database}")
+                finally:
+                    await raw_conn.close()
@@ -16,7 +16,6 @@ class Slack(WebhookOutputModule):
         "event_types": "Types of events to send",
         "min_severity": "Only allow VULNERABILITY events of this severity or higher",
     }
-    good_status_code = 200
     content_key = "text"
 
     def format_message_str(self, event):