bbot 2.0.1.4720rc0__py3-none-any.whl → 2.3.0.5397rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of bbot might be problematic.

Files changed (267)
  1. bbot/__init__.py +1 -1
  2. bbot/cli.py +3 -7
  3. bbot/core/config/files.py +0 -1
  4. bbot/core/config/logger.py +34 -4
  5. bbot/core/core.py +21 -4
  6. bbot/core/engine.py +9 -8
  7. bbot/core/event/base.py +131 -52
  8. bbot/core/helpers/bloom.py +10 -3
  9. bbot/core/helpers/command.py +8 -7
  10. bbot/core/helpers/depsinstaller/installer.py +31 -13
  11. bbot/core/helpers/diff.py +10 -10
  12. bbot/core/helpers/dns/brute.py +7 -4
  13. bbot/core/helpers/dns/dns.py +1 -2
  14. bbot/core/helpers/dns/engine.py +4 -6
  15. bbot/core/helpers/dns/helpers.py +2 -2
  16. bbot/core/helpers/dns/mock.py +0 -1
  17. bbot/core/helpers/files.py +1 -1
  18. bbot/core/helpers/helper.py +7 -4
  19. bbot/core/helpers/interactsh.py +3 -3
  20. bbot/core/helpers/libmagic.py +65 -0
  21. bbot/core/helpers/misc.py +65 -22
  22. bbot/core/helpers/names_generator.py +17 -3
  23. bbot/core/helpers/process.py +0 -20
  24. bbot/core/helpers/regex.py +1 -1
  25. bbot/core/helpers/regexes.py +12 -6
  26. bbot/core/helpers/validators.py +1 -2
  27. bbot/core/helpers/web/client.py +1 -1
  28. bbot/core/helpers/web/engine.py +1 -2
  29. bbot/core/helpers/web/web.py +4 -114
  30. bbot/core/helpers/wordcloud.py +5 -5
  31. bbot/core/modules.py +36 -27
  32. bbot/core/multiprocess.py +58 -0
  33. bbot/core/shared_deps.py +46 -3
  34. bbot/db/sql/models.py +147 -0
  35. bbot/defaults.yml +11 -9
  36. bbot/modules/anubisdb.py +2 -2
  37. bbot/modules/apkpure.py +63 -0
  38. bbot/modules/azure_tenant.py +2 -2
  39. bbot/modules/baddns.py +35 -19
  40. bbot/modules/baddns_direct.py +92 -0
  41. bbot/modules/baddns_zone.py +3 -8
  42. bbot/modules/badsecrets.py +4 -3
  43. bbot/modules/base.py +195 -51
  44. bbot/modules/bevigil.py +7 -7
  45. bbot/modules/binaryedge.py +7 -4
  46. bbot/modules/bufferoverrun.py +47 -0
  47. bbot/modules/builtwith.py +6 -10
  48. bbot/modules/bypass403.py +5 -5
  49. bbot/modules/c99.py +10 -7
  50. bbot/modules/censys.py +9 -13
  51. bbot/modules/certspotter.py +5 -3
  52. bbot/modules/chaos.py +9 -7
  53. bbot/modules/code_repository.py +1 -0
  54. bbot/modules/columbus.py +3 -3
  55. bbot/modules/crt.py +5 -3
  56. bbot/modules/deadly/dastardly.py +1 -1
  57. bbot/modules/deadly/ffuf.py +9 -9
  58. bbot/modules/deadly/nuclei.py +3 -3
  59. bbot/modules/deadly/vhost.py +4 -3
  60. bbot/modules/dehashed.py +1 -1
  61. bbot/modules/digitorus.py +1 -1
  62. bbot/modules/dnsbimi.py +145 -0
  63. bbot/modules/dnscaa.py +3 -3
  64. bbot/modules/dnsdumpster.py +4 -4
  65. bbot/modules/dnstlsrpt.py +144 -0
  66. bbot/modules/docker_pull.py +7 -5
  67. bbot/modules/dockerhub.py +2 -2
  68. bbot/modules/dotnetnuke.py +18 -19
  69. bbot/modules/emailformat.py +1 -1
  70. bbot/modules/extractous.py +122 -0
  71. bbot/modules/filedownload.py +9 -7
  72. bbot/modules/fullhunt.py +7 -4
  73. bbot/modules/generic_ssrf.py +5 -5
  74. bbot/modules/github_codesearch.py +3 -2
  75. bbot/modules/github_org.py +4 -4
  76. bbot/modules/github_workflows.py +4 -4
  77. bbot/modules/gitlab.py +2 -5
  78. bbot/modules/google_playstore.py +93 -0
  79. bbot/modules/gowitness.py +48 -50
  80. bbot/modules/hackertarget.py +5 -3
  81. bbot/modules/host_header.py +5 -5
  82. bbot/modules/httpx.py +1 -4
  83. bbot/modules/hunterio.py +3 -9
  84. bbot/modules/iis_shortnames.py +19 -30
  85. bbot/modules/internal/cloudcheck.py +27 -12
  86. bbot/modules/internal/dnsresolve.py +22 -20
  87. bbot/modules/internal/excavate.py +85 -48
  88. bbot/modules/internal/speculate.py +41 -32
  89. bbot/modules/internetdb.py +4 -2
  90. bbot/modules/ip2location.py +3 -5
  91. bbot/modules/ipneighbor.py +1 -1
  92. bbot/modules/ipstack.py +3 -8
  93. bbot/modules/jadx.py +87 -0
  94. bbot/modules/leakix.py +11 -10
  95. bbot/modules/myssl.py +2 -2
  96. bbot/modules/newsletters.py +2 -2
  97. bbot/modules/otx.py +5 -3
  98. bbot/modules/output/asset_inventory.py +7 -7
  99. bbot/modules/output/base.py +1 -1
  100. bbot/modules/output/csv.py +1 -1
  101. bbot/modules/output/http.py +20 -14
  102. bbot/modules/output/mysql.py +51 -0
  103. bbot/modules/output/neo4j.py +7 -2
  104. bbot/modules/output/postgres.py +49 -0
  105. bbot/modules/output/slack.py +0 -1
  106. bbot/modules/output/sqlite.py +29 -0
  107. bbot/modules/output/stdout.py +2 -2
  108. bbot/modules/output/teams.py +107 -6
  109. bbot/modules/paramminer_headers.py +5 -8
  110. bbot/modules/passivetotal.py +13 -13
  111. bbot/modules/portscan.py +32 -6
  112. bbot/modules/postman.py +50 -126
  113. bbot/modules/postman_download.py +220 -0
  114. bbot/modules/rapiddns.py +3 -8
  115. bbot/modules/report/asn.py +11 -11
  116. bbot/modules/robots.py +3 -3
  117. bbot/modules/securitytrails.py +7 -10
  118. bbot/modules/securitytxt.py +1 -1
  119. bbot/modules/shodan_dns.py +7 -9
  120. bbot/modules/sitedossier.py +1 -1
  121. bbot/modules/skymem.py +2 -2
  122. bbot/modules/social.py +2 -1
  123. bbot/modules/subdomaincenter.py +1 -1
  124. bbot/modules/subdomainradar.py +160 -0
  125. bbot/modules/telerik.py +8 -8
  126. bbot/modules/templates/bucket.py +1 -1
  127. bbot/modules/templates/github.py +22 -14
  128. bbot/modules/templates/postman.py +21 -0
  129. bbot/modules/templates/shodan.py +14 -13
  130. bbot/modules/templates/sql.py +95 -0
  131. bbot/modules/templates/subdomain_enum.py +51 -16
  132. bbot/modules/templates/webhook.py +2 -4
  133. bbot/modules/trickest.py +8 -37
  134. bbot/modules/trufflehog.py +10 -12
  135. bbot/modules/url_manipulation.py +3 -3
  136. bbot/modules/urlscan.py +1 -1
  137. bbot/modules/viewdns.py +1 -1
  138. bbot/modules/virustotal.py +8 -30
  139. bbot/modules/wafw00f.py +1 -1
  140. bbot/modules/wayback.py +1 -1
  141. bbot/modules/wpscan.py +17 -11
  142. bbot/modules/zoomeye.py +11 -6
  143. bbot/presets/baddns-thorough.yml +12 -0
  144. bbot/presets/fast.yml +16 -0
  145. bbot/presets/kitchen-sink.yml +1 -0
  146. bbot/presets/spider.yml +4 -0
  147. bbot/presets/subdomain-enum.yml +7 -7
  148. bbot/scanner/manager.py +5 -16
  149. bbot/scanner/preset/args.py +44 -26
  150. bbot/scanner/preset/environ.py +7 -2
  151. bbot/scanner/preset/path.py +7 -4
  152. bbot/scanner/preset/preset.py +36 -23
  153. bbot/scanner/scanner.py +172 -62
  154. bbot/scanner/target.py +236 -434
  155. bbot/scripts/docs.py +1 -1
  156. bbot/test/bbot_fixtures.py +13 -3
  157. bbot/test/conftest.py +132 -100
  158. bbot/test/fastapi_test.py +17 -0
  159. bbot/test/owasp_mastg.apk +0 -0
  160. bbot/test/run_tests.sh +4 -4
  161. bbot/test/test.conf +2 -0
  162. bbot/test/test_step_1/test_bbot_fastapi.py +82 -0
  163. bbot/test/test_step_1/test_bloom_filter.py +2 -0
  164. bbot/test/test_step_1/test_cli.py +138 -64
  165. bbot/test/test_step_1/test_dns.py +62 -25
  166. bbot/test/test_step_1/test_engine.py +17 -17
  167. bbot/test/test_step_1/test_events.py +183 -28
  168. bbot/test/test_step_1/test_helpers.py +64 -28
  169. bbot/test/test_step_1/test_manager_deduplication.py +1 -1
  170. bbot/test/test_step_1/test_manager_scope_accuracy.py +333 -330
  171. bbot/test/test_step_1/test_modules_basic.py +68 -70
  172. bbot/test/test_step_1/test_presets.py +184 -96
  173. bbot/test/test_step_1/test_python_api.py +7 -2
  174. bbot/test/test_step_1/test_regexes.py +35 -5
  175. bbot/test/test_step_1/test_scan.py +39 -5
  176. bbot/test/test_step_1/test_scope.py +4 -3
  177. bbot/test/test_step_1/test_target.py +243 -145
  178. bbot/test/test_step_1/test_web.py +14 -8
  179. bbot/test/test_step_2/module_tests/base.py +15 -7
  180. bbot/test/test_step_2/module_tests/test_module_anubisdb.py +1 -1
  181. bbot/test/test_step_2/module_tests/test_module_apkpure.py +71 -0
  182. bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +0 -1
  183. bbot/test/test_step_2/module_tests/test_module_azure_realm.py +1 -1
  184. bbot/test/test_step_2/module_tests/test_module_baddns.py +6 -6
  185. bbot/test/test_step_2/module_tests/test_module_baddns_direct.py +62 -0
  186. bbot/test/test_step_2/module_tests/test_module_bevigil.py +29 -2
  187. bbot/test/test_step_2/module_tests/test_module_binaryedge.py +4 -2
  188. bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +2 -2
  189. bbot/test/test_step_2/module_tests/test_module_bucket_azure.py +1 -1
  190. bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +35 -0
  191. bbot/test/test_step_2/module_tests/test_module_builtwith.py +2 -2
  192. bbot/test/test_step_2/module_tests/test_module_bypass403.py +1 -1
  193. bbot/test/test_step_2/module_tests/test_module_c99.py +126 -0
  194. bbot/test/test_step_2/module_tests/test_module_censys.py +4 -1
  195. bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +4 -0
  196. bbot/test/test_step_2/module_tests/test_module_code_repository.py +11 -1
  197. bbot/test/test_step_2/module_tests/test_module_columbus.py +1 -1
  198. bbot/test/test_step_2/module_tests/test_module_credshed.py +3 -3
  199. bbot/test/test_step_2/module_tests/test_module_dastardly.py +2 -1
  200. bbot/test/test_step_2/module_tests/test_module_dehashed.py +2 -2
  201. bbot/test/test_step_2/module_tests/test_module_digitorus.py +1 -1
  202. bbot/test/test_step_2/module_tests/test_module_discord.py +1 -1
  203. bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +103 -0
  204. bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +9 -10
  205. bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py +1 -2
  206. bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +1 -2
  207. bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +4 -4
  208. bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +64 -0
  209. bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py +0 -8
  210. bbot/test/test_step_2/module_tests/test_module_excavate.py +17 -37
  211. bbot/test/test_step_2/module_tests/test_module_extractous.py +54 -0
  212. bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +1 -1
  213. bbot/test/test_step_2/module_tests/test_module_filedownload.py +14 -14
  214. bbot/test/test_step_2/module_tests/test_module_git_clone.py +2 -2
  215. bbot/test/test_step_2/module_tests/test_module_github_org.py +19 -8
  216. bbot/test/test_step_2/module_tests/test_module_github_workflows.py +1 -1
  217. bbot/test/test_step_2/module_tests/test_module_gitlab.py +9 -4
  218. bbot/test/test_step_2/module_tests/test_module_google_playstore.py +83 -0
  219. bbot/test/test_step_2/module_tests/test_module_gowitness.py +4 -4
  220. bbot/test/test_step_2/module_tests/test_module_host_header.py +1 -1
  221. bbot/test/test_step_2/module_tests/test_module_http.py +4 -4
  222. bbot/test/test_step_2/module_tests/test_module_httpx.py +10 -8
  223. bbot/test/test_step_2/module_tests/test_module_hunterio.py +68 -4
  224. bbot/test/test_step_2/module_tests/test_module_jadx.py +55 -0
  225. bbot/test/test_step_2/module_tests/test_module_json.py +22 -9
  226. bbot/test/test_step_2/module_tests/test_module_leakix.py +7 -3
  227. bbot/test/test_step_2/module_tests/test_module_mysql.py +76 -0
  228. bbot/test/test_step_2/module_tests/test_module_myssl.py +1 -1
  229. bbot/test/test_step_2/module_tests/test_module_neo4j.py +1 -1
  230. bbot/test/test_step_2/module_tests/test_module_newsletters.py +6 -6
  231. bbot/test/test_step_2/module_tests/test_module_ntlm.py +7 -7
  232. bbot/test/test_step_2/module_tests/test_module_oauth.py +1 -1
  233. bbot/test/test_step_2/module_tests/test_module_otx.py +1 -1
  234. bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py +1 -2
  235. bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +0 -6
  236. bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +2 -9
  237. bbot/test/test_step_2/module_tests/test_module_passivetotal.py +3 -1
  238. bbot/test/test_step_2/module_tests/test_module_portscan.py +9 -8
  239. bbot/test/test_step_2/module_tests/test_module_postgres.py +74 -0
  240. bbot/test/test_step_2/module_tests/test_module_postman.py +84 -253
  241. bbot/test/test_step_2/module_tests/test_module_postman_download.py +439 -0
  242. bbot/test/test_step_2/module_tests/test_module_rapiddns.py +93 -1
  243. bbot/test/test_step_2/module_tests/test_module_shodan_dns.py +20 -1
  244. bbot/test/test_step_2/module_tests/test_module_sitedossier.py +2 -2
  245. bbot/test/test_step_2/module_tests/test_module_smuggler.py +1 -1
  246. bbot/test/test_step_2/module_tests/test_module_social.py +11 -1
  247. bbot/test/test_step_2/module_tests/test_module_speculate.py +2 -6
  248. bbot/test/test_step_2/module_tests/test_module_splunk.py +4 -4
  249. bbot/test/test_step_2/module_tests/test_module_sqlite.py +18 -0
  250. bbot/test/test_step_2/module_tests/test_module_sslcert.py +1 -1
  251. bbot/test/test_step_2/module_tests/test_module_stdout.py +5 -3
  252. bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py +1 -1
  253. bbot/test/test_step_2/module_tests/test_module_subdomainradar.py +208 -0
  254. bbot/test/test_step_2/module_tests/test_module_subdomains.py +1 -1
  255. bbot/test/test_step_2/module_tests/test_module_teams.py +8 -6
  256. bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
  257. bbot/test/test_step_2/module_tests/test_module_trufflehog.py +317 -14
  258. bbot/test/test_step_2/module_tests/test_module_wayback.py +1 -1
  259. bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py +2 -2
  260. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/METADATA +48 -18
  261. bbot-2.3.0.5397rc0.dist-info/RECORD +421 -0
  262. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/WHEEL +1 -1
  263. bbot/modules/unstructured.py +0 -163
  264. bbot/test/test_step_2/module_tests/test_module_unstructured.py +0 -102
  265. bbot-2.0.1.4720rc0.dist-info/RECORD +0 -387
  266. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/LICENSE +0 -0
  267. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/entry_points.txt +0 -0
bbot/modules/templates/sql.py ADDED
@@ -0,0 +1,95 @@
+ from contextlib import suppress
+ from sqlmodel import SQLModel
+ from sqlalchemy.orm import sessionmaker
+ from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
+
+ from bbot.db.sql.models import Event, Scan, Target
+ from bbot.modules.output.base import BaseOutputModule
+
+
+ class SQLTemplate(BaseOutputModule):
+     meta = {"description": "SQL output module template"}
+     options = {
+         "database": "bbot",
+         "username": "",
+         "password": "",
+         "host": "127.0.0.1",
+         "port": 0,
+     }
+     options_desc = {
+         "database": "The database to use",
+         "username": "The username to use to connect to the database",
+         "password": "The password to use to connect to the database",
+         "host": "The host to use to connect to the database",
+         "port": "The port to use to connect to the database",
+     }
+
+     protocol = ""
+
+     async def setup(self):
+         self.database = self.config.get("database", "bbot")
+         self.username = self.config.get("username", "")
+         self.password = self.config.get("password", "")
+         self.host = self.config.get("host", "127.0.0.1")
+         self.port = self.config.get("port", 0)
+
+         await self.init_database()
+         return True
+
+     async def handle_event(self, event):
+         event_obj = Event(**event.json()).validated
+
+         async with self.async_session() as session:
+             async with session.begin():
+                 # insert event
+                 session.add(event_obj)
+
+                 # if it's a SCAN event, create/update the scan and target
+                 if event_obj.type == "SCAN":
+                     event_data = event_obj.get_data()
+                     if not isinstance(event_data, dict):
+                         raise ValueError(f"Invalid data for SCAN event: {event_data}")
+                     scan = Scan(**event_data).validated
+                     await session.merge(scan)  # Insert or update scan
+
+                     target_data = event_data.get("target", {})
+                     if not isinstance(target_data, dict):
+                         raise ValueError(f"Invalid target for SCAN event: {target_data}")
+                     target = Target(**target_data).validated
+                     await session.merge(target)  # Insert or update target
+
+             await session.commit()
+
+     async def create_database(self):
+         pass
+
+     async def init_database(self):
+         await self.create_database()
+
+         # Now create the engine for the actual database
+         self.engine = create_async_engine(self.connection_string())
+         # Create a session factory bound to the engine
+         self.async_session = sessionmaker(self.engine, expire_on_commit=False, class_=AsyncSession)
+
+         # Use the engine directly to create all tables
+         async with self.engine.begin() as conn:
+             await conn.run_sync(SQLModel.metadata.create_all)
+
+     def connection_string(self, mask_password=False):
+         connection_string = f"{self.protocol}://"
+         if self.username:
+             password = self.password
+             if mask_password:
+                 password = "****"
+             connection_string += f"{self.username}:{password}"
+         if self.host:
+             connection_string += f"@{self.host}"
+         if self.port:
+             connection_string += f":{self.port}"
+         if self.database:
+             connection_string += f"/{self.database}"
+         return connection_string
+
+     async def cleanup(self):
+         with suppress(Exception):
+             await self.engine.dispose()
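
This template backs the new mysql, postgres, and sqlite output modules listed above (their diffs are not shown in this section). A minimal sketch of how a concrete module might subclass it — the class below is illustrative only, assumes the asyncpg driver, and is not the actual bbot/modules/output/postgres.py:

# Hypothetical subclass for illustration; not taken from the release.
import asyncpg  # assumed driver, not a dependency declared in this diff

from bbot.modules.templates.sql import SQLTemplate


class postgres_example(SQLTemplate):
    watched_events = ["*"]
    meta = {"description": "Output scan results to a PostgreSQL database (sketch)"}
    protocol = "postgresql+asyncpg"  # async SQLAlchemy driver string (assumption)

    async def create_database(self):
        # Connect to the default "postgres" database and create the scan
        # database if it doesn't exist, so create_async_engine() can bind to it.
        conn = await asyncpg.connect(
            user=self.username or "postgres",
            password=self.password or None,
            host=self.host,
            database="postgres",
        )
        try:
            exists = await conn.fetchval("SELECT 1 FROM pg_database WHERE datname = $1", self.database)
            if not exists:
                await conn.execute(f'CREATE DATABASE "{self.database}"')
        finally:
            await conn.close()

With defaults, the template's connection_string() would then produce something like postgresql+asyncpg://127.0.0.1/bbot for the engine.
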
bbot/modules/templates/subdomain_enum.py CHANGED
@@ -20,8 +20,8 @@ class subdomain_enum(BaseModule):
      # whether to reject wildcard DNS_NAMEs
      reject_wildcards = "strict"
  
-     # set qsize to 10. this helps combat rate limiting by ensuring that a query doesn't execute
-     # until the queue is ready to receive its results
+     # set qsize to 10. this helps combat rate limiting by ensuring the next query doesn't execute
+     # until the result from the previous queue have been consumed by the scan
      # we don't use 1 because it causes delays due to the asyncio.sleep; 10 gives us reasonable buffer room
      _qsize = 10
  
@@ -31,6 +31,11 @@ class subdomain_enum(BaseModule):
      # "lowest_parent": dedupe by lowest parent (lowest parent of www.api.test.evilcorp.com is api.test.evilcorp.com)
      dedup_strategy = "highest_parent"
  
+     # how many results to request per API call
+     page_size = 100
+     # arguments to pass to api_page_iter
+     api_page_iter_kwargs = {}
+
      @property
      def source_pretty_name(self):
          return f"{self.__class__.__name__} API"
@@ -61,9 +66,30 @@
                      context=f'{{module}} searched {self.source_pretty_name} for "{query}" and found {{event.type}}: {{event.data}}',
                  )
  
+     async def handle_event_paginated(self, event):
+         query = self.make_query(event)
+         async for result_batch in self.query_paginated(query):
+             for hostname in set(result_batch):
+                 try:
+                     hostname = self.helpers.validators.validate_host(hostname)
+                 except ValueError as e:
+                     self.verbose(e)
+                     continue
+                 if hostname and hostname.endswith(f".{query}") and not hostname == event.data:
+                     await self.emit_event(
+                         hostname,
+                         "DNS_NAME",
+                         event,
+                         abort_if=self.abort_if,
+                         context=f'{{module}} searched {self.source_pretty_name} for "{query}" and found {{event.type}}: {{event.data}}',
+                     )
+
      async def request_url(self, query):
-         url = f"{self.base_url}/subdomains/{self.helpers.quote(query)}"
-         return await self.request_with_fail_count(url)
+         url = self.make_url(query)
+         return await self.api_request(url)
+
+     def make_url(self, query):
+         return f"{self.base_url}/subdomains/{self.helpers.quote(query)}"
  
      def make_query(self, event):
          query = event.data
@@ -78,30 +104,26 @@
              if self.scan.in_scope(p):
                  query = p
                  break
-         try:
-             return ".".join([s for s in query.split(".") if s != "_wildcard"])
-         except Exception:
-             self.critical(query)
-             raise
+         return ".".join([s for s in query.split(".") if s != "_wildcard"])
  
-     def parse_results(self, r, query=None):
+     async def parse_results(self, r, query=None):
          json = r.json()
          if json:
              for hostname in json:
                  yield hostname
  
-     async def query(self, query, parse_fn=None, request_fn=None):
-         if parse_fn is None:
-             parse_fn = self.parse_results
+     async def query(self, query, request_fn=None, parse_fn=None):
          if request_fn is None:
              request_fn = self.request_url
+         if parse_fn is None:
+             parse_fn = self.parse_results
          try:
              response = await request_fn(query)
              if response is None:
                  self.info(f'Query "{query}" failed (no response)')
                  return []
              try:
-                 results = list(parse_fn(response, query))
+                 results = list(await parse_fn(response, query))
              except Exception as e:
                  if response:
                      self.info(
@@ -117,10 +139,23 @@
          except Exception as e:
              self.info(f"Error retrieving results for {query}: {e}", trace=True)
  
+     async def query_paginated(self, query):
+         url = self.make_url(query)
+         agen = self.api_page_iter(url, page_size=self.page_size, **self.api_page_iter_kwargs)
+         try:
+             async for response in agen:
+                 subdomains = await self.parse_results(response, query)
+                 self.verbose(f'Got {len(subdomains):,} subdomains for "{query}"')
+                 if not subdomains:
+                     break
+                 yield subdomains
+         finally:
+             agen.aclose()
+
      async def _is_wildcard(self, query):
          rdtypes = ("A", "AAAA", "CNAME")
          if self.helpers.is_dns_name(query):
-             for domain, wildcard_rdtypes in (await self.helpers.is_wildcard_domain(query, rdtypes=rdtypes)).items():
+             for wildcard_rdtypes in (await self.helpers.is_wildcard_domain(query, rdtypes=rdtypes)).values():
                  if any(t in wildcard_rdtypes for t in rdtypes):
                      return True
          return False
@@ -134,7 +169,7 @@
          if any(t.startswith("cloud-") for t in event.tags):
              is_cloud = True
          # reject if it's a cloud resource and not in our target
-         if is_cloud and event not in self.scan.target:
+         if is_cloud and event not in self.scan.target.whitelist:
              return False, "Event is a cloud resource and not a direct target"
          # optionally reject events with wildcards / errors
          if self.reject_wildcards:
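
The new make_url() / page_size / api_page_iter_kwargs hooks and the handle_event_paginated() / query_paginated() pair are what let the per-module pagination loops further down (trickest, virustotal, zoomeye) collapse into a few overrides. A hypothetical subclass sketch, assuming api_page_iter substitutes a {page} placeholder as the zoomeye URL below suggests; the module name and endpoint are made up:

# Illustrative subclass; "exampleapi" and its endpoint are not real bbot modules.
from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey


class exampleapi(subdomain_enum_apikey):
    watched_events = ["DNS_NAME"]
    produced_events = ["DNS_NAME"]
    options = {"api_key": ""}
    options_desc = {"api_key": "Example API key"}

    base_url = "https://api.example.com/v1"  # placeholder endpoint
    page_size = 100

    def make_url(self, query):
        # {page} is left for the pagination helper to fill in
        return f"{self.base_url}/subdomains/{self.helpers.quote(query)}?page={{page}}"

    def prepare_api_request(self, url, kwargs):
        kwargs["headers"]["Authorization"] = f"Bearer {self.api_key}"
        return url, kwargs

    async def handle_event(self, event):
        # batching, validation, and event emission come from the template
        await self.handle_event_paginated(event)

    async def parse_results(self, r, query):
        return set(r.json().get("subdomains", []))
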
bbot/modules/templates/webhook.py CHANGED
@@ -9,7 +9,6 @@ class WebhookOutputModule(BaseOutputModule):
      """
  
      accept_dupes = False
-     good_status_code = 204
      message_size_limit = 2000
      content_key = "content"
      vuln_severities = ["UNKNOWN", "LOW", "MEDIUM", "HIGH", "CRITICAL"]
@@ -61,7 +60,7 @@ class WebhookOutputModule(BaseOutputModule):
      async def filter_event(self, event):
          if event.type == "VULNERABILITY":
              severity = event.data.get("severity", "UNKNOWN")
-             if not severity in self.allowed_severities:
+             if severity not in self.allowed_severities:
                  return False, f"{severity} is below min_severity threshold"
          return True
  
@@ -94,5 +93,4 @@
          return msg
  
      def evaluate_response(self, response):
-         status_code = getattr(response, "status_code", 0)
-         return status_code == self.good_status_code
+         return getattr(response, "is_success", False)
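
Dropping the hard-coded 204 check in favor of is_success means any 2xx response now counts as delivered, which matters now that this template serves Teams and other webhooks rather than only Discord. A quick illustration, assuming the response object is an httpx.Response as BBOT's web helpers return:

# Standalone illustration: httpx.Response.is_success is True for any 2xx status.
import httpx

for code in (200, 201, 204, 429):
    resp = httpx.Response(code)
    print(code, getattr(resp, "is_success", False))  # 2xx -> True, 429 -> False
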
bbot/modules/trickest.py CHANGED
@@ -19,53 +19,24 @@ class Trickest(subdomain_enum_apikey):
      }
  
      base_url = "https://api.trickest.io/solutions/v1/public/solution/a7cba1f1-df07-4a5c-876a-953f178996be"
+     ping_url = f"{base_url}/dataset"
      dataset_id = "a0a49ca9-03bb-45e0-aa9a-ad59082ebdfc"
      page_size = 50
  
-     async def ping(self):
-         self.headers = {"Authorization": f"Token {self.api_key}"}
-         url = f"{self.base_url}/dataset"
-         response = await self.helpers.request(url, headers=self.headers)
-         status_code = getattr(response, "status_code", 0)
-         if status_code != 200:
-             response_text = getattr(response, "text", "no response from server")
-             return False, response_text
-         return True
+     def prepare_api_request(self, url, kwargs):
+         kwargs["headers"]["Authorization"] = f"Token {self.api_key}"
+         return url, kwargs
  
      async def handle_event(self, event):
-         query = self.make_query(event)
-         async for result_batch in self.query(query):
-             for hostname in set(result_batch):
-                 try:
-                     hostname = self.helpers.validators.validate_host(hostname)
-                 except ValueError as e:
-                     self.verbose(e)
-                     continue
-                 if hostname and hostname.endswith(f".{query}") and not hostname == event.data:
-                     await self.emit_event(
-                         hostname,
-                         "DNS_NAME",
-                         event,
-                         abort_if=self.abort_if,
-                         context=f'{{module}} searched {self.source_pretty_name} for "{query}" and found {{event.type}}: {{event.data}}',
-                     )
+         await self.handle_event_paginated(event)
  
-     async def query(self, query):
+     def make_url(self, query):
          url = f"{self.base_url}/view?q=hostname%20~%20%22.{self.helpers.quote(query)}%22"
          url += f"&dataset_id={self.dataset_id}"
          url += "&limit={page_size}&offset={offset}&select=hostname&orderby=hostname"
-         agen = self.helpers.api_page_iter(url, headers=self.headers, page_size=self.page_size)
-         try:
-             async for response in agen:
-                 subdomains = self.parse_results(response)
-                 self.verbose(f'Got {len(subdomains):,} subdomains for "{query}"')
-                 if not subdomains:
-                     break
-                 yield subdomains
-         finally:
-             agen.aclose()
+         return url
  
-     def parse_results(self, j):
+     async def parse_results(self, j, query):
          results = j.get("results", [])
          subdomains = set()
          for item in results:
bbot/modules/trufflehog.py CHANGED
@@ -1,5 +1,4 @@
  import json
- from pathlib import Path
  from bbot.modules.base import BaseModule
  
  
@@ -14,7 +13,7 @@ class trufflehog(BaseModule):
      }
  
      options = {
-         "version": "3.81.10",
+         "version": "3.84.1",
          "config": "",
          "only_verified": True,
          "concurrency": 8,
@@ -31,7 +30,7 @@
          {
              "name": "Download trufflehog",
              "unarchive": {
-                 "src": "https://github.com/trufflesecurity/trufflehog/releases/download/v#{BBOT_MODULES_TRUFFLEHOG_VERSION}/trufflehog_#{BBOT_MODULES_TRUFFLEHOG_VERSION}_#{BBOT_OS}_#{BBOT_CPU_ARCH}.tar.gz",
+                 "src": "https://github.com/trufflesecurity/trufflehog/releases/download/v#{BBOT_MODULES_TRUFFLEHOG_VERSION}/trufflehog_#{BBOT_MODULES_TRUFFLEHOG_VERSION}_#{BBOT_OS_PLATFORM}_#{BBOT_CPU_ARCH}.tar.gz",
                  "include": "trufflehog",
                  "dest": "#{BBOT_TOOLS}",
                  "remote_src": True,
@@ -52,7 +51,7 @@
          self.github_token = ""
          if self.deleted_forks:
              self.warning(
-                 f"Deleted forks is enabled. Scanning for deleted forks is slooooooowwwww. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours."
+                 "Deleted forks is enabled. Scanning for deleted forks is slooooooowwwww. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours."
              )
          for module_name in ("github", "github_codesearch", "github_org", "git_clone"):
              module_config = self.scan.config.get("modules", {}).get(module_name, {})
@@ -65,7 +64,6 @@
              if not self.github_token:
                  self.deleted_forks = False
                  return None, "A github api_key must be provided to the github modules for deleted forks to be scanned"
-         self.processed = set()
          return True
  
      async def filter_event(self, event):
@@ -78,12 +76,8 @@
              else:
                  return False, "Deleted forks is not enabled"
          else:
-             path = event.data["path"]
-             for processed in self.processed:
-                 processed_path = Path(processed)
-                 new_path = Path(path)
-                 if new_path.is_relative_to(processed_path):
-                     return False, "Parent folder has already been processed"
+             if "parsed-folder" in event.tags:
+                 return False, "Not accepting parsed-folder events"
          return True
  
      async def handle_event(self, event):
@@ -94,11 +88,12 @@
              module = "github-experimental"
          else:
              path = event.data["path"]
-             self.processed.add(path)
              if "git" in event.tags:
                  module = "git"
              elif "docker" in event.tags:
                  module = "docker"
+             elif "postman" in event.tags:
+                 module = "postman"
              else:
                  module = "filesystem"
          if event.type == "CODE_REPOSITORY":
@@ -164,6 +159,9 @@
          elif module == "docker":
              command.append("docker")
              command.append("--image=file://" + path)
+         elif module == "postman":
+             command.append("postman")
+             command.append("--workspace-paths=" + path)
          elif module == "filesystem":
              command.append("filesystem")
              command.append(path)
bbot/modules/url_manipulation.py CHANGED
@@ -69,11 +69,11 @@ class url_manipulation(BaseModule):
  
              if subject_response:
                  subject_content = "".join([str(x) for x in subject_response.headers])
-                 if subject_response.text != None:
+                 if subject_response.text is not None:
                      subject_content += subject_response.text
  
                  if self.rand_string not in subject_content:
-                     if match == False:
+                     if match is False:
                          if str(subject_response.status_code).startswith("2"):
                              if "body" in reasons:
                                  reported_signature = f"Modified URL: {sig[1]}"
@@ -98,7 +98,7 @@
          return False
  
      def format_signature(self, sig, event):
-         if sig[2] == True:
+         if sig[2] is True:
              cleaned_path = event.parsed_url.path.strip("/")
          else:
              cleaned_path = event.parsed_url.path.lstrip("/")
bbot/modules/urlscan.py CHANGED
@@ -78,5 +78,5 @@
              else:
                  self.debug(f'No results for "{query}"')
          except Exception:
-             self.verbose(f"Error retrieving urlscan results")
+             self.verbose("Error retrieving urlscan results")
          return results
bbot/modules/viewdns.py CHANGED
@@ -48,7 +48,7 @@
  
          html = self.helpers.beautifulsoup(content, "html.parser")
          if html is False:
-             self.debug(f"BeautifulSoup returned False")
+             self.debug("BeautifulSoup returned False")
              return results
          found = set()
          for table_row in html.findAll("tr"):
bbot/modules/virustotal.py CHANGED
@@ -15,37 +15,15 @@ class virustotal(subdomain_enum_apikey):
      options_desc = {"api_key": "VirusTotal API Key"}
  
      base_url = "https://www.virustotal.com/api/v3"
+     api_page_iter_kwargs = {"json": False, "next_key": lambda r: r.json().get("links", {}).get("next", "")}
  
-     async def setup(self):
-         self.api_key = self.config.get("api_key", "")
-         self.headers = {"x-apikey": self.api_key}
-         return await super().setup()
+     def make_url(self, query):
+         return f"{self.base_url}/domains/{self.helpers.quote(query)}/subdomains"
  
-     async def ping(self):
-         # virustotal does not have a ping function
-         return
+     def prepare_api_request(self, url, kwargs):
+         kwargs["headers"]["x-apikey"] = self.api_key
+         return url, kwargs
  
-     def parse_results(self, r, query):
-         results = set()
+     async def parse_results(self, r, query):
          text = getattr(r, "text", "")
-         for match in self.helpers.regexes.dns_name_regex.findall(text):
-             match = match.lower()
-             if match.endswith(query):
-                 results.add(match)
-         return results
-
-     async def query(self, query):
-         results = set()
-         url = f"{self.base_url}/domains/{self.helpers.quote(query)}/subdomains"
-         agen = self.helpers.api_page_iter(
-             url, json=False, headers=self.headers, next_key=lambda r: r.json().get("links", {}).get("next", "")
-         )
-         try:
-             async for response in agen:
-                 r = self.parse_results(response, query)
-                 if not r:
-                     break
-                 results.update(r)
-         finally:
-             agen.aclose()
-         return results
+         return await self.scan.extract_in_scope_hostnames(text)
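
The api_page_iter_kwargs override switches the shared pagination helper into cursor mode: rather than templating {page}/{offset} into the URL, each follow-up URL is read out of the previous JSON body. A small standalone illustration of the next_key callable (the response payloads are invented; only the "links" -> "next" shape comes from the diff):

# Illustration only; FakeResponse stands in for an HTTP response object.
class FakeResponse:
    def __init__(self, payload):
        self._payload = payload

    def json(self):
        return self._payload


next_key = lambda r: r.json().get("links", {}).get("next", "")

page_1 = FakeResponse({"data": ["www.evilcorp.com"], "links": {"next": "https://www.virustotal.com/api/v3/domains/evilcorp.com/subdomains?cursor=abc"}})
page_2 = FakeResponse({"data": ["mail.evilcorp.com"], "links": {}})

print(next_key(page_1))  # next page URL -> iteration continues
print(next_key(page_2))  # "" -> iteration stops
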
bbot/modules/wafw00f.py CHANGED
@@ -52,7 +52,7 @@
                  context=f"{{module}} scanned {url} and identified {{event.type}}: {waf}",
              )
          else:
-             if self.config.get("generic_detect") == True:
+             if self.config.get("generic_detect") is True:
                  generic = await self.helpers.run_in_executor(WW.genericdetect)
                  if generic:
                      waf = "generic detection"
bbot/modules/wayback.py CHANGED
@@ -10,7 +10,7 @@ class wayback(subdomain_enum):
      meta = {
          "description": "Query archive.org's API for subdomains",
          "created_date": "2022-04-01",
-         "author": "@pmueller",
+         "author": "@liquidsec",
      }
      options = {"urls": False, "garbage_threshold": 10}
      options_desc = {
bbot/modules/wpscan.py CHANGED
@@ -14,26 +14,26 @@ class wpscan(BaseModule):
  
      options = {
          "api_key": "",
-         "enumerate": "vp,vt,tt,cb,dbe,u,m",
+         "enumerate": "vp,vt,cb,dbe",
          "threads": 5,
-         "request_timeout": 60,
-         "connection_timeout": 30,
+         "request_timeout": 5,
+         "connection_timeout": 2,
          "disable_tls_checks": True,
          "force": False,
      }
      options_desc = {
          "api_key": "WPScan API Key",
-         "enumerate": "Enumeration Process see wpscan help documentation (default: vp,vt,tt,cb,dbe,u,m)",
+         "enumerate": "Enumeration Process see wpscan help documentation (default: vp,vt,cb,dbe)",
          "threads": "How many wpscan threads to spawn (default is 5)",
-         "request_timeout": "The request timeout in seconds (default 60)",
-         "connection_timeout": "The connection timeout in seconds (default 30)",
+         "request_timeout": "The request timeout in seconds (default 5)",
+         "connection_timeout": "The connection timeout in seconds (default 2)",
          "disable_tls_checks": "Disables the SSL/TLS certificate verification (Default True)",
          "force": "Do not check if the target is running WordPress or returns a 403",
      }
      deps_apt = ["curl", "make", "gcc"]
      deps_ansible = [
          {
-             "name": "Install Ruby Deps (Debian/Ubuntu)",
+             "name": "Install Ruby Deps (Debian)",
              "package": {"name": ["ruby-rubygems", "ruby-dev"], "state": "present"},
              "become": True,
              "when": "ansible_facts['os_family'] == 'Debian'",
@@ -48,7 +48,13 @@
              "name": "Install Ruby Deps (Fedora)",
              "package": {"name": ["rubygems", "ruby-devel"], "state": "present"},
              "become": True,
-             "when": "ansible_facts['os_family'] == 'Fedora'",
+             "when": "ansible_facts['os_family'] == 'RedHat'",
+         },
+         {
+             "name": "Install Ruby Deps (Alpine)",
+             "package": {"name": ["ruby-dev", "ruby-bundler"], "state": "present"},
+             "become": True,
+             "when": "ansible_facts['os_family'] == 'Alpine'",
          },
          {
              "name": "Install wpscan gem",
@@ -61,11 +67,11 @@
          self.processed = set()
          self.ignore_events = ["xmlrpc", "readme"]
          self.api_key = self.config.get("api_key", "")
-         self.enumerate = self.config.get("enumerate", "vp,vt,tt,cb,dbe,u,m")
+         self.enumerate = self.config.get("enumerate", "vp,vt,cb,dbe")
          self.proxy = self.scan.web_config.get("http_proxy", "")
          self.threads = self.config.get("threads", 5)
-         self.request_timeout = self.config.get("request_timeout", 60)
-         self.connection_timeout = self.config.get("connection_timeout", 30)
+         self.request_timeout = self.config.get("request_timeout", 5)
+         self.connection_timeout = self.config.get("connection_timeout", 2)
          self.disable_tls_checks = self.config.get("disable_tls_checks", True)
          self.force = self.config.get("force", False)
          return True
bbot/modules/zoomeye.py CHANGED
@@ -22,13 +22,16 @@ class zoomeye(subdomain_enum_apikey):
  
      async def setup(self):
          self.max_pages = self.config.get("max_pages", 20)
-         self.headers = {"API-KEY": self.config.get("api_key", "")}
          self.include_related = self.config.get("include_related", False)
          return await super().setup()
  
+     def prepare_api_request(self, url, kwargs):
+         kwargs["headers"]["API-KEY"] = self.api_key
+         return url, kwargs
+
      async def ping(self):
          url = f"{self.base_url}/resources-info"
-         r = await self.helpers.request(url, headers=self.headers)
+         r = await self.api_request(url)
          assert int(r.json()["quota_info"]["remain_total_quota"]) > 0, "No quota remaining"
  
      async def handle_event(self, event):
@@ -54,10 +57,10 @@
          query_type = 0 if self.include_related else 1
          url = f"{self.base_url}/domain/search?q={self.helpers.quote(query)}&type={query_type}&page=" + "{page}"
          i = 0
-         agen = self.helpers.api_page_iter(url, headers=self.headers)
+         agen = self.api_page_iter(url)
          try:
              async for j in agen:
-                 r = list(self.parse_results(j))
+                 r = list(await self.parse_results(j))
                  if r:
                      results.update(set(r))
                  if not r or i >= (self.max_pages - 1):
@@ -67,6 +70,8 @@
              agen.aclose()
          return results
  
-     def parse_results(self, r):
+     async def parse_results(self, r):
+         results = set()
          for entry in r.get("list", []):
-             yield entry["name"]
+             results.add(entry["name"])
+         return results
bbot/presets/baddns-thorough.yml ADDED
@@ -0,0 +1,12 @@
+ description: Run all baddns modules and submodules.
+
+
+ modules:
+   - baddns
+   - baddns_zone
+   - baddns_direct
+
+ config:
+   modules:
+     baddns:
+       enabled_submodules: [CNAME,references,MX,NS,TXT]
bbot/presets/fast.yml ADDED
@@ -0,0 +1,16 @@
+ description: Scan only the provided targets as fast as possible - no extra discovery
+
+ exclude_modules:
+   - excavate
+
+ config:
+   # only scan the exact targets specified
+   scope:
+     strict: true
+   # speed up dns resolution by doing A/AAAA only - not MX/NS/SRV/etc
+   dns:
+     minimal: true
+   # essential speculation only
+   modules:
+     speculate:
+       essential_only: true
bbot/presets/kitchen-sink.yml CHANGED
@@ -10,6 +10,7 @@ include:
    - paramminer
    - dirbust-light
    - web-screenshots
+   - baddns-thorough
  
  config:
    modules:
bbot/presets/spider.yml CHANGED
@@ -3,6 +3,10 @@ description: Recursive web spider
  modules:
    - httpx
  
+ blacklist:
+   # Prevent spider from invalidating sessions by logging out
+   - "RE:/.*(sign|log)[_-]?out"
+
  config:
    web:
      # how many links to follow in a row