bbot 2.0.1.4654rc0__py3-none-any.whl → 2.3.0.5397rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of bbot might be problematic.

Files changed (270)
  1. bbot/__init__.py +1 -1
  2. bbot/cli.py +3 -7
  3. bbot/core/config/files.py +0 -1
  4. bbot/core/config/logger.py +34 -4
  5. bbot/core/core.py +21 -6
  6. bbot/core/engine.py +9 -8
  7. bbot/core/event/base.py +162 -63
  8. bbot/core/helpers/bloom.py +10 -3
  9. bbot/core/helpers/command.py +9 -8
  10. bbot/core/helpers/depsinstaller/installer.py +89 -32
  11. bbot/core/helpers/depsinstaller/sudo_askpass.py +38 -2
  12. bbot/core/helpers/diff.py +10 -10
  13. bbot/core/helpers/dns/brute.py +18 -14
  14. bbot/core/helpers/dns/dns.py +16 -15
  15. bbot/core/helpers/dns/engine.py +159 -132
  16. bbot/core/helpers/dns/helpers.py +2 -2
  17. bbot/core/helpers/dns/mock.py +26 -8
  18. bbot/core/helpers/files.py +1 -1
  19. bbot/core/helpers/helper.py +7 -4
  20. bbot/core/helpers/interactsh.py +3 -3
  21. bbot/core/helpers/libmagic.py +65 -0
  22. bbot/core/helpers/misc.py +65 -22
  23. bbot/core/helpers/names_generator.py +17 -3
  24. bbot/core/helpers/process.py +0 -20
  25. bbot/core/helpers/regex.py +1 -1
  26. bbot/core/helpers/regexes.py +12 -6
  27. bbot/core/helpers/validators.py +1 -2
  28. bbot/core/helpers/web/client.py +1 -1
  29. bbot/core/helpers/web/engine.py +18 -13
  30. bbot/core/helpers/web/web.py +25 -116
  31. bbot/core/helpers/wordcloud.py +5 -5
  32. bbot/core/modules.py +36 -27
  33. bbot/core/multiprocess.py +58 -0
  34. bbot/core/shared_deps.py +46 -3
  35. bbot/db/sql/models.py +147 -0
  36. bbot/defaults.yml +15 -10
  37. bbot/errors.py +0 -8
  38. bbot/modules/anubisdb.py +2 -2
  39. bbot/modules/apkpure.py +63 -0
  40. bbot/modules/azure_tenant.py +2 -2
  41. bbot/modules/baddns.py +35 -19
  42. bbot/modules/baddns_direct.py +92 -0
  43. bbot/modules/baddns_zone.py +3 -8
  44. bbot/modules/badsecrets.py +4 -3
  45. bbot/modules/base.py +195 -51
  46. bbot/modules/bevigil.py +7 -7
  47. bbot/modules/binaryedge.py +7 -4
  48. bbot/modules/bufferoverrun.py +47 -0
  49. bbot/modules/builtwith.py +6 -10
  50. bbot/modules/bypass403.py +5 -5
  51. bbot/modules/c99.py +10 -7
  52. bbot/modules/censys.py +9 -13
  53. bbot/modules/certspotter.py +5 -3
  54. bbot/modules/chaos.py +9 -7
  55. bbot/modules/code_repository.py +1 -0
  56. bbot/modules/columbus.py +3 -3
  57. bbot/modules/crt.py +5 -3
  58. bbot/modules/deadly/dastardly.py +1 -1
  59. bbot/modules/deadly/ffuf.py +9 -9
  60. bbot/modules/deadly/nuclei.py +3 -3
  61. bbot/modules/deadly/vhost.py +4 -3
  62. bbot/modules/dehashed.py +1 -1
  63. bbot/modules/digitorus.py +1 -1
  64. bbot/modules/dnsbimi.py +145 -0
  65. bbot/modules/dnscaa.py +3 -3
  66. bbot/modules/dnsdumpster.py +4 -4
  67. bbot/modules/dnstlsrpt.py +144 -0
  68. bbot/modules/docker_pull.py +7 -5
  69. bbot/modules/dockerhub.py +2 -2
  70. bbot/modules/dotnetnuke.py +18 -19
  71. bbot/modules/emailformat.py +1 -1
  72. bbot/modules/extractous.py +122 -0
  73. bbot/modules/filedownload.py +9 -7
  74. bbot/modules/fullhunt.py +7 -4
  75. bbot/modules/generic_ssrf.py +5 -5
  76. bbot/modules/github_codesearch.py +3 -2
  77. bbot/modules/github_org.py +4 -4
  78. bbot/modules/github_workflows.py +4 -4
  79. bbot/modules/gitlab.py +2 -5
  80. bbot/modules/google_playstore.py +93 -0
  81. bbot/modules/gowitness.py +48 -50
  82. bbot/modules/hackertarget.py +5 -3
  83. bbot/modules/host_header.py +5 -5
  84. bbot/modules/httpx.py +1 -4
  85. bbot/modules/hunterio.py +3 -9
  86. bbot/modules/iis_shortnames.py +19 -30
  87. bbot/modules/internal/cloudcheck.py +27 -12
  88. bbot/modules/internal/dnsresolve.py +250 -276
  89. bbot/modules/internal/excavate.py +100 -64
  90. bbot/modules/internal/speculate.py +42 -33
  91. bbot/modules/internetdb.py +4 -2
  92. bbot/modules/ip2location.py +3 -5
  93. bbot/modules/ipneighbor.py +1 -1
  94. bbot/modules/ipstack.py +3 -8
  95. bbot/modules/jadx.py +87 -0
  96. bbot/modules/leakix.py +11 -10
  97. bbot/modules/myssl.py +2 -2
  98. bbot/modules/newsletters.py +2 -2
  99. bbot/modules/otx.py +5 -3
  100. bbot/modules/output/asset_inventory.py +7 -7
  101. bbot/modules/output/base.py +1 -1
  102. bbot/modules/output/csv.py +1 -2
  103. bbot/modules/output/http.py +20 -14
  104. bbot/modules/output/mysql.py +51 -0
  105. bbot/modules/output/neo4j.py +7 -2
  106. bbot/modules/output/postgres.py +49 -0
  107. bbot/modules/output/slack.py +0 -1
  108. bbot/modules/output/sqlite.py +29 -0
  109. bbot/modules/output/stdout.py +2 -2
  110. bbot/modules/output/teams.py +107 -6
  111. bbot/modules/paramminer_headers.py +5 -8
  112. bbot/modules/passivetotal.py +13 -13
  113. bbot/modules/portscan.py +32 -6
  114. bbot/modules/postman.py +50 -126
  115. bbot/modules/postman_download.py +220 -0
  116. bbot/modules/rapiddns.py +3 -8
  117. bbot/modules/report/asn.py +11 -11
  118. bbot/modules/robots.py +3 -3
  119. bbot/modules/securitytrails.py +7 -10
  120. bbot/modules/securitytxt.py +128 -0
  121. bbot/modules/shodan_dns.py +7 -9
  122. bbot/modules/sitedossier.py +1 -1
  123. bbot/modules/skymem.py +2 -2
  124. bbot/modules/social.py +2 -1
  125. bbot/modules/subdomaincenter.py +1 -1
  126. bbot/modules/subdomainradar.py +160 -0
  127. bbot/modules/telerik.py +8 -8
  128. bbot/modules/templates/bucket.py +1 -1
  129. bbot/modules/templates/github.py +22 -14
  130. bbot/modules/templates/postman.py +21 -0
  131. bbot/modules/templates/shodan.py +14 -13
  132. bbot/modules/templates/sql.py +95 -0
  133. bbot/modules/templates/subdomain_enum.py +53 -17
  134. bbot/modules/templates/webhook.py +2 -4
  135. bbot/modules/trickest.py +8 -37
  136. bbot/modules/trufflehog.py +18 -3
  137. bbot/modules/url_manipulation.py +3 -3
  138. bbot/modules/urlscan.py +1 -1
  139. bbot/modules/viewdns.py +1 -1
  140. bbot/modules/virustotal.py +8 -30
  141. bbot/modules/wafw00f.py +1 -1
  142. bbot/modules/wayback.py +1 -1
  143. bbot/modules/wpscan.py +17 -11
  144. bbot/modules/zoomeye.py +11 -6
  145. bbot/presets/baddns-thorough.yml +12 -0
  146. bbot/presets/fast.yml +16 -0
  147. bbot/presets/kitchen-sink.yml +1 -0
  148. bbot/presets/spider.yml +4 -0
  149. bbot/presets/subdomain-enum.yml +7 -7
  150. bbot/scanner/manager.py +5 -16
  151. bbot/scanner/preset/args.py +44 -26
  152. bbot/scanner/preset/environ.py +7 -2
  153. bbot/scanner/preset/path.py +7 -4
  154. bbot/scanner/preset/preset.py +36 -23
  155. bbot/scanner/scanner.py +176 -63
  156. bbot/scanner/target.py +236 -434
  157. bbot/scripts/docs.py +1 -1
  158. bbot/test/bbot_fixtures.py +22 -3
  159. bbot/test/conftest.py +132 -100
  160. bbot/test/fastapi_test.py +17 -0
  161. bbot/test/owasp_mastg.apk +0 -0
  162. bbot/test/run_tests.sh +4 -4
  163. bbot/test/test.conf +2 -0
  164. bbot/test/test_step_1/test_bbot_fastapi.py +82 -0
  165. bbot/test/test_step_1/test_bloom_filter.py +2 -0
  166. bbot/test/test_step_1/test_cli.py +138 -64
  167. bbot/test/test_step_1/test_dns.py +392 -70
  168. bbot/test/test_step_1/test_engine.py +17 -17
  169. bbot/test/test_step_1/test_events.py +203 -37
  170. bbot/test/test_step_1/test_helpers.py +64 -28
  171. bbot/test/test_step_1/test_manager_deduplication.py +1 -1
  172. bbot/test/test_step_1/test_manager_scope_accuracy.py +336 -338
  173. bbot/test/test_step_1/test_modules_basic.py +69 -71
  174. bbot/test/test_step_1/test_presets.py +184 -96
  175. bbot/test/test_step_1/test_python_api.py +7 -2
  176. bbot/test/test_step_1/test_regexes.py +35 -5
  177. bbot/test/test_step_1/test_scan.py +39 -5
  178. bbot/test/test_step_1/test_scope.py +5 -4
  179. bbot/test/test_step_1/test_target.py +243 -145
  180. bbot/test/test_step_1/test_web.py +48 -10
  181. bbot/test/test_step_2/module_tests/base.py +17 -20
  182. bbot/test/test_step_2/module_tests/test_module_anubisdb.py +1 -1
  183. bbot/test/test_step_2/module_tests/test_module_apkpure.py +71 -0
  184. bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +0 -1
  185. bbot/test/test_step_2/module_tests/test_module_azure_realm.py +1 -1
  186. bbot/test/test_step_2/module_tests/test_module_baddns.py +6 -6
  187. bbot/test/test_step_2/module_tests/test_module_baddns_direct.py +62 -0
  188. bbot/test/test_step_2/module_tests/test_module_bevigil.py +29 -2
  189. bbot/test/test_step_2/module_tests/test_module_binaryedge.py +4 -2
  190. bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +2 -2
  191. bbot/test/test_step_2/module_tests/test_module_bucket_azure.py +1 -1
  192. bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +35 -0
  193. bbot/test/test_step_2/module_tests/test_module_builtwith.py +2 -2
  194. bbot/test/test_step_2/module_tests/test_module_bypass403.py +1 -1
  195. bbot/test/test_step_2/module_tests/test_module_c99.py +126 -0
  196. bbot/test/test_step_2/module_tests/test_module_censys.py +4 -1
  197. bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +4 -0
  198. bbot/test/test_step_2/module_tests/test_module_code_repository.py +11 -1
  199. bbot/test/test_step_2/module_tests/test_module_columbus.py +1 -1
  200. bbot/test/test_step_2/module_tests/test_module_credshed.py +3 -3
  201. bbot/test/test_step_2/module_tests/test_module_dastardly.py +2 -1
  202. bbot/test/test_step_2/module_tests/test_module_dehashed.py +2 -2
  203. bbot/test/test_step_2/module_tests/test_module_digitorus.py +1 -1
  204. bbot/test/test_step_2/module_tests/test_module_discord.py +1 -1
  205. bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +103 -0
  206. bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +9 -10
  207. bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py +1 -2
  208. bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +1 -2
  209. bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +4 -4
  210. bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +64 -0
  211. bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py +0 -8
  212. bbot/test/test_step_2/module_tests/test_module_excavate.py +17 -37
  213. bbot/test/test_step_2/module_tests/test_module_extractous.py +54 -0
  214. bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +1 -1
  215. bbot/test/test_step_2/module_tests/test_module_filedownload.py +14 -14
  216. bbot/test/test_step_2/module_tests/test_module_git_clone.py +2 -2
  217. bbot/test/test_step_2/module_tests/test_module_github_org.py +19 -8
  218. bbot/test/test_step_2/module_tests/test_module_github_workflows.py +1 -1
  219. bbot/test/test_step_2/module_tests/test_module_gitlab.py +9 -4
  220. bbot/test/test_step_2/module_tests/test_module_google_playstore.py +83 -0
  221. bbot/test/test_step_2/module_tests/test_module_gowitness.py +4 -4
  222. bbot/test/test_step_2/module_tests/test_module_host_header.py +1 -1
  223. bbot/test/test_step_2/module_tests/test_module_http.py +4 -4
  224. bbot/test/test_step_2/module_tests/test_module_httpx.py +10 -8
  225. bbot/test/test_step_2/module_tests/test_module_hunterio.py +68 -4
  226. bbot/test/test_step_2/module_tests/test_module_jadx.py +55 -0
  227. bbot/test/test_step_2/module_tests/test_module_json.py +24 -11
  228. bbot/test/test_step_2/module_tests/test_module_leakix.py +7 -3
  229. bbot/test/test_step_2/module_tests/test_module_mysql.py +76 -0
  230. bbot/test/test_step_2/module_tests/test_module_myssl.py +1 -1
  231. bbot/test/test_step_2/module_tests/test_module_neo4j.py +1 -1
  232. bbot/test/test_step_2/module_tests/test_module_newsletters.py +6 -6
  233. bbot/test/test_step_2/module_tests/test_module_ntlm.py +7 -7
  234. bbot/test/test_step_2/module_tests/test_module_oauth.py +1 -1
  235. bbot/test/test_step_2/module_tests/test_module_otx.py +1 -1
  236. bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py +1 -2
  237. bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +0 -6
  238. bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +2 -9
  239. bbot/test/test_step_2/module_tests/test_module_passivetotal.py +3 -1
  240. bbot/test/test_step_2/module_tests/test_module_portscan.py +9 -8
  241. bbot/test/test_step_2/module_tests/test_module_postgres.py +74 -0
  242. bbot/test/test_step_2/module_tests/test_module_postman.py +84 -253
  243. bbot/test/test_step_2/module_tests/test_module_postman_download.py +439 -0
  244. bbot/test/test_step_2/module_tests/test_module_rapiddns.py +93 -1
  245. bbot/test/test_step_2/module_tests/test_module_securitytxt.py +50 -0
  246. bbot/test/test_step_2/module_tests/test_module_shodan_dns.py +20 -1
  247. bbot/test/test_step_2/module_tests/test_module_sitedossier.py +2 -2
  248. bbot/test/test_step_2/module_tests/test_module_smuggler.py +1 -1
  249. bbot/test/test_step_2/module_tests/test_module_social.py +11 -1
  250. bbot/test/test_step_2/module_tests/test_module_speculate.py +2 -6
  251. bbot/test/test_step_2/module_tests/test_module_splunk.py +4 -4
  252. bbot/test/test_step_2/module_tests/test_module_sqlite.py +18 -0
  253. bbot/test/test_step_2/module_tests/test_module_sslcert.py +1 -1
  254. bbot/test/test_step_2/module_tests/test_module_stdout.py +5 -3
  255. bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py +1 -1
  256. bbot/test/test_step_2/module_tests/test_module_subdomainradar.py +208 -0
  257. bbot/test/test_step_2/module_tests/test_module_subdomains.py +1 -1
  258. bbot/test/test_step_2/module_tests/test_module_teams.py +8 -6
  259. bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
  260. bbot/test/test_step_2/module_tests/test_module_trufflehog.py +317 -11
  261. bbot/test/test_step_2/module_tests/test_module_wayback.py +1 -1
  262. bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py +135 -0
  263. {bbot-2.0.1.4654rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/METADATA +48 -18
  264. bbot-2.3.0.5397rc0.dist-info/RECORD +421 -0
  265. {bbot-2.0.1.4654rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/WHEEL +1 -1
  266. bbot/modules/unstructured.py +0 -163
  267. bbot/test/test_step_2/module_tests/test_module_unstructured.py +0 -102
  268. bbot-2.0.1.4654rc0.dist-info/RECORD +0 -385
  269. {bbot-2.0.1.4654rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/LICENSE +0 -0
  270. {bbot-2.0.1.4654rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/entry_points.txt +0 -0
@@ -11,36 +11,36 @@ class passivetotal(subdomain_enum_apikey):
         "author": "@TheTechromancer",
         "auth_required": True,
     }
-    options = {"username": "", "api_key": ""}
-    options_desc = {"username": "RiskIQ Username", "api_key": "RiskIQ API Key"}
+    options = {"api_key": ""}
+    options_desc = {"api_key": "PassiveTotal API Key in the format of 'username:api_key'"}
 
     base_url = "https://api.passivetotal.org/v2"
 
     async def setup(self):
-        self.username = self.config.get("username", "")
-        self.api_key = self.config.get("api_key", "")
-        self.auth = (self.username, self.api_key)
         return await super().setup()
 
     async def ping(self):
         url = f"{self.base_url}/account/quota"
-        j = (await self.request_with_fail_count(url, auth=self.auth)).json()
+        j = (await self.api_request(url)).json()
         limit = j["user"]["limits"]["search_api"]
         used = j["user"]["counts"]["search_api"]
         assert used < limit, "No quota remaining"
 
+    def prepare_api_request(self, url, kwargs):
+        api_username, api_key = self.api_key.split(":", 1)
+        kwargs["auth"] = (api_username, api_key)
+        return url, kwargs
+
     async def abort_if(self, event):
         # RiskIQ is famous for their junk data
         return await super().abort_if(event) or "unresolved" in event.tags
 
     async def request_url(self, query):
         url = f"{self.base_url}/enrichment/subdomains?query={self.helpers.quote(query)}"
-        return await self.request_with_fail_count(url, auth=self.auth)
+        return await self.api_request(url)
 
-    def parse_results(self, r, query):
+    async def parse_results(self, r, query):
+        results = set()
         for subdomain in r.json().get("subdomains", []):
-            yield f"{subdomain}.{query}"
-
-    @property
-    def auth_secret(self):
-        return self.username and self.api_key
+            results.add(f"{subdomain}.{query}")
+        return results
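Note: the passivetotal module now takes one combined credential instead of separate username and api_key options, splitting it inside prepare_api_request. A minimal usage sketch via BBOT's Python API follows; the target and credential are placeholders, and the Scanner call reflects BBOT's documented Python interface rather than anything shown in this diff.

# Minimal sketch (assumes BBOT's public Python API; target and key are placeholders).
import asyncio

from bbot.scanner import Scanner


async def main():
    scan = Scanner(
        "example.com",
        modules=["passivetotal"],
        # one combined option replaces the old username/api_key pair
        config={"modules": {"passivetotal": {"api_key": "your_username:your_secret_key"}}},
    )
    async for event in scan.async_start():
        print(event)


asyncio.run(main())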
bbot/modules/portscan.py CHANGED
@@ -6,6 +6,9 @@ from radixtarget import RadixTarget
 from bbot.modules.base import BaseModule
 
 
+# TODO: this module is getting big. It should probably be two modules: one for ping and one for SYN.
+
+
 class portscan(BaseModule):
     flags = ["active", "portscan", "safe"]
     watched_events = ["IP_ADDRESS", "IP_RANGE", "DNS_NAME"]
@@ -27,6 +30,8 @@ class portscan(BaseModule):
         "adapter_ip": "",
         "adapter_mac": "",
         "router_mac": "",
+        "cdn_tags": "cdn-",
+        "allowed_cdn_ports": None,
     }
     options_desc = {
         "top_ports": "Top ports to scan (default 100) (to override, specify 'ports')",
@@ -39,6 +44,8 @@ class portscan(BaseModule):
         "adapter_ip": "Send packets using this IP address. Not needed unless masscan's autodetection fails",
         "adapter_mac": "Send packets using this as the source MAC address. Not needed unless masscan's autodetection fails",
         "router_mac": "Send packets to this MAC address as the destination. Not needed unless masscan's autodetection fails",
+        "cdn_tags": "Comma-separated list of tags to skip, e.g. 'cdn,cloud'",
+        "allowed_cdn_ports": "Comma-separated list of ports that are allowed to be scanned for CDNs",
     }
     deps_common = ["masscan"]
     batch_size = 1000000
@@ -60,7 +67,15 @@ class portscan(BaseModule):
         try:
             self.helpers.parse_port_string(self.ports)
         except ValueError as e:
-            return False, f"Error parsing ports: {e}"
+            return False, f"Error parsing ports '{self.ports}': {e}"
+        self.cdn_tags = [t.strip() for t in self.config.get("cdn_tags", "").split(",")]
+        self.allowed_cdn_ports = self.config.get("allowed_cdn_ports", None)
+        if self.allowed_cdn_ports is not None:
+            try:
+                self.allowed_cdn_ports = [int(p.strip()) for p in self.allowed_cdn_ports.split(",")]
+            except Exception as e:
+                return False, f"Error parsing allowed CDN ports '{self.allowed_cdn_ports}': {e}"
+
         # whether we've finished scanning our original scan targets
         self.scanned_initial_targets = False
         # keeps track of individual scanned IPs and their open ports
@@ -84,17 +99,17 @@ class portscan(BaseModule):
             return False, "Masscan failed to run"
         returncode = getattr(ipv6_result, "returncode", 0)
         if returncode and "failed to detect IPv6 address" in ipv6_result.stderr:
-            self.warning(f"It looks like you are not set up for IPv6. IPv6 targets will not be scanned.")
+            self.warning("It looks like you are not set up for IPv6. IPv6 targets will not be scanned.")
             self.ipv6_support = False
         return True
 
     async def handle_batch(self, *events):
-        # on our first run, we automatically include all our intial scan targets
+        # on our first run, we automatically include all our initial scan targets
         if not self.scanned_initial_targets:
             self.scanned_initial_targets = True
             events = set(events)
             events.update(
-                set([e for e in self.scan.target.seeds.events if e.type in ("DNS_NAME", "IP_ADDRESS", "IP_RANGE")])
+                {e for e in self.scan.target.seeds.events if e.type in ("DNS_NAME", "IP_ADDRESS", "IP_RANGE")}
             )
 
         # ping scan
@@ -227,9 +242,20 @@ class portscan(BaseModule):
             parent=parent_event,
             context=f"{{module}} executed a {scan_type} scan against {parent_event.data} and found: {{event.type}}: {{event.data}}",
         )
-        await self.emit_event(event)
+
+        await self.emit_event(event, abort_if=self.abort_if)
         return event
 
+    def abort_if(self, event):
+        if self.allowed_cdn_ports is not None:
+            # if the host is a CDN
+            for cdn_tag in self.cdn_tags:
+                if any(t.startswith(str(cdn_tag)) for t in event.tags):
+                    # and if its port isn't in the list of allowed CDN ports
+                    if event.port not in self.allowed_cdn_ports:
+                        return True, "event is a CDN and port is not in the allowed list"
+        return False
+
     def parse_json_line(self, line):
         try:
             j = json.loads(line)
@@ -308,7 +334,7 @@ class portscan(BaseModule):
             if "FAIL" in s:
                 self.warning(s)
                 self.warning(
-                    f'Masscan failed to detect interface. Recommend passing "adapter_ip", "adapter_mac", and "router_mac" config options to portscan module.'
+                    'Masscan failed to detect interface. Recommend passing "adapter_ip", "adapter_mac", and "router_mac" config options to portscan module.'
                 )
             else:
                 self.verbose(s)
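The new cdn_tags / allowed_cdn_ports options let portscan suppress open-port events for CDN-tagged hosts unless the port is explicitly allowed. The standalone sketch below mirrors that abort_if decision with plain data; the tag values and allowed-port list are illustrative, and only the "cdn-" prefix comes from the module's default cdn_tags value.

# Standalone sketch of the CDN port filter added above.
def should_skip(tags, port, cdn_tags=("cdn-",), allowed_cdn_ports=(80, 443)):
    """Return True when the host carries a CDN tag and the port is not explicitly allowed."""
    is_cdn = any(tag.startswith(prefix) for prefix in cdn_tags for tag in tags)
    return is_cdn and port not in allowed_cdn_ports


print(should_skip({"cdn-cloudflare", "in-scope"}, 8443))  # True: CDN host, non-allowed port
print(should_skip({"cdn-cloudflare", "in-scope"}, 443))   # False: CDN host, allowed port
print(should_skip({"in-scope"}, 8443))                    # False: not a CDN host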
bbot/modules/postman.py CHANGED
@@ -1,36 +1,62 @@
-from bbot.modules.templates.subdomain_enum import subdomain_enum
+from bbot.modules.templates.postman import postman
 
 
-class postman(subdomain_enum):
-    watched_events = ["DNS_NAME"]
-    produced_events = ["URL_UNVERIFIED"]
+class postman(postman):
+    watched_events = ["ORG_STUB", "SOCIAL"]
+    produced_events = ["CODE_REPOSITORY"]
     flags = ["passive", "subdomain-enum", "safe", "code-enum"]
     meta = {
-        "description": "Query Postman's API for related workspaces, collections, requests",
-        "created_date": "2023-12-23",
+        "description": "Query Postman's API for related workspaces, collections, requests and download them",
+        "created_date": "2024-09-07",
         "author": "@domwhewell-sage",
     }
 
-    base_url = "https://www.postman.com/_api"
-
-    headers = {
-        "Content-Type": "application/json",
-        "X-App-Version": "10.18.8-230926-0808",
-        "X-Entity-Team-Id": "0",
-        "Origin": "https://www.postman.com",
-        "Referer": "https://www.postman.com/search?q=&scope=public&type=all",
-    }
-
     reject_wildcards = False
 
     async def handle_event(self, event):
-        query = self.make_query(event)
-        self.verbose(f"Searching for any postman workspaces, collections, requests belonging to {query}")
-        for url, context in await self.query(query):
-            await self.emit_event(url, "URL_UNVERIFIED", parent=event, tags="httpx-safe", context=context)
+        # Handle postman profile
+        if event.type == "SOCIAL":
+            await self.handle_profile(event)
+        elif event.type == "ORG_STUB":
+            await self.handle_org_stub(event)
+
+    async def handle_profile(self, event):
+        profile_name = event.data.get("profile_name", "")
+        self.verbose(f"Searching for postman workspaces, collections, requests belonging to {profile_name}")
+        for item in await self.query(profile_name):
+            workspace = item["document"]
+            name = workspace["slug"]
+            profile = workspace["publisherHandle"]
+            if profile_name.lower() == profile.lower():
+                self.verbose(f"Got {name}")
+                workspace_url = f"{self.html_url}/{profile}/{name}"
+                await self.emit_event(
+                    {"url": workspace_url},
+                    "CODE_REPOSITORY",
+                    tags="postman",
+                    parent=event,
+                    context=f'{{module}} searched postman.com for workspaces belonging to "{profile_name}" and found "{name}" at {{event.type}}: {workspace_url}',
+                )
+
+    async def handle_org_stub(self, event):
+        org_name = event.data
+        self.verbose(f"Searching for any postman workspaces, collections, requests for {org_name}")
+        for item in await self.query(org_name):
+            workspace = item["document"]
+            name = workspace["slug"]
+            profile = workspace["publisherHandle"]
+            self.verbose(f"Got {name}")
+            workspace_url = f"{self.html_url}/{profile}/{name}"
+            await self.emit_event(
+                {"url": workspace_url},
+                "CODE_REPOSITORY",
+                tags="postman",
+                parent=event,
+                context=f'{{module}} searched postman.com for "{org_name}" and found matching workspace "{name}" at {{event.type}}: {workspace_url}',
+            )
 
     async def query(self, query):
-        interesting_urls = []
+        data = []
         url = f"{self.base_url}/ws/proxy"
         json = {
             "service": "search",
@@ -39,11 +65,6 @@ class postman(subdomain_enum):
             "body": {
                 "queryIndices": [
                     "collaboration.workspace",
-                    "runtime.collection",
-                    "runtime.request",
-                    "adp.api",
-                    "flow.flow",
-                    "apinetwork.team",
                 ],
                 "queryText": self.helpers.quote(query),
                 "size": 100,
@@ -57,108 +78,11 @@ class postman(subdomain_enum):
         }
         r = await self.helpers.request(url, method="POST", json=json, headers=self.headers)
         if r is None:
-            return interesting_urls
+            return data
         status_code = getattr(r, "status_code", 0)
         try:
             json = r.json()
         except Exception as e:
             self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
-            return interesting_urls
-        workspaces = []
-        for item in json.get("data", {}):
-            for workspace in item.get("document", {}).get("workspaces", []):
-                if workspace not in workspaces:
-                    workspaces.append(workspace)
-        for item in workspaces:
-            id = item.get("id", "")
-            name = item.get("name", "")
-            tldextract = self.helpers.tldextract(query)
-            if tldextract.domain.lower() in name.lower():
-                self.verbose(f"Discovered workspace {name} ({id})")
-                workspace_url = f"{self.base_url}/workspace/{id}"
-                interesting_urls.append(
-                    (
-                        workspace_url,
-                        f'{{module}} searched postman.com for "{query}" and found matching workspace "{name}" at {{event.type}}: {workspace_url}',
-                    )
-                )
-                environments, collections = await self.search_workspace(id)
-                globals_url = f"{self.base_url}/workspace/{id}/globals"
-                interesting_urls.append(
-                    (
-                        globals_url,
-                        f'{{module}} searched postman.com for "{query}", found matching workspace "{name}" at {workspace_url}, and found globals at {{event.type}}: {globals_url}',
-                    )
-                )
-                for e_id in environments:
-                    env_url = f"{self.base_url}/environment/{e_id}"
-                    interesting_urls.append(
-                        (
-                            env_url,
-                            f'{{module}} searched postman.com for "{query}", found matching workspace "{name}" at {workspace_url}, enumerated environments, and found {{event.type}}: {env_url}',
-                        )
-                    )
-                for c_id in collections:
-                    collection_url = f"{self.base_url}/collection/{c_id}"
-                    interesting_urls.append(
-                        (
-                            collection_url,
-                            f'{{module}} searched postman.com for "{query}", found matching workspace "{name}" at {workspace_url}, enumerated collections, and found {{event.type}}: {collection_url}',
-                        )
-                    )
-                requests = await self.search_collections(id)
-                for r_id in requests:
-                    request_url = f"{self.base_url}/request/{r_id}"
-                    interesting_urls.append(
-                        (
-                            request_url,
-                            f'{{module}} searched postman.com for "{query}", found matching workspace "{name}" at {workspace_url}, enumerated requests, and found {{event.type}}: {request_url}',
-                        )
-                    )
-            else:
-                self.verbose(f"Skipping workspace {name} ({id}) as it does not appear to be in scope")
-        return interesting_urls
-
-    async def search_workspace(self, id):
-        url = f"{self.base_url}/workspace/{id}"
-        r = await self.helpers.request(url)
-        if r is None:
-            return [], []
-        status_code = getattr(r, "status_code", 0)
-        try:
-            json = r.json()
-            if not isinstance(json, dict):
-                raise ValueError(f"Got unexpected value for JSON: {json}")
-        except Exception as e:
-            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
-            return [], []
-        environments = json.get("data", {}).get("dependencies", {}).get("environments", [])
-        collections = json.get("data", {}).get("dependencies", {}).get("collections", [])
-        return environments, collections
-
-    async def search_collections(self, id):
-        request_ids = []
-        url = f"{self.base_url}/list/collection?workspace={id}"
-        r = await self.helpers.request(url, method="POST")
-        if r is None:
-            return request_ids
-        status_code = getattr(r, "status_code", 0)
-        try:
-            json = r.json()
-        except Exception as e:
-            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
-            return request_ids
-        for item in json.get("data", {}):
-            request_ids.extend(await self.parse_collection(item))
-        return request_ids
-
-    async def parse_collection(self, json):
-        request_ids = []
-        folders = json.get("folders", [])
-        requests = json.get("requests", [])
-        for folder in folders:
-            request_ids.extend(await self.parse_collection(folder))
-        for request in requests:
-            r_id = request.get("id", "")
-            request_ids.append(r_id)
-        return request_ids
+            return None
+        return json.get("data", [])
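The rewritten postman module no longer crawls workspaces itself; it only turns search hits into CODE_REPOSITORY events and leaves downloading to the new postman_download module. The snippet below sketches how one search hit becomes a workspace URL; the sample item is fabricated, and HTML_URL is an assumption about the html_url attribute defined in the new postman template, which is not shown in this diff.

# Sketch of how a search hit becomes a CODE_REPOSITORY URL (sample data is fabricated).
HTML_URL = "https://www.postman.com"  # assumed value of the template's html_url

item = {"document": {"slug": "example-workspace", "publisherHandle": "example-team"}}
workspace = item["document"]
workspace_url = f"{HTML_URL}/{workspace['publisherHandle']}/{workspace['slug']}"
print(workspace_url)  # https://www.postman.com/example-team/example-workspace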
bbot/modules/postman_download.py ADDED
@@ -0,0 +1,220 @@
+import zipfile
+import json
+from pathlib import Path
+from bbot.modules.templates.postman import postman
+
+
+class postman_download(postman):
+    watched_events = ["CODE_REPOSITORY"]
+    produced_events = ["FILESYSTEM"]
+    flags = ["passive", "subdomain-enum", "safe", "code-enum"]
+    meta = {
+        "description": "Download workspaces, collections, requests from Postman",
+        "created_date": "2024-09-07",
+        "author": "@domwhewell-sage",
+    }
+    options = {"output_folder": "", "api_key": ""}
+    options_desc = {"output_folder": "Folder to download postman workspaces to", "api_key": "Postman API Key"}
+    scope_distance_modifier = 2
+
+    async def setup(self):
+        output_folder = self.config.get("output_folder")
+        if output_folder:
+            self.output_dir = Path(output_folder) / "postman_workspaces"
+        else:
+            self.output_dir = self.scan.home / "postman_workspaces"
+        self.helpers.mkdir(self.output_dir)
+        return await self.require_api_key()
+
+    def prepare_api_request(self, url, kwargs):
+        kwargs["headers"]["X-Api-Key"] = self.api_key
+        return url, kwargs
+
+    async def filter_event(self, event):
+        if event.type == "CODE_REPOSITORY":
+            if "postman" not in event.tags:
+                return False, "event is not a postman workspace"
+        return True
+
+    async def handle_event(self, event):
+        repo_url = event.data.get("url")
+        workspace_id = await self.get_workspace_id(repo_url)
+        if workspace_id:
+            self.verbose(f"Found workspace ID {workspace_id} for {repo_url}")
+            data = await self.request_workspace(workspace_id)
+            workspace = data["workspace"]
+            environments = data["environments"]
+            collections = data["collections"]
+            in_scope = await self.validate_workspace(workspace, environments, collections)
+            if in_scope:
+                workspace_path = self.save_workspace(workspace, environments, collections)
+                if workspace_path:
+                    self.verbose(f"Downloaded workspace from {repo_url} to {workspace_path}")
+                    codebase_event = self.make_event(
+                        {"path": str(workspace_path)}, "FILESYSTEM", tags=["postman", "workspace"], parent=event
+                    )
+                    await self.emit_event(
+                        codebase_event,
+                        context=f"{{module}} downloaded postman workspace at {repo_url} to {{event.type}}: {workspace_path}",
+                    )
+            else:
+                self.verbose(
+                    f"Failed to validate {repo_url} is in our scope as it does not contain any in-scope dns_names / emails, skipping download"
+                )
+
+    async def get_workspace_id(self, repo_url):
+        workspace_id = ""
+        profile = repo_url.split("/")[-2]
+        name = repo_url.split("/")[-1]
+        url = f"{self.base_url}/ws/proxy"
+        json = {
+            "service": "workspaces",
+            "method": "GET",
+            "path": f"/workspaces?handle={profile}&slug={name}",
+        }
+        r = await self.helpers.request(url, method="POST", json=json, headers=self.headers)
+        if r is None:
+            return workspace_id
+        status_code = getattr(r, "status_code", 0)
+        try:
+            json = r.json()
+        except Exception as e:
+            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
+            return workspace_id
+        data = json.get("data", [])
+        if len(data) == 1:
+            workspace_id = data[0]["id"]
+        return workspace_id
+
+    async def request_workspace(self, id):
+        data = {"workspace": {}, "environments": [], "collections": []}
+        workspace = await self.get_workspace(id)
+        if workspace:
+            # Main Workspace
+            name = workspace["name"]
+            data["workspace"] = workspace
+
+            # Workspace global variables
+            self.verbose(f"Downloading globals for workspace {name}")
+            globals = await self.get_globals(id)
+            data["environments"].append(globals)
+
+            # Workspace Environments
+            workspace_environments = workspace.get("environments", [])
+            if workspace_environments:
+                self.verbose(f"Downloading environments for workspace {name}")
+                for _ in workspace_environments:
+                    environment_id = _["uid"]
+                    environment = await self.get_environment(environment_id)
+                    data["environments"].append(environment)
+
+            # Workspace Collections
+            workspace_collections = workspace.get("collections", [])
+            if workspace_collections:
+                self.verbose(f"Downloading collections for workspace {name}")
+                for _ in workspace_collections:
+                    collection_id = _["uid"]
+                    collection = await self.get_collection(collection_id)
+                    data["collections"].append(collection)
+        return data
+
+    async def get_workspace(self, workspace_id):
+        workspace = {}
+        workspace_url = f"{self.api_url}/workspaces/{workspace_id}"
+        r = await self.api_request(workspace_url)
+        if r is None:
+            return workspace
+        status_code = getattr(r, "status_code", 0)
+        try:
+            json = r.json()
+        except Exception as e:
+            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
+            return workspace
+        workspace = json.get("workspace", {})
+        return workspace
+
+    async def get_globals(self, workspace_id):
+        globals = {}
+        globals_url = f"{self.base_url}/workspace/{workspace_id}/globals"
+        r = await self.helpers.request(globals_url, headers=self.headers)
+        if r is None:
+            return globals
+        status_code = getattr(r, "status_code", 0)
+        try:
+            json = r.json()
+        except Exception as e:
+            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
+            return globals
+        globals = json.get("data", {})
+        return globals
+
+    async def get_environment(self, environment_id):
+        environment = {}
+        environment_url = f"{self.api_url}/environments/{environment_id}"
+        r = await self.api_request(environment_url)
+        if r is None:
+            return environment
+        status_code = getattr(r, "status_code", 0)
+        try:
+            json = r.json()
+        except Exception as e:
+            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
+            return environment
+        environment = json.get("environment", {})
+        return environment
+
+    async def get_collection(self, collection_id):
+        collection = {}
+        collection_url = f"{self.api_url}/collections/{collection_id}"
+        r = await self.api_request(collection_url)
+        if r is None:
+            return collection
+        status_code = getattr(r, "status_code", 0)
+        try:
+            json = r.json()
+        except Exception as e:
+            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
+            return collection
+        collection = json.get("collection", {})
+        return collection
+
+    async def validate_workspace(self, workspace, environments, collections):
+        name = workspace.get("name", "")
+        full_wks = str([workspace, environments, collections])
+        in_scope_hosts = await self.scan.extract_in_scope_hostnames(full_wks)
+        if in_scope_hosts:
+            self.verbose(
+                f'Found in-scope hostname(s): "{in_scope_hosts}" in workspace {name}, it appears to be in-scope'
+            )
+            return True
+        return False
+
+    def save_workspace(self, workspace, environments, collections):
+        zip_path = None
+        # Create a folder for the workspace
+        name = workspace["name"]
+        id = workspace["id"]
+        folder = self.output_dir / name
+        self.helpers.mkdir(folder)
+        zip_path = folder / f"{id}.zip"
+
+        # Main Workspace
+        self.add_json_to_zip(zip_path, workspace, f"{name}.postman_workspace.json")
+
+        # Workspace Environments
+        if environments:
+            for environment in environments:
+                environment_id = environment["id"]
+                self.add_json_to_zip(zip_path, environment, f"{environment_id}.postman_environment.json")
+
+        # Workspace Collections
+        if collections:
+            for collection in collections:
+                collection_name = collection["info"]["name"]
+                self.add_json_to_zip(zip_path, collection, f"{collection_name}.postman_collection.json")
+        return zip_path
+
+    def add_json_to_zip(self, zip_path, data, filename):
+        with zipfile.ZipFile(zip_path, "a") as zipf:
+            json_content = json.dumps(data, indent=4)
+            zipf.writestr(filename, json_content)
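postman_download packages each in-scope workspace into a zip of JSON documents (one *.postman_workspace.json plus per-environment and per-collection files) and emits the archive path as a FILESYSTEM event. A small sketch for inspecting such an archive, assuming only the naming scheme shown in save_workspace above; the archive path is supplied by the caller.

# Inspect an archive produced by postman_download; pass the zip path as the first argument.
import json
import sys
import zipfile

with zipfile.ZipFile(sys.argv[1]) as zf:
    for name in zf.namelist():
        document = json.loads(zf.read(name))
        print(f"{name}: {len(document)} top-level keys")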
bbot/modules/rapiddns.py CHANGED
@@ -15,14 +15,9 @@ class rapiddns(subdomain_enum):
 
     async def request_url(self, query):
         url = f"{self.base_url}/subdomain/{self.helpers.quote(query)}?full=1#result"
-        response = await self.request_with_fail_count(url, timeout=self.http_timeout + 10)
+        response = await self.api_request(url, timeout=self.http_timeout + 10)
         return response
 
-    def parse_results(self, r, query):
-        results = set()
+    async def parse_results(self, r, query):
         text = getattr(r, "text", "")
-        for match in self.helpers.regexes.dns_name_regex.findall(text):
-            match = match.lower()
-            if match.endswith(query):
-                results.add(match)
-        return results
+        return await self.scan.extract_in_scope_hostnames(text)
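Both passivetotal and rapiddns now follow the same pattern under the subdomain_enum template: request_url calls api_request, and parse_results is an async method returning a set of hostnames. A hedged skeleton of that pattern follows; the module name and API endpoint are hypothetical, while the helper calls (api_request, helpers.quote, scan.extract_in_scope_hostnames) all appear in this diff.

# Hedged skeleton of the subdomain_enum pattern used by the modules above
# (module name and endpoint are hypothetical).
from bbot.modules.templates.subdomain_enum import subdomain_enum


class exampledb(subdomain_enum):
    watched_events = ["DNS_NAME"]
    produced_events = ["DNS_NAME"]
    meta = {"description": "Example passive subdomain source (illustrative only)"}

    base_url = "https://api.example-source.invalid"

    async def request_url(self, query):
        url = f"{self.base_url}/subdomains/{self.helpers.quote(query)}"
        return await self.api_request(url)

    async def parse_results(self, r, query):
        # filter the response text down to in-scope hostnames, as rapiddns now does
        return await self.scan.extract_in_scope_hostnames(getattr(r, "text", ""))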