bbot 2.0.1.4720rc0__py3-none-any.whl → 2.3.0.5401rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (278)
  1. bbot/__init__.py +1 -1
  2. bbot/cli.py +3 -7
  3. bbot/core/config/files.py +0 -1
  4. bbot/core/config/logger.py +34 -4
  5. bbot/core/core.py +21 -4
  6. bbot/core/engine.py +9 -8
  7. bbot/core/event/base.py +131 -52
  8. bbot/core/helpers/bloom.py +10 -3
  9. bbot/core/helpers/command.py +8 -7
  10. bbot/core/helpers/depsinstaller/installer.py +31 -13
  11. bbot/core/helpers/diff.py +10 -10
  12. bbot/core/helpers/dns/brute.py +7 -4
  13. bbot/core/helpers/dns/dns.py +1 -2
  14. bbot/core/helpers/dns/engine.py +4 -6
  15. bbot/core/helpers/dns/helpers.py +2 -2
  16. bbot/core/helpers/dns/mock.py +0 -1
  17. bbot/core/helpers/files.py +1 -1
  18. bbot/core/helpers/helper.py +7 -4
  19. bbot/core/helpers/interactsh.py +3 -3
  20. bbot/core/helpers/libmagic.py +65 -0
  21. bbot/core/helpers/misc.py +65 -22
  22. bbot/core/helpers/names_generator.py +17 -3
  23. bbot/core/helpers/process.py +0 -20
  24. bbot/core/helpers/regex.py +1 -1
  25. bbot/core/helpers/regexes.py +12 -6
  26. bbot/core/helpers/validators.py +1 -2
  27. bbot/core/helpers/web/client.py +1 -1
  28. bbot/core/helpers/web/engine.py +1 -2
  29. bbot/core/helpers/web/web.py +4 -114
  30. bbot/core/helpers/wordcloud.py +5 -5
  31. bbot/core/modules.py +36 -27
  32. bbot/core/multiprocess.py +58 -0
  33. bbot/core/shared_deps.py +46 -3
  34. bbot/db/sql/models.py +147 -0
  35. bbot/defaults.yml +12 -10
  36. bbot/modules/anubisdb.py +2 -2
  37. bbot/modules/apkpure.py +63 -0
  38. bbot/modules/azure_tenant.py +2 -2
  39. bbot/modules/baddns.py +35 -19
  40. bbot/modules/baddns_direct.py +92 -0
  41. bbot/modules/baddns_zone.py +3 -8
  42. bbot/modules/badsecrets.py +4 -3
  43. bbot/modules/base.py +195 -51
  44. bbot/modules/bevigil.py +7 -7
  45. bbot/modules/binaryedge.py +7 -4
  46. bbot/modules/bufferoverrun.py +47 -0
  47. bbot/modules/builtwith.py +6 -10
  48. bbot/modules/bypass403.py +5 -5
  49. bbot/modules/c99.py +10 -7
  50. bbot/modules/censys.py +9 -13
  51. bbot/modules/certspotter.py +5 -3
  52. bbot/modules/chaos.py +9 -7
  53. bbot/modules/code_repository.py +1 -0
  54. bbot/modules/columbus.py +3 -3
  55. bbot/modules/crt.py +5 -3
  56. bbot/modules/deadly/dastardly.py +1 -1
  57. bbot/modules/deadly/ffuf.py +9 -9
  58. bbot/modules/deadly/nuclei.py +3 -3
  59. bbot/modules/deadly/vhost.py +4 -3
  60. bbot/modules/dehashed.py +1 -1
  61. bbot/modules/digitorus.py +1 -1
  62. bbot/modules/dnsbimi.py +145 -0
  63. bbot/modules/dnscaa.py +3 -3
  64. bbot/modules/dnsdumpster.py +4 -4
  65. bbot/modules/dnstlsrpt.py +144 -0
  66. bbot/modules/docker_pull.py +7 -5
  67. bbot/modules/dockerhub.py +2 -2
  68. bbot/modules/dotnetnuke.py +20 -21
  69. bbot/modules/emailformat.py +1 -1
  70. bbot/modules/extractous.py +122 -0
  71. bbot/modules/filedownload.py +9 -7
  72. bbot/modules/fullhunt.py +7 -4
  73. bbot/modules/generic_ssrf.py +5 -5
  74. bbot/modules/github_codesearch.py +3 -2
  75. bbot/modules/github_org.py +4 -4
  76. bbot/modules/github_workflows.py +4 -4
  77. bbot/modules/gitlab.py +2 -5
  78. bbot/modules/google_playstore.py +93 -0
  79. bbot/modules/gowitness.py +48 -50
  80. bbot/modules/hackertarget.py +5 -3
  81. bbot/modules/host_header.py +5 -5
  82. bbot/modules/httpx.py +1 -4
  83. bbot/modules/hunterio.py +3 -9
  84. bbot/modules/iis_shortnames.py +19 -30
  85. bbot/modules/internal/cloudcheck.py +29 -12
  86. bbot/modules/internal/dnsresolve.py +22 -22
  87. bbot/modules/internal/excavate.py +97 -59
  88. bbot/modules/internal/speculate.py +41 -32
  89. bbot/modules/internetdb.py +4 -2
  90. bbot/modules/ip2location.py +3 -5
  91. bbot/modules/ipneighbor.py +1 -1
  92. bbot/modules/ipstack.py +3 -8
  93. bbot/modules/jadx.py +87 -0
  94. bbot/modules/leakix.py +11 -10
  95. bbot/modules/myssl.py +2 -2
  96. bbot/modules/newsletters.py +2 -2
  97. bbot/modules/otx.py +5 -3
  98. bbot/modules/output/asset_inventory.py +7 -7
  99. bbot/modules/output/base.py +1 -1
  100. bbot/modules/output/csv.py +1 -1
  101. bbot/modules/output/http.py +20 -14
  102. bbot/modules/output/mysql.py +51 -0
  103. bbot/modules/output/neo4j.py +7 -2
  104. bbot/modules/output/postgres.py +49 -0
  105. bbot/modules/output/slack.py +0 -1
  106. bbot/modules/output/sqlite.py +29 -0
  107. bbot/modules/output/stdout.py +2 -2
  108. bbot/modules/output/teams.py +107 -6
  109. bbot/modules/paramminer_headers.py +8 -11
  110. bbot/modules/passivetotal.py +13 -13
  111. bbot/modules/portscan.py +32 -6
  112. bbot/modules/postman.py +50 -126
  113. bbot/modules/postman_download.py +220 -0
  114. bbot/modules/rapiddns.py +3 -8
  115. bbot/modules/report/asn.py +18 -11
  116. bbot/modules/robots.py +3 -3
  117. bbot/modules/securitytrails.py +7 -10
  118. bbot/modules/securitytxt.py +1 -1
  119. bbot/modules/shodan_dns.py +7 -9
  120. bbot/modules/sitedossier.py +1 -1
  121. bbot/modules/skymem.py +2 -2
  122. bbot/modules/social.py +2 -1
  123. bbot/modules/subdomaincenter.py +1 -1
  124. bbot/modules/subdomainradar.py +160 -0
  125. bbot/modules/telerik.py +8 -8
  126. bbot/modules/templates/bucket.py +1 -1
  127. bbot/modules/templates/github.py +22 -14
  128. bbot/modules/templates/postman.py +21 -0
  129. bbot/modules/templates/shodan.py +14 -13
  130. bbot/modules/templates/sql.py +95 -0
  131. bbot/modules/templates/subdomain_enum.py +51 -16
  132. bbot/modules/templates/webhook.py +2 -4
  133. bbot/modules/trickest.py +8 -37
  134. bbot/modules/trufflehog.py +10 -12
  135. bbot/modules/url_manipulation.py +3 -3
  136. bbot/modules/urlscan.py +1 -1
  137. bbot/modules/viewdns.py +1 -1
  138. bbot/modules/virustotal.py +8 -30
  139. bbot/modules/wafw00f.py +1 -1
  140. bbot/modules/wayback.py +1 -1
  141. bbot/modules/wpscan.py +17 -11
  142. bbot/modules/zoomeye.py +11 -6
  143. bbot/presets/baddns-thorough.yml +12 -0
  144. bbot/presets/fast.yml +16 -0
  145. bbot/presets/kitchen-sink.yml +1 -2
  146. bbot/presets/spider.yml +4 -0
  147. bbot/presets/subdomain-enum.yml +7 -7
  148. bbot/presets/web/dotnet-audit.yml +0 -1
  149. bbot/scanner/manager.py +5 -16
  150. bbot/scanner/preset/args.py +46 -26
  151. bbot/scanner/preset/environ.py +7 -2
  152. bbot/scanner/preset/path.py +7 -4
  153. bbot/scanner/preset/preset.py +36 -23
  154. bbot/scanner/scanner.py +172 -62
  155. bbot/scanner/target.py +236 -434
  156. bbot/scripts/docs.py +1 -1
  157. bbot/test/bbot_fixtures.py +13 -3
  158. bbot/test/conftest.py +132 -100
  159. bbot/test/fastapi_test.py +17 -0
  160. bbot/test/owasp_mastg.apk +0 -0
  161. bbot/test/run_tests.sh +4 -4
  162. bbot/test/test.conf +2 -0
  163. bbot/test/test_step_1/test__module__tests.py +0 -1
  164. bbot/test/test_step_1/test_bbot_fastapi.py +79 -0
  165. bbot/test/test_step_1/test_bloom_filter.py +2 -1
  166. bbot/test/test_step_1/test_cli.py +138 -64
  167. bbot/test/test_step_1/test_dns.py +61 -27
  168. bbot/test/test_step_1/test_engine.py +17 -19
  169. bbot/test/test_step_1/test_events.py +183 -30
  170. bbot/test/test_step_1/test_helpers.py +64 -29
  171. bbot/test/test_step_1/test_manager_deduplication.py +1 -1
  172. bbot/test/test_step_1/test_manager_scope_accuracy.py +333 -330
  173. bbot/test/test_step_1/test_modules_basic.py +68 -70
  174. bbot/test/test_step_1/test_presets.py +183 -100
  175. bbot/test/test_step_1/test_python_api.py +7 -2
  176. bbot/test/test_step_1/test_regexes.py +35 -5
  177. bbot/test/test_step_1/test_scan.py +39 -5
  178. bbot/test/test_step_1/test_scope.py +4 -3
  179. bbot/test/test_step_1/test_target.py +242 -145
  180. bbot/test/test_step_1/test_web.py +14 -10
  181. bbot/test/test_step_2/module_tests/base.py +15 -7
  182. bbot/test/test_step_2/module_tests/test_module_anubisdb.py +1 -1
  183. bbot/test/test_step_2/module_tests/test_module_apkpure.py +71 -0
  184. bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +0 -1
  185. bbot/test/test_step_2/module_tests/test_module_azure_realm.py +1 -1
  186. bbot/test/test_step_2/module_tests/test_module_baddns.py +6 -6
  187. bbot/test/test_step_2/module_tests/test_module_baddns_direct.py +62 -0
  188. bbot/test/test_step_2/module_tests/test_module_bevigil.py +29 -2
  189. bbot/test/test_step_2/module_tests/test_module_binaryedge.py +4 -2
  190. bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +2 -2
  191. bbot/test/test_step_2/module_tests/test_module_bucket_azure.py +1 -1
  192. bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +35 -0
  193. bbot/test/test_step_2/module_tests/test_module_builtwith.py +2 -2
  194. bbot/test/test_step_2/module_tests/test_module_bypass403.py +1 -1
  195. bbot/test/test_step_2/module_tests/test_module_c99.py +126 -0
  196. bbot/test/test_step_2/module_tests/test_module_censys.py +4 -1
  197. bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +4 -0
  198. bbot/test/test_step_2/module_tests/test_module_code_repository.py +11 -1
  199. bbot/test/test_step_2/module_tests/test_module_columbus.py +1 -1
  200. bbot/test/test_step_2/module_tests/test_module_credshed.py +3 -3
  201. bbot/test/test_step_2/module_tests/test_module_dastardly.py +2 -1
  202. bbot/test/test_step_2/module_tests/test_module_dehashed.py +2 -2
  203. bbot/test/test_step_2/module_tests/test_module_digitorus.py +1 -1
  204. bbot/test/test_step_2/module_tests/test_module_discord.py +1 -1
  205. bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +103 -0
  206. bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +9 -10
  207. bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py +1 -2
  208. bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +1 -2
  209. bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +4 -4
  210. bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +64 -0
  211. bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py +0 -8
  212. bbot/test/test_step_2/module_tests/test_module_excavate.py +28 -48
  213. bbot/test/test_step_2/module_tests/test_module_extractous.py +54 -0
  214. bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +1 -1
  215. bbot/test/test_step_2/module_tests/test_module_filedownload.py +14 -14
  216. bbot/test/test_step_2/module_tests/test_module_git_clone.py +2 -2
  217. bbot/test/test_step_2/module_tests/test_module_github_org.py +19 -8
  218. bbot/test/test_step_2/module_tests/test_module_github_workflows.py +1 -1
  219. bbot/test/test_step_2/module_tests/test_module_gitlab.py +9 -4
  220. bbot/test/test_step_2/module_tests/test_module_google_playstore.py +83 -0
  221. bbot/test/test_step_2/module_tests/test_module_gowitness.py +4 -6
  222. bbot/test/test_step_2/module_tests/test_module_host_header.py +1 -1
  223. bbot/test/test_step_2/module_tests/test_module_http.py +4 -4
  224. bbot/test/test_step_2/module_tests/test_module_httpx.py +10 -8
  225. bbot/test/test_step_2/module_tests/test_module_hunterio.py +68 -4
  226. bbot/test/test_step_2/module_tests/test_module_jadx.py +55 -0
  227. bbot/test/test_step_2/module_tests/test_module_json.py +22 -9
  228. bbot/test/test_step_2/module_tests/test_module_leakix.py +7 -3
  229. bbot/test/test_step_2/module_tests/test_module_mysql.py +76 -0
  230. bbot/test/test_step_2/module_tests/test_module_myssl.py +1 -1
  231. bbot/test/test_step_2/module_tests/test_module_neo4j.py +1 -1
  232. bbot/test/test_step_2/module_tests/test_module_newsletters.py +16 -16
  233. bbot/test/test_step_2/module_tests/test_module_ntlm.py +8 -7
  234. bbot/test/test_step_2/module_tests/test_module_oauth.py +1 -1
  235. bbot/test/test_step_2/module_tests/test_module_otx.py +1 -1
  236. bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py +1 -2
  237. bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +0 -6
  238. bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +2 -9
  239. bbot/test/test_step_2/module_tests/test_module_passivetotal.py +3 -1
  240. bbot/test/test_step_2/module_tests/test_module_pgp.py +2 -2
  241. bbot/test/test_step_2/module_tests/test_module_portscan.py +9 -8
  242. bbot/test/test_step_2/module_tests/test_module_postgres.py +74 -0
  243. bbot/test/test_step_2/module_tests/test_module_postman.py +84 -253
  244. bbot/test/test_step_2/module_tests/test_module_postman_download.py +439 -0
  245. bbot/test/test_step_2/module_tests/test_module_rapiddns.py +93 -1
  246. bbot/test/test_step_2/module_tests/test_module_shodan_dns.py +20 -1
  247. bbot/test/test_step_2/module_tests/test_module_sitedossier.py +2 -2
  248. bbot/test/test_step_2/module_tests/test_module_smuggler.py +14 -14
  249. bbot/test/test_step_2/module_tests/test_module_social.py +11 -1
  250. bbot/test/test_step_2/module_tests/test_module_speculate.py +4 -8
  251. bbot/test/test_step_2/module_tests/test_module_splunk.py +4 -4
  252. bbot/test/test_step_2/module_tests/test_module_sqlite.py +18 -0
  253. bbot/test/test_step_2/module_tests/test_module_sslcert.py +1 -1
  254. bbot/test/test_step_2/module_tests/test_module_stdout.py +5 -3
  255. bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py +1 -1
  256. bbot/test/test_step_2/module_tests/test_module_subdomainradar.py +208 -0
  257. bbot/test/test_step_2/module_tests/test_module_subdomains.py +1 -1
  258. bbot/test/test_step_2/module_tests/test_module_teams.py +8 -6
  259. bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
  260. bbot/test/test_step_2/module_tests/test_module_trufflehog.py +317 -14
  261. bbot/test/test_step_2/module_tests/test_module_viewdns.py +1 -1
  262. bbot/test/test_step_2/module_tests/test_module_wayback.py +1 -1
  263. bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py +2 -2
  264. bbot/wordlists/devops_mutations.txt +1 -1
  265. bbot/wordlists/ffuf_shortname_candidates.txt +1 -1
  266. bbot/wordlists/nameservers.txt +1 -1
  267. bbot/wordlists/paramminer_headers.txt +1 -1
  268. bbot/wordlists/paramminer_parameters.txt +1 -1
  269. bbot/wordlists/raft-small-extensions-lowercase_CLEANED.txt +1 -1
  270. bbot/wordlists/valid_url_schemes.txt +1 -1
  271. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/METADATA +48 -18
  272. bbot-2.3.0.5401rc0.dist-info/RECORD +421 -0
  273. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/WHEEL +1 -1
  274. bbot/modules/unstructured.py +0 -163
  275. bbot/test/test_step_2/module_tests/test_module_unstructured.py +0 -102
  276. bbot-2.0.1.4720rc0.dist-info/RECORD +0 -387
  277. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/LICENSE +0 -0
  278. {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/entry_points.txt +0 -0
bbot/core/helpers/diff.py CHANGED
@@ -94,14 +94,14 @@ class HttpCompare:
  baseline_1_json = xmltodict.parse(baseline_1.text)
  baseline_2_json = xmltodict.parse(baseline_2.text)
  except ExpatError:
- log.debug(f"Cant HTML parse for {self.baseline_url}. Switching to text parsing as a backup")
+ log.debug(f"Can't HTML parse for {self.baseline_url}. Switching to text parsing as a backup")
  baseline_1_json = baseline_1.text.split("\n")
  baseline_2_json = baseline_2.text.split("\n")

  ddiff = DeepDiff(baseline_1_json, baseline_2_json, ignore_order=True, view="tree")
  self.ddiff_filters = []

- for k, v in ddiff.items():
+ for k in ddiff.keys():
  for x in list(ddiff[k]):
  log.debug(f"Added {k} filter for path: {x.path()}")
  self.ddiff_filters.append(x.path())
@@ -140,7 +140,7 @@ class HttpCompare:

  ddiff = DeepDiff(headers_1, headers_2, ignore_order=True, view="tree")

- for k, v in ddiff.items():
+ for k in ddiff.keys():
  for x in list(ddiff[k]):
  try:
  header_value = str(x).split("'")[1]
@@ -183,7 +183,7 @@ class HttpCompare:

  await self._baseline()

- if timeout == None:
+ if timeout is None:
  timeout = self.timeout

  reflection = False
@@ -203,7 +203,7 @@ class HttpCompare:
  )

  if subject_response is None:
- # this can be caused by a WAF not liking the header, so we really arent interested in it
+ # this can be caused by a WAF not liking the header, so we really aren't interested in it
  return (True, "403", reflection, subject_response)

  if check_reflection:
@@ -225,7 +225,7 @@ class HttpCompare:
  subject_json = xmltodict.parse(subject_response.text)

  except ExpatError:
- log.debug(f"Cant HTML parse for {subject.split('?')[0]}. Switching to text parsing as a backup")
+ log.debug(f"Can't HTML parse for {subject.split('?')[0]}. Switching to text parsing as a backup")
  subject_json = subject_response.text.split("\n")

  diff_reasons = []
@@ -238,11 +238,11 @@ class HttpCompare:

  different_headers = self.compare_headers(self.baseline.headers, subject_response.headers)
  if different_headers:
- log.debug(f"headers were different, no match")
+ log.debug("headers were different, no match")
  diff_reasons.append("header")

- if self.compare_body(self.baseline_json, subject_json) == False:
- log.debug(f"difference in HTML body, no match")
+ if self.compare_body(self.baseline_json, subject_json) is False:
+ log.debug("difference in HTML body, no match")

  diff_reasons.append("body")

@@ -275,6 +275,6 @@ class HttpCompare:
  )

  # if a nonsense header "caused" a difference, we need to abort. We also need to abort if our canary was reflected
- if match == False or reflection == True:
+ if match is False or reflection is True:
  return False
  return True
bbot/core/helpers/dns/brute.py CHANGED
@@ -41,10 +41,13 @@ class DNSBrute:
  type = "A"
  type = str(type).strip().upper()

- wildcard_rdtypes = await self.parent_helper.dns.is_wildcard_domain(domain, (type, "CNAME"))
- if wildcard_rdtypes:
+ wildcard_domains = await self.parent_helper.dns.is_wildcard_domain(domain, (type, "CNAME"))
+ wildcard_rdtypes = set()
+ for domain, rdtypes in wildcard_domains.items():
+ wildcard_rdtypes.update(rdtypes)
+ if wildcard_domains:
  self.log.hugewarning(
- f"Aborting massdns on {domain} because it's a wildcard domain ({','.join(wildcard_rdtypes)})"
+ f"Aborting massdns on {domain} because it's a wildcard domain ({','.join(sorted(wildcard_rdtypes))})"
  )
  return []

@@ -161,7 +164,7 @@ class DNSBrute:
  for i in range(0, max(0, n - 5)):
  d = delimiters[i % len(delimiters)]
  l = lengths[i % len(lengths)]
- segments = list(random.choice(self.devops_mutations) for _ in range(l))
+ segments = [random.choice(self.devops_mutations) for _ in range(l)]
  segments.append(self.parent_helper.rand_string(length=8, digits=False))
  subdomain = d.join(segments)
  yield subdomain
bbot/core/helpers/dns/dns.py CHANGED
@@ -16,7 +16,6 @@ log = logging.getLogger("bbot.core.helpers.dns")


  class DNSHelper(EngineClient):
-
  SERVER_CLASS = DNSEngine
  ERROR_CLASS = DNSError

@@ -179,7 +178,7 @@ class DNSHelper(EngineClient):

  host = clean_dns_record(host)
  # skip check if it's an IP or a plain hostname
- if is_ip(host) or not "." in host:
+ if is_ip(host) or "." not in host:
  return False

  # skip if query isn't a dns name
bbot/core/helpers/dns/engine.py CHANGED
@@ -24,7 +24,6 @@ all_rdtypes = ["A", "AAAA", "SRV", "MX", "NS", "SOA", "CNAME", "TXT"]


  class DNSEngine(EngineServer):
-
  CMDS = {
  0: "resolve",
  1: "resolve_raw",
@@ -55,7 +54,7 @@ class DNSEngine(EngineServer):
  dns_omit_queries = self.dns_config.get("omit_queries", None)
  if not dns_omit_queries:
  dns_omit_queries = []
- self.dns_omit_queries = dict()
+ self.dns_omit_queries = {}
  for d in dns_omit_queries:
  d = d.split(":")
  if len(d) == 2:
@@ -73,7 +72,7 @@ class DNSEngine(EngineServer):
  self.wildcard_ignore = []
  self.wildcard_ignore = tuple([str(d).strip().lower() for d in self.wildcard_ignore])
  self.wildcard_tests = self.dns_config.get("wildcard_tests", 5)
- self._wildcard_cache = dict()
+ self._wildcard_cache = {}
  # since wildcard detection takes some time, This is to prevent multiple
  # modules from kicking off wildcard detection for the same domain at the same time
  self._wildcard_lock = NamedLock()
@@ -83,7 +82,7 @@ class DNSEngine(EngineServer):
  self._last_connectivity_warning = time.time()
  # keeps track of warnings issued for wildcard detection to prevent duplicate warnings
  self._dns_warnings = set()
- self._errors = dict()
+ self._errors = {}
  self._debug = self.dns_config.get("debug", False)
  self._dns_cache = LRUCache(maxsize=10000)

@@ -476,7 +475,6 @@
  # for every parent domain, starting with the shortest
  parents = list(domain_parents(query))
  for parent in parents[::-1]:
-
  # check if the parent domain is set up with wildcards
  wildcard_results = await self.is_wildcard_domain(parent, rdtypes_to_check)

@@ -640,7 +638,7 @@
  self._last_dns_success = time.time()
  return True
  if time.time() - self._last_connectivity_warning > interval:
- self.log.warning(f"DNS queries are failing, please check your internet connection")
+ self.log.warning("DNS queries are failing, please check your internet connection")
  self._last_connectivity_warning = time.time()
  self._errors.clear()
  return False
bbot/core/helpers/dns/helpers.py CHANGED
@@ -1,6 +1,6 @@
  import logging

- from bbot.core.helpers.regexes import dns_name_regex
+ from bbot.core.helpers.regexes import dns_name_extraction_regex
  from bbot.core.helpers.misc import clean_dns_record, smart_decode

  log = logging.getLogger("bbot.core.helpers.dns")
@@ -198,7 +198,7 @@ def extract_targets(record):
  elif rdtype == "TXT":
  for s in record.strings:
  s = smart_decode(s)
- for match in dns_name_regex.finditer(s):
+ for match in dns_name_extraction_regex.finditer(s):
  start, end = match.span()
  host = s[start:end]
  add_result(rdtype, host)
bbot/core/helpers/dns/mock.py CHANGED
@@ -5,7 +5,6 @@ log = logging.getLogger("bbot.core.helpers.dns.mock")


  class MockResolver:
-
  def __init__(self, mock_data=None, custom_lookup_fn=None):
  self.mock_data = mock_data if mock_data else {}
  self._custom_lookup_fn = custom_lookup_fn
bbot/core/helpers/files.py CHANGED
@@ -83,7 +83,7 @@ def _feed_pipe(self, pipe, content, text=True):
  for c in content:
  p.write(decode_fn(c) + newline)
  except BrokenPipeError:
- log.debug(f"Broken pipe in _feed_pipe()")
+ log.debug("Broken pipe in _feed_pipe()")
  except ValueError:
  log.debug(f"Error _feed_pipe(): {traceback.format_exc()}")
  except KeyboardInterrupt:
bbot/core/helpers/helper.py CHANGED
@@ -12,10 +12,11 @@ from .diff import HttpCompare
  from .regex import RegexHelper
  from .wordcloud import WordCloud
  from .interactsh import Interactsh
- from ...scanner.target import Target
  from .depsinstaller import DepsInstaller
  from .async_helpers import get_event_loop

+ from bbot.scanner.target import BaseTarget
+
  log = logging.getLogger("bbot.core.helpers")


@@ -152,11 +153,13 @@ class ConfigAwareHelper:
  return self.temp_dir / filename

  def clean_old_scans(self):
- _filter = lambda x: x.is_dir() and self.regexes.scan_name_regex.match(x.name)
+ def _filter(x):
+ return x.is_dir() and self.regexes.scan_name_regex.match(x.name)
+
  self.clean_old(self.scans_dir, keep=self.keep_old_scans, filter=_filter)

- def make_target(self, *events, **kwargs):
- return Target(*events, **kwargs)
+ def make_target(self, *targets, **kwargs):
+ return BaseTarget(*targets, scan=self.scan, **kwargs)

  @property
  def config(self):
bbot/core/helpers/interactsh.py CHANGED
@@ -155,7 +155,7 @@ class Interactsh:
  break

  if not self.server:
- raise InteractshError(f"Failed to register with an interactsh server")
+ raise InteractshError("Failed to register with an interactsh server")

  log.info(
  f"Successfully registered to interactsh server {self.server} with correlation_id {self.correlation_id} [{self.domain}]"
@@ -181,7 +181,7 @@ class Interactsh:
  >>> await interactsh_client.deregister()
  """
  if not self.server or not self.correlation_id or not self.secret:
- raise InteractshError(f"Missing required information to deregister")
+ raise InteractshError("Missing required information to deregister")

  headers = {}
  if self.token:
@@ -226,7 +226,7 @@ class Interactsh:
  ]
  """
  if not self.server or not self.correlation_id or not self.secret:
- raise InteractshError(f"Missing required information to poll")
+ raise InteractshError("Missing required information to poll")

  headers = {}
  if self.token:
bbot/core/helpers/libmagic.py ADDED
@@ -0,0 +1,65 @@
+ import puremagic
+
+
+ def get_magic_info(file):
+ magic_detections = puremagic.magic_file(file)
+ if magic_detections:
+ magic_detections.sort(key=lambda x: x.confidence, reverse=True)
+ detection = magic_detections[0]
+ return detection.extension, detection.mime_type, detection.name, detection.confidence
+ return "", "", "", 0
+
+
+ def get_compression(mime_type):
+ mime_type = mime_type.lower()
+ # from https://github.com/cdgriffith/puremagic/blob/master/puremagic/magic_data.json
+ compression_map = {
+ "application/arj": "arj", # ARJ archive
+ "application/binhex": "binhex", # BinHex encoded file
+ "application/epub+zip": "zip", # EPUB book (Zip archive)
+ "application/fictionbook2+zip": "zip", # FictionBook 2.0 (Zip)
+ "application/fictionbook3+zip": "zip", # FictionBook 3.0 (Zip)
+ "application/gzip": "gzip", # Gzip compressed file
+ "application/java-archive": "zip", # Java Archive (JAR)
+ "application/pak": "pak", # PAK archive
+ "application/vnd.android.package-archive": "zip", # Android package (APK)
+ "application/vnd.comicbook-rar": "rar", # Comic book archive (RAR)
+ "application/vnd.comicbook+zip": "zip", # Comic book archive (Zip)
+ "application/vnd.ms-cab-compressed": "cab", # Microsoft Cabinet archive
+ "application/vnd.palm": "palm", # Palm OS data
+ "application/vnd.rar": "rar", # RAR archive
+ "application/x-7z-compressed": "7z", # 7-Zip archive
+ "application/x-ace": "ace", # ACE archive
+ "application/x-alz": "alz", # ALZip archive
+ "application/x-arc": "arc", # ARC archive
+ "application/x-archive": "ar", # Unix archive
+ "application/x-bzip2": "bzip2", # Bzip2 compressed file
+ "application/x-compress": "compress", # Unix compress file
+ "application/x-cpio": "cpio", # CPIO archive
+ "application/x-gzip": "gzip", # Gzip compressed file
+ "application/x-itunes-ipa": "zip", # iOS application archive (IPA)
+ "application/x-java-pack200": "pack200", # Java Pack200 archive
+ "application/x-lha": "lha", # LHA archive
+ "application/x-lrzip": "lrzip", # Long Range ZIP
+ "application/x-lz4-compressed-tar": "lz4", # LZ4 compressed Tar archive
+ "application/x-lz4": "lz4", # LZ4 compressed file
+ "application/x-lzip": "lzip", # Lzip compressed file
+ "application/x-lzma": "lzma", # LZMA compressed file
+ "application/x-par2": "par2", # PAR2 recovery file
+ "application/x-qpress": "qpress", # Qpress archive
+ "application/x-rar-compressed": "rar", # RAR archive
+ "application/x-sit": "sit", # StuffIt archive
+ "application/x-stuffit": "sit", # StuffIt archive
+ "application/x-tar": "tar", # Tar archive
+ "application/x-tgz": "tgz", # Gzip compressed Tar archive
+ "application/x-webarchive": "zip", # Web archive (Zip)
+ "application/x-xar": "xar", # XAR archive
+ "application/x-xz": "xz", # XZ compressed file
+ "application/x-zip-compressed-fb2": "zip", # Zip archive (FB2)
+ "application/x-zoo": "zoo", # Zoo archive
+ "application/x-zstd-compressed-tar": "zstd", # Zstandard compressed Tar archive
+ "application/zip": "zip", # Zip archive
+ "application/zstd": "zstd", # Zstandard compressed file
+ }
+
+ return compression_map.get(mime_type, "")
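
Note: the new libmagic helper wraps puremagic for file-type detection. A minimal usage sketch, not part of the diff (the file path below is hypothetical):

    # Sketch only: how the new helpers might be called together.
    from bbot.core.helpers.libmagic import get_magic_info, get_compression

    # "downloaded.bin" is a made-up path for illustration
    extension, mime_type, description, confidence = get_magic_info("downloaded.bin")
    if mime_type:
        # map the detected MIME type to an archive/compression format, e.g. "zip" or "gzip"
        compression = get_compression(mime_type)
        print(extension, mime_type, description, confidence, compression)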
bbot/core/helpers/misc.py CHANGED
@@ -227,13 +227,13 @@ def split_host_port(d):

  match = bbot_regexes.extract_open_port_regex.match(netloc)
  if match is None:
- raise ValueError(f'split_port() failed to parse netloc "{netloc}"')
+ raise ValueError(f'split_port() failed to parse netloc "{netloc}" (original value: {d})')

  host = match.group(2)
  if host is None:
  host = match.group(1)
  if host is None:
- raise ValueError(f'split_port() failed to locate host in netloc "{netloc}"')
+ raise ValueError(f'split_port() failed to locate host in netloc "{netloc}" (original value: {d})')

  port = match.group(3)
  if port is None and scheme is not None:
@@ -365,7 +365,7 @@ def parent_url(u):
  if path.parent == path:
  return None
  else:
- return urlunparse(parsed._replace(path=str(path.parent)))
+ return urlunparse(parsed._replace(path=str(path.parent), query=""))


  def url_parents(u):
@@ -391,7 +391,7 @@ def url_parents(u):
  parent_list = []
  while 1:
  parent = parent_url(u)
- if parent == None:
+ if parent is None:
  return parent_list
  elif parent not in parent_list:
  parent_list.append(parent)
@@ -512,7 +512,7 @@ def domain_stem(domain):
  - Utilizes the `tldextract` function for domain parsing.
  """
  parsed = tldextract(str(domain))
- return f".".join(parsed.subdomain.split(".") + parsed.domain.split(".")).strip(".")
+ return ".".join(parsed.subdomain.split(".") + parsed.domain.split(".")).strip(".")


  def ip_network_parents(i, include_self=False):
@@ -586,17 +586,18 @@ def is_dns_name(d, include_local=True):
  if include_local:
  if bbot_regexes.hostname_regex.match(d):
  return True
- if bbot_regexes.dns_name_regex.match(d):
+ if bbot_regexes.dns_name_validation_regex.match(d):
  return True
  return False


- def is_ip(d, version=None):
+ def is_ip(d, version=None, include_network=False):
  """
  Checks if the given string or object represents a valid IP address.

  Args:
  d (str or ipaddress.IPvXAddress): The IP address to check.
+ include_network (bool, optional): Whether to include network types (IPv4Network or IPv6Network). Defaults to False.
  version (int, optional): The IP version to validate (4 or 6). Default is None.

  Returns:
@@ -612,21 +613,27 @@ def is_ip(d, version=None):
  >>> is_ip('evilcorp.com')
  False
  """
+ ip = None
  try:
  ip = ipaddress.ip_address(d)
- if version is None or ip.version == version:
- return True
  except Exception:
- pass
+ if include_network:
+ try:
+ ip = ipaddress.ip_network(d, strict=False)
+ except Exception:
+ pass
+ if ip is not None and (version is None or ip.version == version):
+ return True
  return False


- def is_ip_type(i):
+ def is_ip_type(i, network=None):
  """
  Checks if the given object is an instance of an IPv4 or IPv6 type from the ipaddress module.

  Args:
  i (ipaddress._BaseV4 or ipaddress._BaseV6): The IP object to check.
+ network (bool, optional): Whether to restrict the check to network types (IPv4Network or IPv6Network). Defaults to False.

  Returns:
  bool: True if the object is an instance of ipaddress._BaseV4 or ipaddress._BaseV6, False otherwise.
@@ -639,6 +646,12 @@ def is_ip_type(i):
  >>> is_ip_type("192.168.1.0/24")
  False
  """
+ if network is not None:
+ is_network = ipaddress._BaseNetwork in i.__class__.__mro__
+ if network:
+ return is_network
+ else:
+ return not is_network
  return ipaddress._IPAddressBase in i.__class__.__mro__

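Note: a short sketch of the new include_network / network options shown in the hunks above (illustrative only; the assertions mirror the diffed logic and assume bbot is importable):

    # Sketch only: networks are rejected by default, accepted when opted in.
    import ipaddress
    from bbot.core.helpers.misc import is_ip, is_ip_type

    assert is_ip("1.2.3.4")
    assert not is_ip("192.168.1.0/24")                       # network string, rejected by default
    assert is_ip("192.168.1.0/24", include_network=True)     # accepted with include_network=True
    assert is_ip_type(ipaddress.ip_network("10.0.0.0/8"), network=True)
    assert not is_ip_type(ipaddress.ip_address("10.0.0.1"), network=True)
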
@@ -908,12 +921,12 @@ def extract_params_xml(xml_data, compare_mode="getparam"):

  # Define valid characters for each mode based on RFCs
  valid_chars_dict = {
- "header": set(
+ "header": {
  chr(c) for c in range(33, 127) if chr(c) in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_"
- ),
- "getparam": set(chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="),
- "postparam": set(chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="),
- "cookie": set(chr(c) for c in range(33, 127) if chr(c) not in '()<>@,;:"/[]?={} \t'),
+ },
+ "getparam": {chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="},
+ "postparam": {chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="},
+ "cookie": {chr(c) for c in range(33, 127) if chr(c) not in '()<>@,;:"/[]?={} \t'},
  }

@@ -1135,7 +1148,7 @@ def chain_lists(
  """
  if isinstance(l, str):
  l = [l]
- final_list = dict()
+ final_list = {}
  for entry in l:
  for s in split_regex.split(entry):
  f = s.strip()
@@ -1260,7 +1273,7 @@ def gen_numbers(n, padding=2):
  return results


- def make_netloc(host, port):
+ def make_netloc(host, port=None):
  """Constructs a network location string from a given host and port.

  Args:
@@ -1289,7 +1302,7 @@ def make_netloc(host, port):
  if is_ip(host, version=6):
  host = f"[{host}]"
  if port is None:
- return host
+ return str(host)
  return f"{host}:{port}"


@@ -1332,7 +1345,7 @@ def search_dict_by_key(key, d):
  if isinstance(d, dict):
  if key in d:
  yield d[key]
- for k, v in d.items():
+ for v in d.values():
  yield from search_dict_by_key(key, v)
  elif isinstance(d, list):
  for v in d:
@@ -1399,7 +1412,7 @@ def search_dict_values(d, *regexes):
  results.add(h)
  yield result
  elif isinstance(d, dict):
- for _, v in d.items():
+ for v in d.values():
  yield from search_dict_values(v, *regexes)
  elif isinstance(d, list):
  for v in d:
@@ -2384,7 +2397,7 @@ def in_exception_chain(e, exc_types):
  ... if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
  ... raise
  """
- return any([isinstance(_, exc_types) for _ in get_exception_chain(e)])
+ return any(isinstance(_, exc_types) for _ in get_exception_chain(e))


  def get_traceback_details(e):
@@ -2788,3 +2801,33 @@ def top_tcp_ports(n, as_string=False):
  if as_string:
  return ",".join([str(s) for s in top_ports])
  return top_ports
+
+
+ class SafeDict(dict):
+ def __missing__(self, key):
+ return "{" + key + "}"
+
+
+ def safe_format(s, **kwargs):
+ """
+ Format string while ignoring unused keys (prevents KeyError)
+ """
+ return s.format_map(SafeDict(kwargs))
+
+
+ def get_python_constraints():
+ req_regex = re.compile(r"([^(]+)\s*\((.*)\)", re.IGNORECASE)
+
+ def clean_requirement(req_string):
+ # Extract package name and version constraints from format like "package (>=1.0,<2.0)"
+ match = req_regex.match(req_string)
+ if match:
+ name, constraints = match.groups()
+ return f"{name.strip()}{constraints}"
+
+ return req_string
+
+ from importlib.metadata import distribution
+
+ dist = distribution("bbot")
+ return [clean_requirement(r) for r in dist.requires]
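
Note: the new safe_format helper leaves unknown placeholders intact instead of raising KeyError. Illustrative sketch only (the template string is made up):

    # Sketch only: mirrors the SafeDict/safe_format addition above.
    from bbot.core.helpers.misc import safe_format

    template = "{scheme}://{host}:{port}"          # hypothetical template
    print(safe_format(template, host="evilcorp.com"))
    # -> "{scheme}://evilcorp.com:{port}"  (missing keys are preserved, no KeyError)
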
bbot/core/helpers/names_generator.py CHANGED
@@ -10,9 +10,11 @@ adjectives = [
  "affectionate",
  "aggravated",
  "aggrieved",
+ "agoraphobic",
  "almighty",
  "anal",
  "atrocious",
+ "autistic",
  "awkward",
  "baby",
  "begrudged",
@@ -65,6 +67,7 @@ adjectives = [
  "dramatic",
  "drunk",
  "effeminate",
+ "effervescent",
  "elden",
  "eldritch",
  "embarrassed",
@@ -75,6 +78,7 @@ adjectives = [
  "ethereal",
  "euphoric",
  "evil",
+ "expired",
  "exquisite",
  "extreme",
  "ferocious",
@@ -87,10 +91,8 @@ adjectives = [
  "foreboding",
  "frenetic",
  "frolicking",
- "frothy",
  "furry",
  "fuzzy",
- "gay",
  "gentle",
  "giddy",
  "glowering",
@@ -112,6 +114,7 @@ adjectives = [
  "imaginary",
  "immense",
  "immoral",
+ "impulsive",
  "incomprehensible",
  "inebriated",
  "inexplicable",
@@ -149,6 +152,7 @@ adjectives = [
  "muscular",
  "mushy",
  "mysterious",
+ "nascent",
  "naughty",
  "nefarious",
  "negligent",
@@ -163,6 +167,7 @@ adjectives = [
  "overzealous",
  "paranoid",
  "pasty",
+ "peckish",
  "pedantic",
  "pernicious",
  "perturbed",
@@ -183,7 +188,6 @@ adjectives = [
  "psychic",
  "puffy",
  "pure",
- "queer",
  "questionable",
  "rabid",
  "raging",
@@ -210,6 +214,7 @@ adjectives = [
  "sneaky",
  "soft",
  "sophisticated",
+ "spicy",
  "spiteful",
  "squishy",
  "steamy",
@@ -269,12 +274,14 @@ adjectives = [
  "wispy",
  "witty",
  "woolly",
+ "zesty",
  ]

  names = [
  "aaron",
  "abigail",
  "adam",
+ "adeem",
  "alan",
  "albert",
  "alex",
@@ -412,6 +419,7 @@ names = [
  "evelyn",
  "faramir",
  "florence",
+ "fox",
  "frances",
  "francis",
  "frank",
@@ -437,6 +445,7 @@ names = [
  "gregory",
  "gus",
  "hagrid",
+ "hank",
  "hannah",
  "harold",
  "harry",
@@ -447,6 +456,7 @@ names = [
  "hermione",
  "homer",
  "howard",
+ "hunter",
  "irene",
  "isaac",
  "isabella",
@@ -505,6 +515,7 @@ names = [
  "kevin",
  "kimberly",
  "kyle",
+ "kylie",
  "lantern",
  "larry",
  "laura",
@@ -528,6 +539,7 @@ names = [
  "lupin",
  "madison",
  "magnus",
+ "marcus",
  "margaret",
  "maria",
  "marie",
@@ -626,11 +638,13 @@ names = [
  "stephen",
  "steven",
  "susan",
+ "syrina",
  "tammy",
  "taylor",
  "teresa",
  "terry",
  "theoden",
+ "theon",
  "theresa",
  "thomas",
  "tiffany",