bbot 2.0.1.4654rc0__py3-none-any.whl → 2.3.0.5397rc0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of bbot might be problematic.
- bbot/__init__.py +1 -1
- bbot/cli.py +3 -7
- bbot/core/config/files.py +0 -1
- bbot/core/config/logger.py +34 -4
- bbot/core/core.py +21 -6
- bbot/core/engine.py +9 -8
- bbot/core/event/base.py +162 -63
- bbot/core/helpers/bloom.py +10 -3
- bbot/core/helpers/command.py +9 -8
- bbot/core/helpers/depsinstaller/installer.py +89 -32
- bbot/core/helpers/depsinstaller/sudo_askpass.py +38 -2
- bbot/core/helpers/diff.py +10 -10
- bbot/core/helpers/dns/brute.py +18 -14
- bbot/core/helpers/dns/dns.py +16 -15
- bbot/core/helpers/dns/engine.py +159 -132
- bbot/core/helpers/dns/helpers.py +2 -2
- bbot/core/helpers/dns/mock.py +26 -8
- bbot/core/helpers/files.py +1 -1
- bbot/core/helpers/helper.py +7 -4
- bbot/core/helpers/interactsh.py +3 -3
- bbot/core/helpers/libmagic.py +65 -0
- bbot/core/helpers/misc.py +65 -22
- bbot/core/helpers/names_generator.py +17 -3
- bbot/core/helpers/process.py +0 -20
- bbot/core/helpers/regex.py +1 -1
- bbot/core/helpers/regexes.py +12 -6
- bbot/core/helpers/validators.py +1 -2
- bbot/core/helpers/web/client.py +1 -1
- bbot/core/helpers/web/engine.py +18 -13
- bbot/core/helpers/web/web.py +25 -116
- bbot/core/helpers/wordcloud.py +5 -5
- bbot/core/modules.py +36 -27
- bbot/core/multiprocess.py +58 -0
- bbot/core/shared_deps.py +46 -3
- bbot/db/sql/models.py +147 -0
- bbot/defaults.yml +15 -10
- bbot/errors.py +0 -8
- bbot/modules/anubisdb.py +2 -2
- bbot/modules/apkpure.py +63 -0
- bbot/modules/azure_tenant.py +2 -2
- bbot/modules/baddns.py +35 -19
- bbot/modules/baddns_direct.py +92 -0
- bbot/modules/baddns_zone.py +3 -8
- bbot/modules/badsecrets.py +4 -3
- bbot/modules/base.py +195 -51
- bbot/modules/bevigil.py +7 -7
- bbot/modules/binaryedge.py +7 -4
- bbot/modules/bufferoverrun.py +47 -0
- bbot/modules/builtwith.py +6 -10
- bbot/modules/bypass403.py +5 -5
- bbot/modules/c99.py +10 -7
- bbot/modules/censys.py +9 -13
- bbot/modules/certspotter.py +5 -3
- bbot/modules/chaos.py +9 -7
- bbot/modules/code_repository.py +1 -0
- bbot/modules/columbus.py +3 -3
- bbot/modules/crt.py +5 -3
- bbot/modules/deadly/dastardly.py +1 -1
- bbot/modules/deadly/ffuf.py +9 -9
- bbot/modules/deadly/nuclei.py +3 -3
- bbot/modules/deadly/vhost.py +4 -3
- bbot/modules/dehashed.py +1 -1
- bbot/modules/digitorus.py +1 -1
- bbot/modules/dnsbimi.py +145 -0
- bbot/modules/dnscaa.py +3 -3
- bbot/modules/dnsdumpster.py +4 -4
- bbot/modules/dnstlsrpt.py +144 -0
- bbot/modules/docker_pull.py +7 -5
- bbot/modules/dockerhub.py +2 -2
- bbot/modules/dotnetnuke.py +18 -19
- bbot/modules/emailformat.py +1 -1
- bbot/modules/extractous.py +122 -0
- bbot/modules/filedownload.py +9 -7
- bbot/modules/fullhunt.py +7 -4
- bbot/modules/generic_ssrf.py +5 -5
- bbot/modules/github_codesearch.py +3 -2
- bbot/modules/github_org.py +4 -4
- bbot/modules/github_workflows.py +4 -4
- bbot/modules/gitlab.py +2 -5
- bbot/modules/google_playstore.py +93 -0
- bbot/modules/gowitness.py +48 -50
- bbot/modules/hackertarget.py +5 -3
- bbot/modules/host_header.py +5 -5
- bbot/modules/httpx.py +1 -4
- bbot/modules/hunterio.py +3 -9
- bbot/modules/iis_shortnames.py +19 -30
- bbot/modules/internal/cloudcheck.py +27 -12
- bbot/modules/internal/dnsresolve.py +250 -276
- bbot/modules/internal/excavate.py +100 -64
- bbot/modules/internal/speculate.py +42 -33
- bbot/modules/internetdb.py +4 -2
- bbot/modules/ip2location.py +3 -5
- bbot/modules/ipneighbor.py +1 -1
- bbot/modules/ipstack.py +3 -8
- bbot/modules/jadx.py +87 -0
- bbot/modules/leakix.py +11 -10
- bbot/modules/myssl.py +2 -2
- bbot/modules/newsletters.py +2 -2
- bbot/modules/otx.py +5 -3
- bbot/modules/output/asset_inventory.py +7 -7
- bbot/modules/output/base.py +1 -1
- bbot/modules/output/csv.py +1 -2
- bbot/modules/output/http.py +20 -14
- bbot/modules/output/mysql.py +51 -0
- bbot/modules/output/neo4j.py +7 -2
- bbot/modules/output/postgres.py +49 -0
- bbot/modules/output/slack.py +0 -1
- bbot/modules/output/sqlite.py +29 -0
- bbot/modules/output/stdout.py +2 -2
- bbot/modules/output/teams.py +107 -6
- bbot/modules/paramminer_headers.py +5 -8
- bbot/modules/passivetotal.py +13 -13
- bbot/modules/portscan.py +32 -6
- bbot/modules/postman.py +50 -126
- bbot/modules/postman_download.py +220 -0
- bbot/modules/rapiddns.py +3 -8
- bbot/modules/report/asn.py +11 -11
- bbot/modules/robots.py +3 -3
- bbot/modules/securitytrails.py +7 -10
- bbot/modules/securitytxt.py +128 -0
- bbot/modules/shodan_dns.py +7 -9
- bbot/modules/sitedossier.py +1 -1
- bbot/modules/skymem.py +2 -2
- bbot/modules/social.py +2 -1
- bbot/modules/subdomaincenter.py +1 -1
- bbot/modules/subdomainradar.py +160 -0
- bbot/modules/telerik.py +8 -8
- bbot/modules/templates/bucket.py +1 -1
- bbot/modules/templates/github.py +22 -14
- bbot/modules/templates/postman.py +21 -0
- bbot/modules/templates/shodan.py +14 -13
- bbot/modules/templates/sql.py +95 -0
- bbot/modules/templates/subdomain_enum.py +53 -17
- bbot/modules/templates/webhook.py +2 -4
- bbot/modules/trickest.py +8 -37
- bbot/modules/trufflehog.py +18 -3
- bbot/modules/url_manipulation.py +3 -3
- bbot/modules/urlscan.py +1 -1
- bbot/modules/viewdns.py +1 -1
- bbot/modules/virustotal.py +8 -30
- bbot/modules/wafw00f.py +1 -1
- bbot/modules/wayback.py +1 -1
- bbot/modules/wpscan.py +17 -11
- bbot/modules/zoomeye.py +11 -6
- bbot/presets/baddns-thorough.yml +12 -0
- bbot/presets/fast.yml +16 -0
- bbot/presets/kitchen-sink.yml +1 -0
- bbot/presets/spider.yml +4 -0
- bbot/presets/subdomain-enum.yml +7 -7
- bbot/scanner/manager.py +5 -16
- bbot/scanner/preset/args.py +44 -26
- bbot/scanner/preset/environ.py +7 -2
- bbot/scanner/preset/path.py +7 -4
- bbot/scanner/preset/preset.py +36 -23
- bbot/scanner/scanner.py +176 -63
- bbot/scanner/target.py +236 -434
- bbot/scripts/docs.py +1 -1
- bbot/test/bbot_fixtures.py +22 -3
- bbot/test/conftest.py +132 -100
- bbot/test/fastapi_test.py +17 -0
- bbot/test/owasp_mastg.apk +0 -0
- bbot/test/run_tests.sh +4 -4
- bbot/test/test.conf +2 -0
- bbot/test/test_step_1/test_bbot_fastapi.py +82 -0
- bbot/test/test_step_1/test_bloom_filter.py +2 -0
- bbot/test/test_step_1/test_cli.py +138 -64
- bbot/test/test_step_1/test_dns.py +392 -70
- bbot/test/test_step_1/test_engine.py +17 -17
- bbot/test/test_step_1/test_events.py +203 -37
- bbot/test/test_step_1/test_helpers.py +64 -28
- bbot/test/test_step_1/test_manager_deduplication.py +1 -1
- bbot/test/test_step_1/test_manager_scope_accuracy.py +336 -338
- bbot/test/test_step_1/test_modules_basic.py +69 -71
- bbot/test/test_step_1/test_presets.py +184 -96
- bbot/test/test_step_1/test_python_api.py +7 -2
- bbot/test/test_step_1/test_regexes.py +35 -5
- bbot/test/test_step_1/test_scan.py +39 -5
- bbot/test/test_step_1/test_scope.py +5 -4
- bbot/test/test_step_1/test_target.py +243 -145
- bbot/test/test_step_1/test_web.py +48 -10
- bbot/test/test_step_2/module_tests/base.py +17 -20
- bbot/test/test_step_2/module_tests/test_module_anubisdb.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_apkpure.py +71 -0
- bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +0 -1
- bbot/test/test_step_2/module_tests/test_module_azure_realm.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_baddns.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_baddns_direct.py +62 -0
- bbot/test/test_step_2/module_tests/test_module_bevigil.py +29 -2
- bbot/test/test_step_2/module_tests/test_module_binaryedge.py +4 -2
- bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_bucket_azure.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +35 -0
- bbot/test/test_step_2/module_tests/test_module_builtwith.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_bypass403.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_c99.py +126 -0
- bbot/test/test_step_2/module_tests/test_module_censys.py +4 -1
- bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +4 -0
- bbot/test/test_step_2/module_tests/test_module_code_repository.py +11 -1
- bbot/test/test_step_2/module_tests/test_module_columbus.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_credshed.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_dastardly.py +2 -1
- bbot/test/test_step_2/module_tests/test_module_dehashed.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_digitorus.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_discord.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +103 -0
- bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +9 -10
- bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +64 -0
- bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py +0 -8
- bbot/test/test_step_2/module_tests/test_module_excavate.py +17 -37
- bbot/test/test_step_2/module_tests/test_module_extractous.py +54 -0
- bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_filedownload.py +14 -14
- bbot/test/test_step_2/module_tests/test_module_git_clone.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_github_org.py +19 -8
- bbot/test/test_step_2/module_tests/test_module_github_workflows.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_gitlab.py +9 -4
- bbot/test/test_step_2/module_tests/test_module_google_playstore.py +83 -0
- bbot/test/test_step_2/module_tests/test_module_gowitness.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_host_header.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_http.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_httpx.py +10 -8
- bbot/test/test_step_2/module_tests/test_module_hunterio.py +68 -4
- bbot/test/test_step_2/module_tests/test_module_jadx.py +55 -0
- bbot/test/test_step_2/module_tests/test_module_json.py +24 -11
- bbot/test/test_step_2/module_tests/test_module_leakix.py +7 -3
- bbot/test/test_step_2/module_tests/test_module_mysql.py +76 -0
- bbot/test/test_step_2/module_tests/test_module_myssl.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_neo4j.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_newsletters.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_ntlm.py +7 -7
- bbot/test/test_step_2/module_tests/test_module_oauth.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_otx.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +0 -6
- bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +2 -9
- bbot/test/test_step_2/module_tests/test_module_passivetotal.py +3 -1
- bbot/test/test_step_2/module_tests/test_module_portscan.py +9 -8
- bbot/test/test_step_2/module_tests/test_module_postgres.py +74 -0
- bbot/test/test_step_2/module_tests/test_module_postman.py +84 -253
- bbot/test/test_step_2/module_tests/test_module_postman_download.py +439 -0
- bbot/test/test_step_2/module_tests/test_module_rapiddns.py +93 -1
- bbot/test/test_step_2/module_tests/test_module_securitytxt.py +50 -0
- bbot/test/test_step_2/module_tests/test_module_shodan_dns.py +20 -1
- bbot/test/test_step_2/module_tests/test_module_sitedossier.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_smuggler.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_social.py +11 -1
- bbot/test/test_step_2/module_tests/test_module_speculate.py +2 -6
- bbot/test/test_step_2/module_tests/test_module_splunk.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_sqlite.py +18 -0
- bbot/test/test_step_2/module_tests/test_module_sslcert.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_stdout.py +5 -3
- bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_subdomainradar.py +208 -0
- bbot/test/test_step_2/module_tests/test_module_subdomains.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_teams.py +8 -6
- bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_trufflehog.py +317 -11
- bbot/test/test_step_2/module_tests/test_module_wayback.py +1 -1
- bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py +135 -0
- {bbot-2.0.1.4654rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/METADATA +48 -18
- bbot-2.3.0.5397rc0.dist-info/RECORD +421 -0
- {bbot-2.0.1.4654rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/WHEEL +1 -1
- bbot/modules/unstructured.py +0 -163
- bbot/test/test_step_2/module_tests/test_module_unstructured.py +0 -102
- bbot-2.0.1.4654rc0.dist-info/RECORD +0 -385
- {bbot-2.0.1.4654rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/LICENSE +0 -0
- {bbot-2.0.1.4654rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/entry_points.txt +0 -0
bbot/core/helpers/web/web.py
CHANGED

@@ -1,7 +1,5 @@
-import re
 import logging
 import warnings
-import traceback
 from pathlib import Path
 from bs4 import BeautifulSoup

@@ -21,7 +19,6 @@ log = logging.getLogger("bbot.core.helpers.web")


 class WebHelper(EngineClient):
-
     SERVER_CLASS = HTTPEngine
     ERROR_CLASS = WebError

@@ -60,7 +57,7 @@ class WebHelper(EngineClient):
         self.ssl_verify = self.config.get("ssl_verify", False)
         engine_debug = self.config.get("engine", {}).get("debug", False)
         super().__init__(
-            server_kwargs={"config": self.config, "target": self.parent_helper.preset.target.
+            server_kwargs={"config": self.config, "target": self.parent_helper.preset.target.minimal},
             debug=engine_debug,
         )

@@ -121,7 +118,16 @@ class WebHelper(EngineClient):
         Note:
             If the web request fails, it will return None unless `raise_error` is `True`.
         """
-
+        raise_error = kwargs.get("raise_error", False)
+        result = await self.run_and_return("request", *args, **kwargs)
+        if isinstance(result, dict) and "_request_error" in result:
+            if raise_error:
+                error_msg = result["_request_error"]
+                response = result["_response"]
+                error = self.ERROR_CLASS(error_msg)
+                error.response = response
+                raise error
+        return result

     async def request_batch(self, urls, *args, **kwargs):
         """
@@ -199,6 +205,7 @@ class WebHelper(EngineClient):
         >>> filepath = await self.helpers.download("https://www.evilcorp.com/passwords.docx", cache_hrs=24)
         """
         success = False
+        raise_error = kwargs.get("raise_error", False)
         filename = kwargs.pop("filename", self.parent_helper.cache_filename(url))
         filename = truncate_filename(Path(filename).resolve())
         kwargs["filename"] = filename
@@ -211,7 +218,16 @@
             log.debug(f"{url} is cached at {self.parent_helper.cache_filename(url)}")
             success = True
         else:
-
+            result = await self.run_and_return("download", url, **kwargs)
+            if isinstance(result, dict) and "_download_error" in result:
+                if raise_error:
+                    error_msg = result["_download_error"]
+                    response = result["_response"]
+                    error = self.ERROR_CLASS(error_msg)
+                    error.response = response
+                    raise error
+            elif result:
+                success = True

         if success:
             return filename
@@ -245,7 +261,7 @@
         """
         if not path:
             raise WordlistError(f"Invalid wordlist: {path}")
-        if
+        if "cache_hrs" not in kwargs:
             kwargs["cache_hrs"] = 720
         if self.parent_helper.is_url(path):
             filename = await self.download(str(path), **kwargs)
@@ -269,66 +285,6 @@
                 f.write(line)
         return truncated_filename

-    async def api_page_iter(self, url, page_size=100, json=True, next_key=None, **requests_kwargs):
-        """
-        An asynchronous generator function for iterating through paginated API data.
-
-        This function continuously makes requests to a specified API URL, incrementing the page number
-        or applying a custom pagination function, and yields the received data one page at a time.
-        It is well-suited for APIs that provide paginated results.
-
-        Args:
-            url (str): The initial API URL. Can contain placeholders for 'page', 'page_size', and 'offset'.
-            page_size (int, optional): The number of items per page. Defaults to 100.
-            json (bool, optional): If True, attempts to deserialize the response content to a JSON object. Defaults to True.
-            next_key (callable, optional): A function that takes the last page's data and returns the URL for the next page. Defaults to None.
-            **requests_kwargs: Arbitrary keyword arguments that will be forwarded to the HTTP request function.
-
-        Yields:
-            dict or httpx.Response: If 'json' is True, yields a dictionary containing the parsed JSON data. Otherwise, yields the raw HTTP response.
-
-        Note:
-            The loop will continue indefinitely unless manually stopped. Make sure to break out of the loop once the last page has been received.
-
-        Examples:
-            >>> agen = api_page_iter('https://api.example.com/data?page={page}&page_size={page_size}')
-            >>> try:
-            >>>     async for page in agen:
-            >>>         subdomains = page["subdomains"]
-            >>>         self.hugesuccess(subdomains)
-            >>>         if not subdomains:
-            >>>             break
-            >>> finally:
-            >>>     agen.aclose()
-        """
-        page = 1
-        offset = 0
-        result = None
-        while 1:
-            if result and callable(next_key):
-                try:
-                    new_url = next_key(result)
-                except Exception as e:
-                    log.debug(f"Failed to extract next page of results from {url}: {e}")
-                    log.debug(traceback.format_exc())
-            else:
-                new_url = url.format(page=page, page_size=page_size, offset=offset)
-            result = await self.request(new_url, **requests_kwargs)
-            if result is None:
-                log.verbose(f"api_page_iter() got no response for {url}")
-                break
-            try:
-                if json:
-                    result = result.json()
-                yield result
-            except Exception:
-                log.warning(f'Error in api_page_iter() for url: "{new_url}"')
-                log.trace(traceback.format_exc())
-                break
-            finally:
-                offset += page_size
-                page += 1
-
     async def curl(self, *args, **kwargs):
         """
         An asynchronous function that runs a cURL command with specified arguments and options.
@@ -394,7 +350,7 @@
                 headers[hk] = hv

         # add the timeout
-        if
+        if "timeout" not in kwargs:
             timeout = http_timeout

         curl_command.append("-m")
@@ -495,7 +451,7 @@
         Perform an html parse of the 'markup' argument and return a soup instance

         >>> email_type = soup.find(type="email")
-        Searches the soup instance for all
+        Searches the soup instance for all occurrences of the passed in argument
         """
         try:
             soup = BeautifulSoup(
@@ -506,53 +462,6 @@
             log.debug(f"Error parsing beautifulsoup: {e}")
             return False

-    user_keywords = [re.compile(r, re.I) for r in ["user", "login", "email"]]
-    pass_keywords = [re.compile(r, re.I) for r in ["pass"]]
-
-    def is_login_page(self, html):
-        """
-        TODO: convert this into an excavate YARA rule
-
-        Determines if the provided HTML content contains a login page.
-
-        This function parses the HTML to search for forms with input fields typically used for
-        authentication. If it identifies password fields or a combination of username and password
-        fields, it returns True.
-
-        Args:
-            html (str): The HTML content to analyze.
-
-        Returns:
-            bool: True if the HTML contains a login page, otherwise False.
-
-        Examples:
-            >>> is_login_page('<form><input type="text" name="username"><input type="password" name="password"></form>')
-            True
-
-            >>> is_login_page('<form><input type="text" name="search"></form>')
-            False
-        """
-        try:
-            soup = BeautifulSoup(html, "html.parser")
-        except Exception as e:
-            log.debug(f"Error parsing html: {e}")
-            return False
-
-        forms = soup.find_all("form")
-
-        # first, check for obvious password fields
-        for form in forms:
-            if form.find_all("input", {"type": "password"}):
-                return True
-
-        # next, check for forms that have both a user-like and password-like field
-        for form in forms:
-            user_fields = sum(bool(form.find_all("input", {"name": r})) for r in self.user_keywords)
-            pass_fields = sum(bool(form.find_all("input", {"name": r})) for r in self.pass_keywords)
-            if user_fields and pass_fields:
-                return True
-        return False
-
     def response_to_json(self, response):
         """
         Convert web response to JSON object, similar to the output of `httpx -irr -json`
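The net effect of the web.py changes above is that request and download error handling moves into the HTTP engine: failures come back to the client as dicts carrying `_request_error` or `_download_error` plus the `_response`, and are re-raised as a `WebError` only when the caller passes `raise_error=True`. A minimal usage sketch, assuming a BBOT module context (`self.helpers`, `self.warning`) and that `WebError` is importable from `bbot.errors`; none of this is taken from the diff itself:

```python
# Hedged sketch of calling the updated helper with raise_error=True.
from bbot.errors import WebError

async def fetch(self, url):
    try:
        # with raise_error=True, engine-side failures are re-raised as WebError,
        # with the partial response (if any) attached as .response
        return await self.helpers.request(url, raise_error=True)
    except WebError as e:
        self.warning(f"request to {url} failed: {e} (response: {getattr(e, 'response', None)})")
        return None
```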
bbot/core/helpers/wordcloud.py
CHANGED

@@ -111,7 +111,7 @@ class WordCloud(dict):
         results = set()
         for word in words:
             h = hash(word)
-            if not
+            if h not in results:
                 results.add(h)
                 yield (word,)
         if numbers > 0:
@@ -119,7 +119,7 @@ class WordCloud(dict):
             for word in words:
                 for number_mutation in self.get_number_mutations(word, n=numbers, padding=number_padding):
                     h = hash(number_mutation)
-                    if not
+                    if h not in results:
                         results.add(h)
                         yield (number_mutation,)
         for word in words:
@@ -322,7 +322,7 @@ class WordCloud(dict):

     @property
     def default_filename(self):
-        return self.parent_helper.preset.scan.home /
+        return self.parent_helper.preset.scan.home / "wordcloud.tsv"

     def save(self, filename=None, limit=None):
         """
@@ -357,7 +357,7 @@ class WordCloud(dict):
                 log.debug(f"Saved word cloud ({len(self):,} words) to {filename}")
                 return True, filename
             else:
-                log.debug(
+                log.debug("No words to save")
         except Exception as e:
             import traceback

@@ -421,7 +421,7 @@ class Mutator(dict):
     def mutate(self, word, max_mutations=None, mutations=None):
         if mutations is None:
             mutations = self.top_mutations(max_mutations)
-        for mutation
+        for mutation in mutations.keys():
             ret = []
             for s in mutation:
                 if s is not None:
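The wordcloud changes above mostly restore truncated conditionals; the underlying pattern is a generator that deduplicates mutations by hash before yielding them. A standalone illustration of that pattern (not bbot code):

```python
# Dedup-by-hash generator: yield each word at most once, as a 1-tuple.
def unique_words(words):
    seen = set()
    for word in words:
        h = hash(word)
        if h not in seen:
            seen.add(h)
            yield (word,)

assert list(unique_words(["admin", "dev", "admin"])) == [("admin",), ("dev",)]
```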
bbot/core/modules.py
CHANGED

@@ -153,7 +153,7 @@ class ModuleLoader:
             else:
                 log.debug(f"Preloading {module_name} from disk")
                 if module_dir.name == "modules":
-                    namespace =
+                    namespace = "bbot.modules"
                 else:
                     namespace = f"bbot.modules.{module_dir.name}"
                 try:
@@ -235,7 +235,7 @@ class ModuleLoader:
         return self.__preloaded

     def get_recursive_dirs(self, *dirs):
-        dirs =
+        dirs = {Path(d).resolve() for d in dirs}
         for d in list(dirs):
             if not d.is_dir():
                 continue
@@ -337,77 +337,77 @@ class ModuleLoader:
             # look for classes
             if type(root_element) == ast.ClassDef:
                 for class_attr in root_element.body:
-
                     # class attributes that are dictionaries
                     if type(class_attr) == ast.Assign and type(class_attr.value) == ast.Dict:
                         # module options
-                        if any(
+                        if any(target.id == "options" for target in class_attr.targets):
                             config.update(ast.literal_eval(class_attr.value))
                         # module options
-                        elif any(
+                        elif any(target.id == "options_desc" for target in class_attr.targets):
                             options_desc.update(ast.literal_eval(class_attr.value))
                         # module metadata
-                        elif any(
+                        elif any(target.id == "meta" for target in class_attr.targets):
                             meta = ast.literal_eval(class_attr.value)

                     # class attributes that are lists
                     if type(class_attr) == ast.Assign and type(class_attr.value) == ast.List:
                         # flags
-                        if any(
+                        if any(target.id == "flags" for target in class_attr.targets):
                             for flag in class_attr.value.elts:
                                 if type(flag.value) == str:
                                     flags.add(flag.value)
                         # watched events
-                        elif any(
+                        elif any(target.id == "watched_events" for target in class_attr.targets):
                             for event_type in class_attr.value.elts:
                                 if type(event_type.value) == str:
                                     watched_events.add(event_type.value)
                         # produced events
-                        elif any(
+                        elif any(target.id == "produced_events" for target in class_attr.targets):
                             for event_type in class_attr.value.elts:
                                 if type(event_type.value) == str:
                                     produced_events.add(event_type.value)

                         # bbot module dependencies
-                        elif any(
+                        elif any(target.id == "deps_modules" for target in class_attr.targets):
                             for dep_module in class_attr.value.elts:
                                 if type(dep_module.value) == str:
                                     deps_modules.add(dep_module.value)
                         # python dependencies
-                        elif any(
+                        elif any(target.id == "deps_pip" for target in class_attr.targets):
                             for dep_pip in class_attr.value.elts:
                                 if type(dep_pip.value) == str:
                                     deps_pip.append(dep_pip.value)
-                        elif any(
+                        elif any(target.id == "deps_pip_constraints" for target in class_attr.targets):
                             for dep_pip in class_attr.value.elts:
                                 if type(dep_pip.value) == str:
                                     deps_pip_constraints.append(dep_pip.value)
                         # apt dependencies
-                        elif any(
+                        elif any(target.id == "deps_apt" for target in class_attr.targets):
                             for dep_apt in class_attr.value.elts:
                                 if type(dep_apt.value) == str:
                                     deps_apt.append(dep_apt.value)
                         # bash dependencies
-                        elif any(
+                        elif any(target.id == "deps_shell" for target in class_attr.targets):
                             for dep_shell in class_attr.value.elts:
                                 deps_shell.append(ast.literal_eval(dep_shell))
                         # ansible playbook
-                        elif any(
+                        elif any(target.id == "deps_ansible" for target in class_attr.targets):
                             ansible_tasks = ast.literal_eval(class_attr.value)
                         # shared/common module dependencies
-                        elif any(
+                        elif any(target.id == "deps_common" for target in class_attr.targets):
                             for dep_common in class_attr.value.elts:
                                 if type(dep_common.value) == str:
                                     deps_common.append(dep_common.value)

         for task in ansible_tasks:
-            if
+            if "become" not in task:
                 task["become"] = False
             # don't sudo brew
-            elif os_platform() == "darwin" and ("package" in task and task.get("become", False)
+            elif os_platform() == "darwin" and ("package" in task and task.get("become", False) is True):
                 task["become"] = False

         preloaded_data = {
+            "path": str(module_file.resolve()),
             "watched_events": sorted(watched_events),
             "produced_events": sorted(produced_events),
             "flags": sorted(flags),
@@ -436,8 +436,8 @@ class ModuleLoader:
                         f'Error while preloading module "{module_file}": No shared dependency named "{dep_common}" (choices: {common_choices})'
                     )
             for ansible_task in ansible_task_list:
-                if any(x
-                    x
+                if any(x is True for x in search_dict_by_key("become", ansible_task)) or any(
+                    x is True for x in search_dict_by_key("ansible_become", ansible_tasks)
                 ):
                     preloaded_data["sudo"] = True
         return preloaded_data
@@ -467,14 +467,23 @@ class ModuleLoader:
         >>> isinstance(module, object)
         True
         """
-
-
-
+        preloaded = self._preloaded[module_name]
+        namespace = preloaded["namespace"]
+        try:
+            module_path = preloaded["path"]
+        except KeyError:
+            module_path = preloaded["cache_key"][0]
+        full_namespace = f"{namespace}.{module_name}"
+
+        spec = importlib.util.spec_from_file_location(full_namespace, module_path)
+        module = importlib.util.module_from_spec(spec)
+        sys.modules[full_namespace] = module
+        spec.loader.exec_module(module)

         # for every top-level variable in the .py file
-        for variable in
+        for variable in module.__dict__.keys():
             # get its value
-            value = getattr(
+            value = getattr(module, variable)
             with suppress(AttributeError):
                 # if it has watched_events and produced_events
                 if all(
@@ -531,7 +540,7 @@ class ModuleLoader:
                 with suppress(KeyError):
                     choices.remove(modname)
                 if event_type not in resolve_choices:
-                    resolve_choices[event_type] =
+                    resolve_choices[event_type] = {}
                 deps = resolve_choices[event_type]
                 self.add_or_create(deps, "required_by", modname)
                 for c in choices:
@@ -630,7 +639,7 @@ class ModuleLoader:
     def modules_options_table(self, modules=None, mod_type=None):
         table = []
         header = ["Config Option", "Type", "Description", "Default"]
-        for
+        for module_options in self.modules_options(modules, mod_type).values():
             table += module_options
         return make_table(table, header)

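The load_module() changes above switch to loading each BBOT module from its preloaded file path with importlib. A self-contained sketch of that importlib pattern (the paths and names here are illustrative, not bbot's):

```python
import sys
import importlib.util

def load_python_file(module_path, full_namespace):
    # build a spec from the file path, register the module, then execute it
    spec = importlib.util.spec_from_file_location(full_namespace, module_path)
    module = importlib.util.module_from_spec(spec)
    # register before exec so lookups against sys.modules resolve during execution
    sys.modules[full_namespace] = module
    spec.loader.exec_module(module)
    return module

# e.g. load_python_file("/path/to/mymodule.py", "bbot.modules.mymodule")
```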
bbot/core/multiprocess.py
ADDED

@@ -0,0 +1,58 @@
+import os
+import atexit
+from contextlib import suppress
+
+
+class SharedInterpreterState:
+    """
+    A class to track the primary BBOT process.
+
+    Used to prevent spawning multiple unwanted processes with multiprocessing.
+    """
+
+    def __init__(self):
+        self.main_process_var_name = "_BBOT_MAIN_PID"
+        self.scan_process_var_name = "_BBOT_SCAN_PID"
+        atexit.register(self.cleanup)
+
+    @property
+    def is_main_process(self):
+        is_main_process = self.main_pid == os.getpid()
+        return is_main_process
+
+    @property
+    def is_scan_process(self):
+        is_scan_process = os.getpid() == self.scan_pid
+        return is_scan_process
+
+    @property
+    def main_pid(self):
+        main_pid = int(os.environ.get(self.main_process_var_name, 0))
+        if main_pid == 0:
+            main_pid = os.getpid()
+            # if main PID is not set, set it to the current PID
+            os.environ[self.main_process_var_name] = str(main_pid)
+        return main_pid
+
+    @property
+    def scan_pid(self):
+        scan_pid = int(os.environ.get(self.scan_process_var_name, 0))
+        if scan_pid == 0:
+            scan_pid = os.getpid()
+            # if scan PID is not set, set it to the current PID
+            os.environ[self.scan_process_var_name] = str(scan_pid)
+        return scan_pid
+
+    def update_scan_pid(self):
+        os.environ[self.scan_process_var_name] = str(os.getpid())
+
+    def cleanup(self):
+        with suppress(Exception):
+            if self.is_main_process:
+                with suppress(KeyError):
+                    del os.environ[self.main_process_var_name]
+                with suppress(KeyError):
+                    del os.environ[self.scan_process_var_name]
+
+
+SHARED_INTERPRETER_STATE = SharedInterpreterState()
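The new SharedInterpreterState stores the main and scan PIDs in environment variables, so spawned child processes inherit them and can tell they are not the primary BBOT process. A hedged usage sketch (the import path follows the file listing above; the surrounding logic is illustrative):

```python
import os
from bbot.core.multiprocess import SHARED_INTERPRETER_STATE

# children inherit _BBOT_MAIN_PID through the environment, so this check is
# True only in the original interpreter and False in worker processes
if SHARED_INTERPRETER_STATE.is_main_process:
    print(f"primary BBOT process (pid {os.getpid()})")
else:
    print("child process; skipping one-time setup")
```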
bbot/core/shared_deps.py
CHANGED

@@ -79,13 +79,23 @@ DEP_CHROMIUM = [
         "ignore_errors": True,
     },
     {
-        "name": "Install Chromium dependencies (
+        "name": "Install Chromium dependencies (Ubuntu 24.04)",
         "package": {
-            "name": "
+            "name": "libasound2t64,libatk-bridge2.0-0,libatk1.0-0,libcairo2,libcups2,libdrm2,libgbm1,libnss3,libpango-1.0-0,libglib2.0-0,libxcomposite1,libxdamage1,libxfixes3,libxkbcommon0,libxrandr2",
             "state": "present",
         },
         "become": True,
-        "when": "ansible_facts['
+        "when": "ansible_facts['distribution'] == 'Ubuntu' and ansible_facts['distribution_version'] == '24.04'",
+        "ignore_errors": True,
+    },
+    {
+        "name": "Install Chromium dependencies (Other Debian-based)",
+        "package": {
+            "name": "libasound2,libatk-bridge2.0-0,libatk1.0-0,libcairo2,libcups2,libdrm2,libgbm1,libnss3,libpango-1.0-0,libglib2.0-0,libxcomposite1,libxdamage1,libxfixes3,libxkbcommon0,libxrandr2",
+            "state": "present",
+        },
+        "become": True,
+        "when": "ansible_facts['os_family'] == 'Debian' and not (ansible_facts['distribution'] == 'Ubuntu' and ansible_facts['distribution_version'] == '24.04')",
         "ignore_errors": True,
     },
     {
@@ -149,6 +159,39 @@ DEP_MASSCAN = [
     },
 ]

+DEP_JAVA = [
+    {
+        "name": "Check if Java is installed",
+        "command": "which java",
+        "register": "java_installed",
+        "ignore_errors": True,
+    },
+    {
+        "name": "Install latest JRE (Debian)",
+        "package": {"name": ["default-jre"], "state": "present"},
+        "become": True,
+        "when": "ansible_facts['os_family'] == 'Debian' and java_installed.rc != 0",
+    },
+    {
+        "name": "Install latest JRE (Arch)",
+        "package": {"name": ["jre-openjdk"], "state": "present"},
+        "become": True,
+        "when": "ansible_facts['os_family'] == 'Archlinux' and java_installed.rc != 0",
+    },
+    {
+        "name": "Install latest JRE (Fedora)",
+        "package": {"name": ["which", "java-latest-openjdk-headless"], "state": "present"},
+        "become": True,
+        "when": "ansible_facts['os_family'] == 'RedHat' and java_installed.rc != 0",
+    },
+    {
+        "name": "Install latest JRE (Alpine)",
+        "package": {"name": ["openjdk11"], "state": "present"},
+        "become": True,
+        "when": "ansible_facts['os_family'] == 'Alpine' and java_installed.rc != 0",
+    },
+]
+
 # shared module dependencies -- ffuf, massdns, chromium, etc.
 SHARED_DEPS = {}
 for var, val in list(locals().items()):