bbot 2.0.1.4720rc0__py3-none-any.whl → 2.3.0.5397rc0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- bbot/__init__.py +1 -1
- bbot/cli.py +3 -7
- bbot/core/config/files.py +0 -1
- bbot/core/config/logger.py +34 -4
- bbot/core/core.py +21 -4
- bbot/core/engine.py +9 -8
- bbot/core/event/base.py +131 -52
- bbot/core/helpers/bloom.py +10 -3
- bbot/core/helpers/command.py +8 -7
- bbot/core/helpers/depsinstaller/installer.py +31 -13
- bbot/core/helpers/diff.py +10 -10
- bbot/core/helpers/dns/brute.py +7 -4
- bbot/core/helpers/dns/dns.py +1 -2
- bbot/core/helpers/dns/engine.py +4 -6
- bbot/core/helpers/dns/helpers.py +2 -2
- bbot/core/helpers/dns/mock.py +0 -1
- bbot/core/helpers/files.py +1 -1
- bbot/core/helpers/helper.py +7 -4
- bbot/core/helpers/interactsh.py +3 -3
- bbot/core/helpers/libmagic.py +65 -0
- bbot/core/helpers/misc.py +65 -22
- bbot/core/helpers/names_generator.py +17 -3
- bbot/core/helpers/process.py +0 -20
- bbot/core/helpers/regex.py +1 -1
- bbot/core/helpers/regexes.py +12 -6
- bbot/core/helpers/validators.py +1 -2
- bbot/core/helpers/web/client.py +1 -1
- bbot/core/helpers/web/engine.py +1 -2
- bbot/core/helpers/web/web.py +4 -114
- bbot/core/helpers/wordcloud.py +5 -5
- bbot/core/modules.py +36 -27
- bbot/core/multiprocess.py +58 -0
- bbot/core/shared_deps.py +46 -3
- bbot/db/sql/models.py +147 -0
- bbot/defaults.yml +11 -9
- bbot/modules/anubisdb.py +2 -2
- bbot/modules/apkpure.py +63 -0
- bbot/modules/azure_tenant.py +2 -2
- bbot/modules/baddns.py +35 -19
- bbot/modules/baddns_direct.py +92 -0
- bbot/modules/baddns_zone.py +3 -8
- bbot/modules/badsecrets.py +4 -3
- bbot/modules/base.py +195 -51
- bbot/modules/bevigil.py +7 -7
- bbot/modules/binaryedge.py +7 -4
- bbot/modules/bufferoverrun.py +47 -0
- bbot/modules/builtwith.py +6 -10
- bbot/modules/bypass403.py +5 -5
- bbot/modules/c99.py +10 -7
- bbot/modules/censys.py +9 -13
- bbot/modules/certspotter.py +5 -3
- bbot/modules/chaos.py +9 -7
- bbot/modules/code_repository.py +1 -0
- bbot/modules/columbus.py +3 -3
- bbot/modules/crt.py +5 -3
- bbot/modules/deadly/dastardly.py +1 -1
- bbot/modules/deadly/ffuf.py +9 -9
- bbot/modules/deadly/nuclei.py +3 -3
- bbot/modules/deadly/vhost.py +4 -3
- bbot/modules/dehashed.py +1 -1
- bbot/modules/digitorus.py +1 -1
- bbot/modules/dnsbimi.py +145 -0
- bbot/modules/dnscaa.py +3 -3
- bbot/modules/dnsdumpster.py +4 -4
- bbot/modules/dnstlsrpt.py +144 -0
- bbot/modules/docker_pull.py +7 -5
- bbot/modules/dockerhub.py +2 -2
- bbot/modules/dotnetnuke.py +18 -19
- bbot/modules/emailformat.py +1 -1
- bbot/modules/extractous.py +122 -0
- bbot/modules/filedownload.py +9 -7
- bbot/modules/fullhunt.py +7 -4
- bbot/modules/generic_ssrf.py +5 -5
- bbot/modules/github_codesearch.py +3 -2
- bbot/modules/github_org.py +4 -4
- bbot/modules/github_workflows.py +4 -4
- bbot/modules/gitlab.py +2 -5
- bbot/modules/google_playstore.py +93 -0
- bbot/modules/gowitness.py +48 -50
- bbot/modules/hackertarget.py +5 -3
- bbot/modules/host_header.py +5 -5
- bbot/modules/httpx.py +1 -4
- bbot/modules/hunterio.py +3 -9
- bbot/modules/iis_shortnames.py +19 -30
- bbot/modules/internal/cloudcheck.py +27 -12
- bbot/modules/internal/dnsresolve.py +22 -20
- bbot/modules/internal/excavate.py +85 -48
- bbot/modules/internal/speculate.py +41 -32
- bbot/modules/internetdb.py +4 -2
- bbot/modules/ip2location.py +3 -5
- bbot/modules/ipneighbor.py +1 -1
- bbot/modules/ipstack.py +3 -8
- bbot/modules/jadx.py +87 -0
- bbot/modules/leakix.py +11 -10
- bbot/modules/myssl.py +2 -2
- bbot/modules/newsletters.py +2 -2
- bbot/modules/otx.py +5 -3
- bbot/modules/output/asset_inventory.py +7 -7
- bbot/modules/output/base.py +1 -1
- bbot/modules/output/csv.py +1 -1
- bbot/modules/output/http.py +20 -14
- bbot/modules/output/mysql.py +51 -0
- bbot/modules/output/neo4j.py +7 -2
- bbot/modules/output/postgres.py +49 -0
- bbot/modules/output/slack.py +0 -1
- bbot/modules/output/sqlite.py +29 -0
- bbot/modules/output/stdout.py +2 -2
- bbot/modules/output/teams.py +107 -6
- bbot/modules/paramminer_headers.py +5 -8
- bbot/modules/passivetotal.py +13 -13
- bbot/modules/portscan.py +32 -6
- bbot/modules/postman.py +50 -126
- bbot/modules/postman_download.py +220 -0
- bbot/modules/rapiddns.py +3 -8
- bbot/modules/report/asn.py +11 -11
- bbot/modules/robots.py +3 -3
- bbot/modules/securitytrails.py +7 -10
- bbot/modules/securitytxt.py +1 -1
- bbot/modules/shodan_dns.py +7 -9
- bbot/modules/sitedossier.py +1 -1
- bbot/modules/skymem.py +2 -2
- bbot/modules/social.py +2 -1
- bbot/modules/subdomaincenter.py +1 -1
- bbot/modules/subdomainradar.py +160 -0
- bbot/modules/telerik.py +8 -8
- bbot/modules/templates/bucket.py +1 -1
- bbot/modules/templates/github.py +22 -14
- bbot/modules/templates/postman.py +21 -0
- bbot/modules/templates/shodan.py +14 -13
- bbot/modules/templates/sql.py +95 -0
- bbot/modules/templates/subdomain_enum.py +51 -16
- bbot/modules/templates/webhook.py +2 -4
- bbot/modules/trickest.py +8 -37
- bbot/modules/trufflehog.py +10 -12
- bbot/modules/url_manipulation.py +3 -3
- bbot/modules/urlscan.py +1 -1
- bbot/modules/viewdns.py +1 -1
- bbot/modules/virustotal.py +8 -30
- bbot/modules/wafw00f.py +1 -1
- bbot/modules/wayback.py +1 -1
- bbot/modules/wpscan.py +17 -11
- bbot/modules/zoomeye.py +11 -6
- bbot/presets/baddns-thorough.yml +12 -0
- bbot/presets/fast.yml +16 -0
- bbot/presets/kitchen-sink.yml +1 -0
- bbot/presets/spider.yml +4 -0
- bbot/presets/subdomain-enum.yml +7 -7
- bbot/scanner/manager.py +5 -16
- bbot/scanner/preset/args.py +44 -26
- bbot/scanner/preset/environ.py +7 -2
- bbot/scanner/preset/path.py +7 -4
- bbot/scanner/preset/preset.py +36 -23
- bbot/scanner/scanner.py +172 -62
- bbot/scanner/target.py +236 -434
- bbot/scripts/docs.py +1 -1
- bbot/test/bbot_fixtures.py +13 -3
- bbot/test/conftest.py +132 -100
- bbot/test/fastapi_test.py +17 -0
- bbot/test/owasp_mastg.apk +0 -0
- bbot/test/run_tests.sh +4 -4
- bbot/test/test.conf +2 -0
- bbot/test/test_step_1/test_bbot_fastapi.py +82 -0
- bbot/test/test_step_1/test_bloom_filter.py +2 -0
- bbot/test/test_step_1/test_cli.py +138 -64
- bbot/test/test_step_1/test_dns.py +62 -25
- bbot/test/test_step_1/test_engine.py +17 -17
- bbot/test/test_step_1/test_events.py +183 -28
- bbot/test/test_step_1/test_helpers.py +64 -28
- bbot/test/test_step_1/test_manager_deduplication.py +1 -1
- bbot/test/test_step_1/test_manager_scope_accuracy.py +333 -330
- bbot/test/test_step_1/test_modules_basic.py +68 -70
- bbot/test/test_step_1/test_presets.py +184 -96
- bbot/test/test_step_1/test_python_api.py +7 -2
- bbot/test/test_step_1/test_regexes.py +35 -5
- bbot/test/test_step_1/test_scan.py +39 -5
- bbot/test/test_step_1/test_scope.py +4 -3
- bbot/test/test_step_1/test_target.py +243 -145
- bbot/test/test_step_1/test_web.py +14 -8
- bbot/test/test_step_2/module_tests/base.py +15 -7
- bbot/test/test_step_2/module_tests/test_module_anubisdb.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_apkpure.py +71 -0
- bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +0 -1
- bbot/test/test_step_2/module_tests/test_module_azure_realm.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_baddns.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_baddns_direct.py +62 -0
- bbot/test/test_step_2/module_tests/test_module_bevigil.py +29 -2
- bbot/test/test_step_2/module_tests/test_module_binaryedge.py +4 -2
- bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_bucket_azure.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +35 -0
- bbot/test/test_step_2/module_tests/test_module_builtwith.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_bypass403.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_c99.py +126 -0
- bbot/test/test_step_2/module_tests/test_module_censys.py +4 -1
- bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +4 -0
- bbot/test/test_step_2/module_tests/test_module_code_repository.py +11 -1
- bbot/test/test_step_2/module_tests/test_module_columbus.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_credshed.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_dastardly.py +2 -1
- bbot/test/test_step_2/module_tests/test_module_dehashed.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_digitorus.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_discord.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +103 -0
- bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +9 -10
- bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +64 -0
- bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py +0 -8
- bbot/test/test_step_2/module_tests/test_module_excavate.py +17 -37
- bbot/test/test_step_2/module_tests/test_module_extractous.py +54 -0
- bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_filedownload.py +14 -14
- bbot/test/test_step_2/module_tests/test_module_git_clone.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_github_org.py +19 -8
- bbot/test/test_step_2/module_tests/test_module_github_workflows.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_gitlab.py +9 -4
- bbot/test/test_step_2/module_tests/test_module_google_playstore.py +83 -0
- bbot/test/test_step_2/module_tests/test_module_gowitness.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_host_header.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_http.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_httpx.py +10 -8
- bbot/test/test_step_2/module_tests/test_module_hunterio.py +68 -4
- bbot/test/test_step_2/module_tests/test_module_jadx.py +55 -0
- bbot/test/test_step_2/module_tests/test_module_json.py +22 -9
- bbot/test/test_step_2/module_tests/test_module_leakix.py +7 -3
- bbot/test/test_step_2/module_tests/test_module_mysql.py +76 -0
- bbot/test/test_step_2/module_tests/test_module_myssl.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_neo4j.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_newsletters.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_ntlm.py +7 -7
- bbot/test/test_step_2/module_tests/test_module_oauth.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_otx.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +0 -6
- bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +2 -9
- bbot/test/test_step_2/module_tests/test_module_passivetotal.py +3 -1
- bbot/test/test_step_2/module_tests/test_module_portscan.py +9 -8
- bbot/test/test_step_2/module_tests/test_module_postgres.py +74 -0
- bbot/test/test_step_2/module_tests/test_module_postman.py +84 -253
- bbot/test/test_step_2/module_tests/test_module_postman_download.py +439 -0
- bbot/test/test_step_2/module_tests/test_module_rapiddns.py +93 -1
- bbot/test/test_step_2/module_tests/test_module_shodan_dns.py +20 -1
- bbot/test/test_step_2/module_tests/test_module_sitedossier.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_smuggler.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_social.py +11 -1
- bbot/test/test_step_2/module_tests/test_module_speculate.py +2 -6
- bbot/test/test_step_2/module_tests/test_module_splunk.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_sqlite.py +18 -0
- bbot/test/test_step_2/module_tests/test_module_sslcert.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_stdout.py +5 -3
- bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_subdomainradar.py +208 -0
- bbot/test/test_step_2/module_tests/test_module_subdomains.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_teams.py +8 -6
- bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_trufflehog.py +317 -14
- bbot/test/test_step_2/module_tests/test_module_wayback.py +1 -1
- bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py +2 -2
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/METADATA +48 -18
- bbot-2.3.0.5397rc0.dist-info/RECORD +421 -0
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/WHEEL +1 -1
- bbot/modules/unstructured.py +0 -163
- bbot/test/test_step_2/module_tests/test_module_unstructured.py +0 -102
- bbot-2.0.1.4720rc0.dist-info/RECORD +0 -387
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/LICENSE +0 -0
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,93 @@
+from bbot.modules.base import BaseModule
+
+
+class google_playstore(BaseModule):
+    watched_events = ["ORG_STUB", "CODE_REPOSITORY"]
+    produced_events = ["MOBILE_APP"]
+    flags = ["passive", "safe", "code-enum"]
+    meta = {
+        "description": "Search for android applications on play.google.com",
+        "created_date": "2024-10-08",
+        "author": "@domwhewell-sage",
+    }
+
+    base_url = "https://play.google.com"
+
+    async def setup(self):
+        self.app_link_regex = self.helpers.re.compile(r"/store/apps/details\?id=([a-zA-Z0-9._-]+)")
+        return True
+
+    async def filter_event(self, event):
+        if event.type == "CODE_REPOSITORY":
+            if "android" not in event.tags:
+                return False, "event is not an android repository"
+        return True
+
+    async def handle_event(self, event):
+        if event.type == "CODE_REPOSITORY":
+            await self.handle_url(event)
+        elif event.type == "ORG_STUB":
+            await self.handle_org_stub(event)
+
+    async def handle_url(self, event):
+        repo_url = event.data.get("url")
+        app_id = repo_url.split("id=")[1].split("&")[0]
+        await self.emit_event(
+            {"id": app_id, "url": repo_url},
+            "MOBILE_APP",
+            tags="android",
+            parent=event,
+            context=f'{{module}} extracted the mobile app name "{app_id}" from: {repo_url}',
+        )
+
+    async def handle_org_stub(self, event):
+        org_name = event.data
+        self.verbose(f"Searching for any android applications for {org_name}")
+        for apk_name in await self.query(org_name):
+            valid_apk = await self.validate_apk(apk_name)
+            if valid_apk:
+                self.verbose(f"Got {apk_name} from playstore")
+                await self.emit_event(
+                    {"id": apk_name, "url": f"{self.base_url}/store/apps/details?id={apk_name}"},
+                    "MOBILE_APP",
+                    tags="android",
+                    parent=event,
+                    context=f'{{module}} searched play.google.com for apps belonging to "{org_name}" and found "{apk_name}" to be in scope',
+                )
+            else:
+                self.debug(f"Got {apk_name} from playstore app details does not contain any in-scope URLs or Emails")
+
+    async def query(self, query):
+        app_links = []
+        url = f"{self.base_url}/store/search?q={self.helpers.quote(query)}&c=apps"
+        r = await self.helpers.request(url)
+        if r is None:
+            return app_links
+        status_code = getattr(r, "status_code", 0)
+        try:
+            html_content = r.content.decode("utf-8")
+            # Use regex to find all app links
+            app_links = await self.helpers.re.findall(self.app_link_regex, html_content)
+        except Exception as e:
+            self.warning(f"Failed to parse html response from {r.url} (HTTP status: {status_code}): {e}")
+            return app_links
+        return app_links
+
+    async def validate_apk(self, apk_name):
+        """
+        Check the app details page the "App support" section will include URLs or Emails to the app developer
+        """
+        in_scope = False
+        url = f"{self.base_url}/store/apps/details?id={apk_name}"
+        r = await self.helpers.request(url)
+        if r is None:
+            return in_scope
+        status_code = getattr(r, "status_code", 0)
+        if status_code == 200:
+            html = r.text
+            in_scope_hosts = await self.scan.extract_in_scope_hostnames(html)
+            if in_scope_hosts:
+                in_scope = True
+        else:
+            self.warning(f"Failed to fetch {url} (HTTP status: {status_code})")
+        return in_scope
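The core of this new google_playstore module is the `app_link_regex` compiled in `setup()` and applied in `query()`. A rough standalone sketch of that extraction step, using plain `re` instead of BBOT's helpers and a made-up HTML snippet:

import re

# Same pattern the module compiles: capture the package id from Play Store "details" links.
app_link_regex = re.compile(r"/store/apps/details\?id=([a-zA-Z0-9._-]+)")

# Hypothetical search-results HTML.
html = (
    '<a href="/store/apps/details?id=com.example.app">Example App</a>'
    '<a href="/store/apps/details?id=org.example.tool&hl=en">Example Tool</a>'
)

print(app_link_regex.findall(html))
# ['com.example.app', 'org.example.tool']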
bbot/modules/gowitness.py
CHANGED
@@ -1,5 +1,5 @@
 import asyncio
-import
+import aiosqlite
 import multiprocessing
 from pathlib import Path
 from contextlib import suppress
@@ -34,6 +34,7 @@ class gowitness(BaseModule):
         "idle_timeout": "Skip the current gowitness batch if it stalls for longer than this many seconds",
     }
     deps_common = ["chromium"]
+    deps_pip = ["aiosqlite"]
     deps_ansible = [
         {
             "name": "Download gowitness",
@@ -72,7 +73,7 @@ class gowitness(BaseModule):

         # make sure we have a working chrome install
         chrome_test_pass = False
-        for binary in ("chrome", "chromium", custom_chrome_path):
+        for binary in ("chrome", "chromium", "chromium-browser", custom_chrome_path):
             binary_path = self.helpers.which(binary)
             if binary_path and Path(binary_path).is_file():
                 chrome_test_proc = await self.run_process([binary_path, "--version"])
@@ -87,7 +88,7 @@ class gowitness(BaseModule):
         self.screenshot_path = self.base_path / "screenshots"
         self.command = self.construct_command()
         self.prepped = False
-        self.screenshots_taken =
+        self.screenshots_taken = {}
         self.connections_logged = set()
         self.technologies_found = set()
         return True
@@ -136,7 +137,8 @@ class gowitness(BaseModule):
             return

         # emit web screenshots
-
+        new_screenshots = await self.get_new_screenshots()
+        for filename, screenshot in new_screenshots.items():
             url = screenshot["url"]
             final_url = screenshot["final_url"]
             filename = self.screenshot_path / screenshot["filename"]
@@ -150,7 +152,8 @@
             )

         # emit URLs
-
+        new_network_logs = await self.get_new_network_logs()
+        for url, row in new_network_logs.items():
             ip = row["ip"]
             status_code = row["status_code"]
             tags = [f"status-{status_code}", f"ip-{ip}", "spider-danger"]
@@ -168,7 +171,8 @@
             )

         # emit technologies
-
+        new_technologies = await self.get_new_technologies()
+        for row in new_technologies.values():
             parent_id = row["url_id"]
             parent_url = self.screenshots_taken[parent_id]
             parent_event = event_dict[parent_url]
@@ -207,67 +211,61 @@
             command += ["--timeout", str(self.timeout)]
         return command

-
-    def new_screenshots(self):
+    async def get_new_screenshots(self):
         screenshots = {}
         if self.db_path.is_file():
-            with
-                con.row_factory =
+            async with aiosqlite.connect(str(self.db_path)) as con:
+                con.row_factory = aiosqlite.Row
                 con.text_factory = self.helpers.smart_decode
-
-
-
-
-
-
-
-                            screenshots[_id] = row
+                async with con.execute("SELECT * FROM urls") as cur:
+                    async for row in cur:
+                        row = dict(row)
+                        _id = row["id"]
+                        if _id not in self.screenshots_taken:
+                            self.screenshots_taken[_id] = row["url"]
+                            screenshots[_id] = row
         return screenshots

-
-
-        network_logs = dict()
+    async def get_new_network_logs(self):
+        network_logs = {}
         if self.db_path.is_file():
-            with
-                con.row_factory =
-
-
-
-
-
-
-
-                        network_logs[url] = row
+            async with aiosqlite.connect(str(self.db_path)) as con:
+                con.row_factory = aiosqlite.Row
+                async with con.execute("SELECT * FROM network_logs") as cur:
+                    async for row in cur:
+                        row = dict(row)
+                        url = row["final_url"]
+                        if url not in self.connections_logged:
+                            self.connections_logged.add(url)
+                            network_logs[url] = row
         return network_logs

-
-
-        technologies = dict()
+    async def get_new_technologies(self):
+        technologies = {}
         if self.db_path.is_file():
-            with
-                con.row_factory =
-
-
-
-
-
-
-
-                            technologies[_id] = row
+            async with aiosqlite.connect(str(self.db_path)) as con:
+                con.row_factory = aiosqlite.Row
+                async with con.execute("SELECT * FROM technologies") as cur:
+                    async for row in cur:
+                        _id = row["id"]
+                        if _id not in self.technologies_found:
+                            self.technologies_found.add(_id)
+                            row = dict(row)
+                            technologies[_id] = row
         return technologies

-    def cur_execute(self, cur, query):
+    async def cur_execute(self, cur, query):
         try:
-            return cur.execute(query)
-        except
+            return await cur.execute(query)
+        except aiosqlite.OperationalError as e:
             self.warning(f"Error executing query: {query}: {e}")
             return []

     async def report(self):
         if self.screenshots_taken:
             self.success(f"{len(self.screenshots_taken):,} web screenshots captured. To view:")
-            self.success(
+            self.success(" - Start gowitness")
             self.success(f" - cd {self.base_path} && ./gowitness server")
-            self.success(
+            self.success(" - Browse to http://localhost:7171")
         else:
-            self.info(
+            self.info("No web screenshots captured")
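The three `get_new_*` methods above share one pattern: open the gowitness sqlite database with aiosqlite and stream rows without blocking the event loop. A condensed sketch of that pattern, with a hypothetical database path and the `urls` table as an example:

import asyncio

import aiosqlite


async def read_new_rows(db_path, seen_ids):
    rows = {}
    async with aiosqlite.connect(db_path) as con:
        con.row_factory = aiosqlite.Row  # rows behave like mappings
        async with con.execute("SELECT * FROM urls") as cur:
            async for row in cur:
                row = dict(row)
                if row["id"] not in seen_ids:
                    seen_ids.add(row["id"])
                    rows[row["id"]] = row
    return rows


# asyncio.run(read_new_rows("gowitness.sqlite3", set()))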
bbot/modules/hackertarget.py
CHANGED
@@ -15,15 +15,17 @@ class hackertarget(subdomain_enum):

     async def request_url(self, query):
         url = f"{self.base_url}/hostsearch/?q={self.helpers.quote(query)}"
-        response = await self.
+        response = await self.api_request(url)
         return response

-    def parse_results(self, r, query):
+    async def parse_results(self, r, query):
+        results = set()
         for line in r.text.splitlines():
             host = line.split(",")[0]
             try:
                 self.helpers.validators.validate_host(host)
-
+                results.add(host)
             except ValueError:
                 self.debug(f"Error validating API result: {line}")
                 continue
+        return results
bbot/modules/host_header.py
CHANGED
@@ -19,7 +19,7 @@ class host_header(BaseModule):

     async def setup(self):
         self.subdomain_tags = {}
-        if self.scan.config.get("interactsh_disable", False)
+        if self.scan.config.get("interactsh_disable", False) is False:
             try:
                 self.interactsh_instance = self.helpers.interactsh()
                 self.domain = await self.interactsh_instance.register(callback=self.interactsh_callback)
@@ -60,7 +60,7 @@ class host_header(BaseModule):
             self.debug("skipping results because subdomain tag was missing")

     async def finish(self):
-        if self.scan.config.get("interactsh_disable", False)
+        if self.scan.config.get("interactsh_disable", False) is False:
             await self.helpers.sleep(5)
             try:
                 for r in await self.interactsh_instance.poll():
@@ -69,7 +69,7 @@ class host_header(BaseModule):
                 self.debug(f"Error in interact.sh: {e}")

     async def cleanup(self):
-        if self.scan.config.get("interactsh_disable", False)
+        if self.scan.config.get("interactsh_disable", False) is False:
             try:
                 await self.interactsh_instance.deregister()
                 self.debug(
@@ -84,7 +84,7 @@ class host_header(BaseModule):

         added_cookies = {}

-        for
+        for header_values in event.data["header-dict"].values():
             for header_value in header_values:
                 if header_value.lower() == "set-cookie":
                     header_split = header_value.split("=")
@@ -136,7 +136,7 @@ class host_header(BaseModule):

         split_output = output.split("\n")
         if " 4" in split_output:
-            description =
+            description = "Duplicate Host Header Tolerated"
            await self.emit_event(
                {
                    "host": str(event.host),
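All three hunks above tighten the same guard around interactsh setup, polling, and teardown. A quick illustration of what the explicit `is False` comparison means in practice (made-up config values; only a literal False, the option's default, takes the interactsh branch):

for value in (False, True, "yes", 0, None):
    print(repr(value), value is False)
# prints: False True / True False / 'yes' False / 0 False / None False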
bbot/modules/httpx.py
CHANGED
@@ -90,7 +90,7 @@ class httpx(BaseModule):
         else:
             url = str(event.data)
             url_hash = hash((event.host, event.port, has_spider_max))
-        if url_hash
+        if url_hash is None:
             url_hash = hash((url, has_spider_max))
         return url, url_hash

@@ -172,9 +172,6 @@
             httpx_ip = j.get("host", "")
             if httpx_ip:
                 tags.append(f"ip-{httpx_ip}")
-            # detect login pages
-            if self.helpers.web.is_login_page(j.get("body", "")):
-                tags.append("login-page")
             # grab title
             title = self.helpers.tagify(j.get("title", ""), maxlen=30)
             if title:
bbot/modules/hunterio.py
CHANGED
@@ -15,14 +15,9 @@ class hunterio(subdomain_enum_apikey):
     options_desc = {"api_key": "Hunter.IO API key"}

     base_url = "https://api.hunter.io/v2"
+    ping_url = f"{base_url}/account?api_key={{api_key}}"
     limit = 100

-    async def ping(self):
-        url = f"{self.base_url}/account?api_key={self.api_key}"
-        r = await self.helpers.request(url)
-        resp_content = getattr(r, "text", "")
-        assert getattr(r, "status_code", 0) == 200, resp_content
-
     async def handle_event(self, event):
         query = self.make_query(event)
         for entry in await self.query(query):
@@ -56,10 +51,9 @@
     async def query(self, query):
         emails = []
         url = (
-            f"{self.base_url}/domain-search?domain={query}&api_key={
-            + "&limit={page_size}&offset={offset}"
+            f"{self.base_url}/domain-search?domain={query}&api_key={{api_key}}" + "&limit={page_size}&offset={offset}"
         )
-        agen = self.
+        agen = self.api_page_iter(url, page_size=self.limit)
         try:
             async for j in agen:
                 new_emails = j.get("data", {}).get("emails", [])
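The rewritten `query()` now hands a URL template with `{api_key}`, `{page_size}`, and `{offset}` placeholders to `api_page_iter`. A rough sketch of how such a template expands under offset-based paging (the domain and redacted key are placeholders, and this is not BBOT's actual helper):

url_template = (
    "https://api.hunter.io/v2/domain-search?domain=example.com&api_key={api_key}"
    "&limit={page_size}&offset={offset}"
)

page_size = 100
for page in range(3):
    print(url_template.format(api_key="<redacted>", page_size=page_size, offset=page * page_size))
# ...&offset=0, then ...&offset=100, then ...&offset=200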
bbot/modules/iis_shortnames.py
CHANGED
@@ -20,7 +20,7 @@ class iis_shortnames(BaseModule):
     meta = {
         "description": "Check for IIS shortname vulnerability",
         "created_date": "2022-04-15",
-        "author": "@
+        "author": "@liquidsec",
     }
     options = {"detect_only": True, "max_node_count": 50}
     options_desc = {
@@ -38,37 +38,26 @@
         control_url = f"{target}{random_string}*~1*/a.aspx"
         test_url = f"{target}*~1*/a.aspx"

-        urls_and_kwargs = []
         for method in ["GET", "POST", "OPTIONS", "DEBUG", "HEAD", "TRACE"]:
-            kwargs =
-            urls_and_kwargs.append((control_url, kwargs, method))
-            urls_and_kwargs.append((test_url, kwargs, method))
-
-        results = {}
-        async for url, kwargs, method, response in self.helpers.request_custom_batch(urls_and_kwargs):
-            try:
-                results[method][url] = response
-            except KeyError:
-                results[method] = {url: response}
-        for method, result in results.items():
+            kwargs = {"method": method, "allow_redirects": False, "timeout": 10}
             confirmations = 0
-            iterations =
+            iterations = 5  # one failed detection is tolerated, as long as its not the first run
             while iterations > 0:
-
-
-                if
-                    if
+                control_result = await self.helpers.request(control_url, **kwargs)
+                test_result = await self.helpers.request(test_url, **kwargs)
+                if control_result and test_result:
+                    if control_result.status_code != test_result.status_code:
                         confirmations += 1
-                        self.debug(f"New detection, number of confirmations: [{str(confirmations)}]")
-                        if confirmations >
-                            technique = f"{str(
-                            detections.append((method,
+                        self.debug(f"New detection on {target}, number of confirmations: [{str(confirmations)}]")
+                        if confirmations > 3:
+                            technique = f"{str(control_result.status_code)}/{str(test_result.status_code)} HTTP Code"
+                            detections.append((method, test_result.status_code, technique))
                             break
-                    elif ("Error Code</th><td>0x80070002" in
-                        "Error Code</th><td>0x00000000" in
+                    elif ("Error Code</th><td>0x80070002" in control_result.text) and (
+                        "Error Code</th><td>0x00000000" in test_result.text
                     ):
                         confirmations += 1
-                        if confirmations >
+                        if confirmations > 3:
                             detections.append((method, 0, technique))
                             technique = "HTTP Body Error Message"
                             break
@@ -139,7 +128,7 @@
         suffix = "/a.aspx"

         urls_and_kwargs = []
-        kwargs =
+        kwargs = {"method": method, "allow_redirects": False, "retries": 2, "timeout": 10}
         for c in valid_chars:
             for file_part in ("stem", "ext"):
                 payload = encode_all(f"*{c}*~1*")
@@ -171,7 +160,7 @@
         url_hint_list = []
         found_results = False

-        cl = ext_char_list if extension_mode
+        cl = ext_char_list if extension_mode is True else char_list

         urls_and_kwargs = []

@@ -180,7 +169,7 @@
                 wildcard = "*" if extension_mode else "*~1*"
                 payload = encode_all(f"{prefix}{c}{wildcard}")
                 url = f"{target}{payload}{suffix}"
-                kwargs =
+                kwargs = {"method": method}
                 urls_and_kwargs.append((url, kwargs, c))

         async for url, kwargs, c, response in self.helpers.request_custom_batch(urls_and_kwargs):
@@ -220,7 +209,7 @@
                 extension_mode,
                 node_count=node_count,
             )
-            if len(prefix) > 0 and found_results
+            if len(prefix) > 0 and found_results is False:
                 url_hint_list.append(f"{prefix}")
                 self.verbose(f"Found new (possibly partial) URL_HINT: {prefix} from node {target}")
         return url_hint_list
@@ -245,7 +234,7 @@
                 {"severity": "LOW", "host": str(event.host), "url": normalized_url, "description": description},
                 "VULNERABILITY",
                 event,
-                context=
+                context="{module} detected low {event.type}: IIS shortname enumeration",
             )
             if not self.config.get("detect_only"):
                 for detection in detections:
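The rewritten detection loop above only records a finding after repeated agreement: the random control URL and the wildcard test URL must behave differently on more than three runs. A toy model of that thresholding, using hypothetical status-code pairs rather than the module's real requests:

def confirmed(status_pairs, threshold=3):
    confirmations = 0
    for control_status, test_status in status_pairs:
        if control_status != test_status:
            confirmations += 1
            if confirmations > threshold:
                return True
    return False


print(confirmed([(404, 400)] * 5))  # True: the wildcard request differs every time
print(confirmed([(404, 404)] * 5))  # False: the wildcard behaves like the control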
bbot/modules/internal/cloudcheck.py
CHANGED
@@ -1,7 +1,9 @@
-from
+from contextlib import suppress

+from bbot.modules.base import BaseInterceptModule

-
+
+class CloudCheck(BaseInterceptModule):
     watched_events = ["*"]
     meta = {"description": "Tag events by cloud provider, identify cloud resources like storage buckets"}
     scope_distance_modifier = 1
@@ -13,7 +15,7 @@ class CloudCheck(InterceptModule):

     def make_dummy_modules(self):
         self.dummy_modules = {}
-        for provider_name
+        for provider_name in self.helpers.cloud.providers.keys():
             module = self.scan._make_dummy_module(f"cloud_{provider_name}", _type="scan")
             module.default_discovery_context = "{module} derived {event.type}: {event.host}"
             self.dummy_modules[provider_name] = module
@@ -28,22 +30,35 @@ class CloudCheck(InterceptModule):
         if self.dummy_modules is None:
             self.make_dummy_modules()
         # cloud tagging by hosts
-        hosts_to_check = set(
-
-
-
+        hosts_to_check = set(event.resolved_hosts)
+        with suppress(KeyError):
+            hosts_to_check.remove(event.host_original)
+        hosts_to_check = [event.host_original] + list(hosts_to_check)
+
+        for i, host in enumerate(hosts_to_check):
+            host_is_ip = self.helpers.is_ip(host)
            for provider, provider_type, subnet in self.helpers.cloudcheck(host):
                if provider:
                    event.add_tag(f"{provider_type}-{provider}")
+                    if host_is_ip:
+                        event.add_tag(f"{provider_type}-ip")
+                    else:
+                        # if the original hostname is a cloud domain, tag it as such
+                        if i == 0:
+                            event.add_tag(f"{provider_type}-domain")
+                        # any children are tagged as CNAMEs
+                        else:
+                            event.add_tag(f"{provider_type}-cname")

         found = set()
+        str_hosts_to_check = [str(host) for host in hosts_to_check]
         # look for cloud assets in hosts, http responses
         # loop through each provider
         for provider in self.helpers.cloud.providers.values():
             provider_name = provider.name.lower()
-            base_kwargs =
-                parent
-
+            base_kwargs = {
+                "parent": event, "tags": [f"{provider.provider_type}-{provider_name}"], "_provider": provider_name
+            }
             # loop through the provider's regex signatures, if any
             for event_type, sigs in provider.signatures.items():
                 if event_type != "STORAGE_BUCKET":
@@ -54,12 +69,12 @@ class CloudCheck(InterceptModule):
                     if event.type == "HTTP_RESPONSE":
                         matches = await self.helpers.re.findall(sig, event.data.get("body", ""))
                     elif event.type.startswith("DNS_NAME"):
-                        for host in
+                        for host in str_hosts_to_check:
                             match = sig.match(host)
                             if match:
                                 matches.append(match.groups())
                     for match in matches:
-                        if not
+                        if match not in found:
                             found.add(match)

                             _kwargs = dict(base_kwargs)
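The new tagging logic in cloudcheck depends on host ordering: the event's original host is checked first (index 0, tagged `<provider>-domain` when it matches a cloud provider) and every other resolved host is treated as a CNAME, with IPs tagged separately. A small sketch of just the ordering step, with made-up hosts (the real module keeps the remaining hosts unsorted):

def order_hosts(host_original, resolved_hosts):
    hosts = set(resolved_hosts)
    hosts.discard(host_original)  # avoid listing the original twice
    return [host_original] + sorted(hosts)


print(order_hosts("www.example.com", {"www.example.com", "cdn.example-cloud.net", "192.0.2.1"}))
# ['www.example.com', '192.0.2.1', 'cdn.example-cloud.net']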