bbot 2.0.1.4720rc0__py3-none-any.whl → 2.3.0.5401rc0__py3-none-any.whl
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of bbot might be problematic.
- bbot/__init__.py +1 -1
- bbot/cli.py +3 -7
- bbot/core/config/files.py +0 -1
- bbot/core/config/logger.py +34 -4
- bbot/core/core.py +21 -4
- bbot/core/engine.py +9 -8
- bbot/core/event/base.py +131 -52
- bbot/core/helpers/bloom.py +10 -3
- bbot/core/helpers/command.py +8 -7
- bbot/core/helpers/depsinstaller/installer.py +31 -13
- bbot/core/helpers/diff.py +10 -10
- bbot/core/helpers/dns/brute.py +7 -4
- bbot/core/helpers/dns/dns.py +1 -2
- bbot/core/helpers/dns/engine.py +4 -6
- bbot/core/helpers/dns/helpers.py +2 -2
- bbot/core/helpers/dns/mock.py +0 -1
- bbot/core/helpers/files.py +1 -1
- bbot/core/helpers/helper.py +7 -4
- bbot/core/helpers/interactsh.py +3 -3
- bbot/core/helpers/libmagic.py +65 -0
- bbot/core/helpers/misc.py +65 -22
- bbot/core/helpers/names_generator.py +17 -3
- bbot/core/helpers/process.py +0 -20
- bbot/core/helpers/regex.py +1 -1
- bbot/core/helpers/regexes.py +12 -6
- bbot/core/helpers/validators.py +1 -2
- bbot/core/helpers/web/client.py +1 -1
- bbot/core/helpers/web/engine.py +1 -2
- bbot/core/helpers/web/web.py +4 -114
- bbot/core/helpers/wordcloud.py +5 -5
- bbot/core/modules.py +36 -27
- bbot/core/multiprocess.py +58 -0
- bbot/core/shared_deps.py +46 -3
- bbot/db/sql/models.py +147 -0
- bbot/defaults.yml +12 -10
- bbot/modules/anubisdb.py +2 -2
- bbot/modules/apkpure.py +63 -0
- bbot/modules/azure_tenant.py +2 -2
- bbot/modules/baddns.py +35 -19
- bbot/modules/baddns_direct.py +92 -0
- bbot/modules/baddns_zone.py +3 -8
- bbot/modules/badsecrets.py +4 -3
- bbot/modules/base.py +195 -51
- bbot/modules/bevigil.py +7 -7
- bbot/modules/binaryedge.py +7 -4
- bbot/modules/bufferoverrun.py +47 -0
- bbot/modules/builtwith.py +6 -10
- bbot/modules/bypass403.py +5 -5
- bbot/modules/c99.py +10 -7
- bbot/modules/censys.py +9 -13
- bbot/modules/certspotter.py +5 -3
- bbot/modules/chaos.py +9 -7
- bbot/modules/code_repository.py +1 -0
- bbot/modules/columbus.py +3 -3
- bbot/modules/crt.py +5 -3
- bbot/modules/deadly/dastardly.py +1 -1
- bbot/modules/deadly/ffuf.py +9 -9
- bbot/modules/deadly/nuclei.py +3 -3
- bbot/modules/deadly/vhost.py +4 -3
- bbot/modules/dehashed.py +1 -1
- bbot/modules/digitorus.py +1 -1
- bbot/modules/dnsbimi.py +145 -0
- bbot/modules/dnscaa.py +3 -3
- bbot/modules/dnsdumpster.py +4 -4
- bbot/modules/dnstlsrpt.py +144 -0
- bbot/modules/docker_pull.py +7 -5
- bbot/modules/dockerhub.py +2 -2
- bbot/modules/dotnetnuke.py +20 -21
- bbot/modules/emailformat.py +1 -1
- bbot/modules/extractous.py +122 -0
- bbot/modules/filedownload.py +9 -7
- bbot/modules/fullhunt.py +7 -4
- bbot/modules/generic_ssrf.py +5 -5
- bbot/modules/github_codesearch.py +3 -2
- bbot/modules/github_org.py +4 -4
- bbot/modules/github_workflows.py +4 -4
- bbot/modules/gitlab.py +2 -5
- bbot/modules/google_playstore.py +93 -0
- bbot/modules/gowitness.py +48 -50
- bbot/modules/hackertarget.py +5 -3
- bbot/modules/host_header.py +5 -5
- bbot/modules/httpx.py +1 -4
- bbot/modules/hunterio.py +3 -9
- bbot/modules/iis_shortnames.py +19 -30
- bbot/modules/internal/cloudcheck.py +29 -12
- bbot/modules/internal/dnsresolve.py +22 -22
- bbot/modules/internal/excavate.py +97 -59
- bbot/modules/internal/speculate.py +41 -32
- bbot/modules/internetdb.py +4 -2
- bbot/modules/ip2location.py +3 -5
- bbot/modules/ipneighbor.py +1 -1
- bbot/modules/ipstack.py +3 -8
- bbot/modules/jadx.py +87 -0
- bbot/modules/leakix.py +11 -10
- bbot/modules/myssl.py +2 -2
- bbot/modules/newsletters.py +2 -2
- bbot/modules/otx.py +5 -3
- bbot/modules/output/asset_inventory.py +7 -7
- bbot/modules/output/base.py +1 -1
- bbot/modules/output/csv.py +1 -1
- bbot/modules/output/http.py +20 -14
- bbot/modules/output/mysql.py +51 -0
- bbot/modules/output/neo4j.py +7 -2
- bbot/modules/output/postgres.py +49 -0
- bbot/modules/output/slack.py +0 -1
- bbot/modules/output/sqlite.py +29 -0
- bbot/modules/output/stdout.py +2 -2
- bbot/modules/output/teams.py +107 -6
- bbot/modules/paramminer_headers.py +8 -11
- bbot/modules/passivetotal.py +13 -13
- bbot/modules/portscan.py +32 -6
- bbot/modules/postman.py +50 -126
- bbot/modules/postman_download.py +220 -0
- bbot/modules/rapiddns.py +3 -8
- bbot/modules/report/asn.py +18 -11
- bbot/modules/robots.py +3 -3
- bbot/modules/securitytrails.py +7 -10
- bbot/modules/securitytxt.py +1 -1
- bbot/modules/shodan_dns.py +7 -9
- bbot/modules/sitedossier.py +1 -1
- bbot/modules/skymem.py +2 -2
- bbot/modules/social.py +2 -1
- bbot/modules/subdomaincenter.py +1 -1
- bbot/modules/subdomainradar.py +160 -0
- bbot/modules/telerik.py +8 -8
- bbot/modules/templates/bucket.py +1 -1
- bbot/modules/templates/github.py +22 -14
- bbot/modules/templates/postman.py +21 -0
- bbot/modules/templates/shodan.py +14 -13
- bbot/modules/templates/sql.py +95 -0
- bbot/modules/templates/subdomain_enum.py +51 -16
- bbot/modules/templates/webhook.py +2 -4
- bbot/modules/trickest.py +8 -37
- bbot/modules/trufflehog.py +10 -12
- bbot/modules/url_manipulation.py +3 -3
- bbot/modules/urlscan.py +1 -1
- bbot/modules/viewdns.py +1 -1
- bbot/modules/virustotal.py +8 -30
- bbot/modules/wafw00f.py +1 -1
- bbot/modules/wayback.py +1 -1
- bbot/modules/wpscan.py +17 -11
- bbot/modules/zoomeye.py +11 -6
- bbot/presets/baddns-thorough.yml +12 -0
- bbot/presets/fast.yml +16 -0
- bbot/presets/kitchen-sink.yml +1 -2
- bbot/presets/spider.yml +4 -0
- bbot/presets/subdomain-enum.yml +7 -7
- bbot/presets/web/dotnet-audit.yml +0 -1
- bbot/scanner/manager.py +5 -16
- bbot/scanner/preset/args.py +46 -26
- bbot/scanner/preset/environ.py +7 -2
- bbot/scanner/preset/path.py +7 -4
- bbot/scanner/preset/preset.py +36 -23
- bbot/scanner/scanner.py +172 -62
- bbot/scanner/target.py +236 -434
- bbot/scripts/docs.py +1 -1
- bbot/test/bbot_fixtures.py +13 -3
- bbot/test/conftest.py +132 -100
- bbot/test/fastapi_test.py +17 -0
- bbot/test/owasp_mastg.apk +0 -0
- bbot/test/run_tests.sh +4 -4
- bbot/test/test.conf +2 -0
- bbot/test/test_step_1/test__module__tests.py +0 -1
- bbot/test/test_step_1/test_bbot_fastapi.py +79 -0
- bbot/test/test_step_1/test_bloom_filter.py +2 -1
- bbot/test/test_step_1/test_cli.py +138 -64
- bbot/test/test_step_1/test_dns.py +61 -27
- bbot/test/test_step_1/test_engine.py +17 -19
- bbot/test/test_step_1/test_events.py +183 -30
- bbot/test/test_step_1/test_helpers.py +64 -29
- bbot/test/test_step_1/test_manager_deduplication.py +1 -1
- bbot/test/test_step_1/test_manager_scope_accuracy.py +333 -330
- bbot/test/test_step_1/test_modules_basic.py +68 -70
- bbot/test/test_step_1/test_presets.py +183 -100
- bbot/test/test_step_1/test_python_api.py +7 -2
- bbot/test/test_step_1/test_regexes.py +35 -5
- bbot/test/test_step_1/test_scan.py +39 -5
- bbot/test/test_step_1/test_scope.py +4 -3
- bbot/test/test_step_1/test_target.py +242 -145
- bbot/test/test_step_1/test_web.py +14 -10
- bbot/test/test_step_2/module_tests/base.py +15 -7
- bbot/test/test_step_2/module_tests/test_module_anubisdb.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_apkpure.py +71 -0
- bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +0 -1
- bbot/test/test_step_2/module_tests/test_module_azure_realm.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_baddns.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_baddns_direct.py +62 -0
- bbot/test/test_step_2/module_tests/test_module_bevigil.py +29 -2
- bbot/test/test_step_2/module_tests/test_module_binaryedge.py +4 -2
- bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_bucket_azure.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +35 -0
- bbot/test/test_step_2/module_tests/test_module_builtwith.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_bypass403.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_c99.py +126 -0
- bbot/test/test_step_2/module_tests/test_module_censys.py +4 -1
- bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +4 -0
- bbot/test/test_step_2/module_tests/test_module_code_repository.py +11 -1
- bbot/test/test_step_2/module_tests/test_module_columbus.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_credshed.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_dastardly.py +2 -1
- bbot/test/test_step_2/module_tests/test_module_dehashed.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_digitorus.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_discord.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +103 -0
- bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +9 -10
- bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +64 -0
- bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py +0 -8
- bbot/test/test_step_2/module_tests/test_module_excavate.py +28 -48
- bbot/test/test_step_2/module_tests/test_module_extractous.py +54 -0
- bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_filedownload.py +14 -14
- bbot/test/test_step_2/module_tests/test_module_git_clone.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_github_org.py +19 -8
- bbot/test/test_step_2/module_tests/test_module_github_workflows.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_gitlab.py +9 -4
- bbot/test/test_step_2/module_tests/test_module_google_playstore.py +83 -0
- bbot/test/test_step_2/module_tests/test_module_gowitness.py +4 -6
- bbot/test/test_step_2/module_tests/test_module_host_header.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_http.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_httpx.py +10 -8
- bbot/test/test_step_2/module_tests/test_module_hunterio.py +68 -4
- bbot/test/test_step_2/module_tests/test_module_jadx.py +55 -0
- bbot/test/test_step_2/module_tests/test_module_json.py +22 -9
- bbot/test/test_step_2/module_tests/test_module_leakix.py +7 -3
- bbot/test/test_step_2/module_tests/test_module_mysql.py +76 -0
- bbot/test/test_step_2/module_tests/test_module_myssl.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_neo4j.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_newsletters.py +16 -16
- bbot/test/test_step_2/module_tests/test_module_ntlm.py +8 -7
- bbot/test/test_step_2/module_tests/test_module_oauth.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_otx.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +0 -6
- bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +2 -9
- bbot/test/test_step_2/module_tests/test_module_passivetotal.py +3 -1
- bbot/test/test_step_2/module_tests/test_module_pgp.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_portscan.py +9 -8
- bbot/test/test_step_2/module_tests/test_module_postgres.py +74 -0
- bbot/test/test_step_2/module_tests/test_module_postman.py +84 -253
- bbot/test/test_step_2/module_tests/test_module_postman_download.py +439 -0
- bbot/test/test_step_2/module_tests/test_module_rapiddns.py +93 -1
- bbot/test/test_step_2/module_tests/test_module_shodan_dns.py +20 -1
- bbot/test/test_step_2/module_tests/test_module_sitedossier.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_smuggler.py +14 -14
- bbot/test/test_step_2/module_tests/test_module_social.py +11 -1
- bbot/test/test_step_2/module_tests/test_module_speculate.py +4 -8
- bbot/test/test_step_2/module_tests/test_module_splunk.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_sqlite.py +18 -0
- bbot/test/test_step_2/module_tests/test_module_sslcert.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_stdout.py +5 -3
- bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_subdomainradar.py +208 -0
- bbot/test/test_step_2/module_tests/test_module_subdomains.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_teams.py +8 -6
- bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_trufflehog.py +317 -14
- bbot/test/test_step_2/module_tests/test_module_viewdns.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_wayback.py +1 -1
- bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py +2 -2
- bbot/wordlists/devops_mutations.txt +1 -1
- bbot/wordlists/ffuf_shortname_candidates.txt +1 -1
- bbot/wordlists/nameservers.txt +1 -1
- bbot/wordlists/paramminer_headers.txt +1 -1
- bbot/wordlists/paramminer_parameters.txt +1 -1
- bbot/wordlists/raft-small-extensions-lowercase_CLEANED.txt +1 -1
- bbot/wordlists/valid_url_schemes.txt +1 -1
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/METADATA +48 -18
- bbot-2.3.0.5401rc0.dist-info/RECORD +421 -0
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/WHEEL +1 -1
- bbot/modules/unstructured.py +0 -163
- bbot/test/test_step_2/module_tests/test_module_unstructured.py +0 -102
- bbot-2.0.1.4720rc0.dist-info/RECORD +0 -387
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/LICENSE +0 -0
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/entry_points.txt +0 -0
bbot/modules/postman_download.py
ADDED
@@ -0,0 +1,220 @@
```python
import zipfile
import json
from pathlib import Path
from bbot.modules.templates.postman import postman


class postman_download(postman):
    watched_events = ["CODE_REPOSITORY"]
    produced_events = ["FILESYSTEM"]
    flags = ["passive", "subdomain-enum", "safe", "code-enum"]
    meta = {
        "description": "Download workspaces, collections, requests from Postman",
        "created_date": "2024-09-07",
        "author": "@domwhewell-sage",
    }
    options = {"output_folder": "", "api_key": ""}
    options_desc = {"output_folder": "Folder to download postman workspaces to", "api_key": "Postman API Key"}
    scope_distance_modifier = 2

    async def setup(self):
        output_folder = self.config.get("output_folder")
        if output_folder:
            self.output_dir = Path(output_folder) / "postman_workspaces"
        else:
            self.output_dir = self.scan.home / "postman_workspaces"
        self.helpers.mkdir(self.output_dir)
        return await self.require_api_key()

    def prepare_api_request(self, url, kwargs):
        kwargs["headers"]["X-Api-Key"] = self.api_key
        return url, kwargs

    async def filter_event(self, event):
        if event.type == "CODE_REPOSITORY":
            if "postman" not in event.tags:
                return False, "event is not a postman workspace"
        return True

    async def handle_event(self, event):
        repo_url = event.data.get("url")
        workspace_id = await self.get_workspace_id(repo_url)
        if workspace_id:
            self.verbose(f"Found workspace ID {workspace_id} for {repo_url}")
            data = await self.request_workspace(workspace_id)
            workspace = data["workspace"]
            environments = data["environments"]
            collections = data["collections"]
            in_scope = await self.validate_workspace(workspace, environments, collections)
            if in_scope:
                workspace_path = self.save_workspace(workspace, environments, collections)
                if workspace_path:
                    self.verbose(f"Downloaded workspace from {repo_url} to {workspace_path}")
                    codebase_event = self.make_event(
                        {"path": str(workspace_path)}, "FILESYSTEM", tags=["postman", "workspace"], parent=event
                    )
                    await self.emit_event(
                        codebase_event,
                        context=f"{{module}} downloaded postman workspace at {repo_url} to {{event.type}}: {workspace_path}",
                    )
            else:
                self.verbose(
                    f"Failed to validate {repo_url} is in our scope as it does not contain any in-scope dns_names / emails, skipping download"
                )

    async def get_workspace_id(self, repo_url):
        workspace_id = ""
        profile = repo_url.split("/")[-2]
        name = repo_url.split("/")[-1]
        url = f"{self.base_url}/ws/proxy"
        json = {
            "service": "workspaces",
            "method": "GET",
            "path": f"/workspaces?handle={profile}&slug={name}",
        }
        r = await self.helpers.request(url, method="POST", json=json, headers=self.headers)
        if r is None:
            return workspace_id
        status_code = getattr(r, "status_code", 0)
        try:
            json = r.json()
        except Exception as e:
            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
            return workspace_id
        data = json.get("data", [])
        if len(data) == 1:
            workspace_id = data[0]["id"]
        return workspace_id

    async def request_workspace(self, id):
        data = {"workspace": {}, "environments": [], "collections": []}
        workspace = await self.get_workspace(id)
        if workspace:
            # Main Workspace
            name = workspace["name"]
            data["workspace"] = workspace

            # Workspace global variables
            self.verbose(f"Downloading globals for workspace {name}")
            globals = await self.get_globals(id)
            data["environments"].append(globals)

            # Workspace Environments
            workspace_environments = workspace.get("environments", [])
            if workspace_environments:
                self.verbose(f"Downloading environments for workspace {name}")
                for _ in workspace_environments:
                    environment_id = _["uid"]
                    environment = await self.get_environment(environment_id)
                    data["environments"].append(environment)

            # Workspace Collections
            workspace_collections = workspace.get("collections", [])
            if workspace_collections:
                self.verbose(f"Downloading collections for workspace {name}")
                for _ in workspace_collections:
                    collection_id = _["uid"]
                    collection = await self.get_collection(collection_id)
                    data["collections"].append(collection)
        return data

    async def get_workspace(self, workspace_id):
        workspace = {}
        workspace_url = f"{self.api_url}/workspaces/{workspace_id}"
        r = await self.api_request(workspace_url)
        if r is None:
            return workspace
        status_code = getattr(r, "status_code", 0)
        try:
            json = r.json()
        except Exception as e:
            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
            return workspace
        workspace = json.get("workspace", {})
        return workspace

    async def get_globals(self, workspace_id):
        globals = {}
        globals_url = f"{self.base_url}/workspace/{workspace_id}/globals"
        r = await self.helpers.request(globals_url, headers=self.headers)
        if r is None:
            return globals
        status_code = getattr(r, "status_code", 0)
        try:
            json = r.json()
        except Exception as e:
            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
            return globals
        globals = json.get("data", {})
        return globals

    async def get_environment(self, environment_id):
        environment = {}
        environment_url = f"{self.api_url}/environments/{environment_id}"
        r = await self.api_request(environment_url)
        if r is None:
            return environment
        status_code = getattr(r, "status_code", 0)
        try:
            json = r.json()
        except Exception as e:
            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
            return environment
        environment = json.get("environment", {})
        return environment

    async def get_collection(self, collection_id):
        collection = {}
        collection_url = f"{self.api_url}/collections/{collection_id}"
        r = await self.api_request(collection_url)
        if r is None:
            return collection
        status_code = getattr(r, "status_code", 0)
        try:
            json = r.json()
        except Exception as e:
            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
            return collection
        collection = json.get("collection", {})
        return collection

    async def validate_workspace(self, workspace, environments, collections):
        name = workspace.get("name", "")
        full_wks = str([workspace, environments, collections])
        in_scope_hosts = await self.scan.extract_in_scope_hostnames(full_wks)
        if in_scope_hosts:
            self.verbose(
                f'Found in-scope hostname(s): "{in_scope_hosts}" in workspace {name}, it appears to be in-scope'
            )
            return True
        return False

    def save_workspace(self, workspace, environments, collections):
        zip_path = None
        # Create a folder for the workspace
        name = workspace["name"]
        id = workspace["id"]
        folder = self.output_dir / name
        self.helpers.mkdir(folder)
        zip_path = folder / f"{id}.zip"

        # Main Workspace
        self.add_json_to_zip(zip_path, workspace, f"{name}.postman_workspace.json")

        # Workspace Environments
        if environments:
            for environment in environments:
                environment_id = environment["id"]
                self.add_json_to_zip(zip_path, environment, f"{environment_id}.postman_environment.json")

        # Workspace Collections
        if collections:
            for collection in collections:
                collection_name = collection["info"]["name"]
                self.add_json_to_zip(zip_path, collection, f"{collection_name}.postman_collection.json")
        return zip_path

    def add_json_to_zip(self, zip_path, data, filename):
        with zipfile.ZipFile(zip_path, "a") as zipf:
            json_content = json.dumps(data, indent=4)
            zipf.writestr(filename, json_content)
```
bbot/modules/rapiddns.py
CHANGED
```diff
@@ -15,14 +15,9 @@ class rapiddns(subdomain_enum):
 
     async def request_url(self, query):
         url = f"{self.base_url}/subdomain/{self.helpers.quote(query)}?full=1#result"
-        response = await self.request_with_fail_count(url, timeout=self.http_timeout + 10)
+        response = await self.api_request(url, timeout=self.http_timeout + 10)
         return response
 
-    def parse_results(self, r, query):
-        results = set()
+    async def parse_results(self, r, query):
         text = getattr(r, "text", "")
-        for match in self.helpers.regexes.dns_name_regex.findall(text):
-            match = match.lower()
-            if match.endswith(query):
-                results.add(match)
-        return results
+        return await self.scan.extract_in_scope_hostnames(text)
```
bbot/modules/report/asn.py
CHANGED
```diff
@@ -38,7 +38,7 @@ class asn(BaseReportModule):
 
     async def handle_event(self, event):
         host = event.host
-        if self.cache_get(host) == False:
+        if self.cache_get(host) is False:
             asns, source = await self.get_asn(host)
             if not asns:
                 self.cache_put(self.unknown_asn)
@@ -96,7 +96,7 @@
         for p in self.helpers.ip_network_parents(ip):
             try:
                 self.asn_counts[p] += 1
-                if ret == False:
+                if ret is False:
                     ret = p
             except KeyError:
                 continue
@@ -112,7 +112,7 @@
         for i, source in enumerate(list(self.sources)):
             get_asn_fn = getattr(self, f"get_asn_{source}")
             res = await get_asn_fn(ip)
-            if res == False:
+            if res is False:
                 # demote the current source to lowest priority since it just failed
                 self.sources.append(self.sources.pop(i))
                 self.verbose(f"Failed to contact {source}, retrying")
@@ -125,7 +125,7 @@
         url = f"https://stat.ripe.net/data/network-info/data.json?resource={ip}"
         response = await self.get_url(url, "ASN")
         asns = []
-        if response == False:
+        if response is False:
             return False
         data = response.get("data", {})
         if not data:
@@ -138,7 +138,7 @@
             asn_numbers = []
         for number in asn_numbers:
             asn = await self.get_asn_metadata_ripe(number)
-            if asn == False:
+            if asn is False:
                 return False
             asn["subnet"] = prefix
             asns.append(asn)
@@ -155,7 +155,7 @@
         }
         url = f"https://stat.ripe.net/data/whois/data.json?resource={asn_number}"
         response = await self.get_url(url, "ASN Metadata", cache=True)
-        if response == False:
+        if response is False:
             return False
         data = response.get("data", {})
         if not data:
@@ -187,7 +187,7 @@
         data = await self.get_url(url, "ASN")
         asns = []
         asns_tried = set()
-        if data == False:
+        if data is False:
             return False
         data = data.get("data", {})
         prefixes = data.get("prefixes", [])
@@ -201,13 +201,20 @@
             description = details.get("description") or prefix.get("description") or ""
             country = details.get("country_code") or prefix.get("country_code") or ""
             emails = []
-            if not asn in asns_tried:
+            if asn not in asns_tried:
                 emails = await self.get_emails_bgpview(asn)
-                if emails == False:
+                if emails is False:
                     return False
                 asns_tried.add(asn)
             asns.append(
-                {"asn": asn, "subnet": subnet, "name": name, "description": description, "country": country, "emails": emails}
+                {
+                    "asn": asn,
+                    "subnet": subnet,
+                    "name": name,
+                    "description": description,
+                    "country": country,
+                    "emails": emails,
+                }
             )
         if not asns:
             self.debug(f'No results for "{ip}"')
@@ -217,7 +224,7 @@
         contacts = []
         url = f"https://api.bgpview.io/asn/{asn}"
         data = await self.get_url(url, "ASN metadata", cache=True)
-        if data == False:
+        if data is False:
             return False
         data = data.get("data", {})
         if not data:
```
bbot/modules/robots.py
CHANGED
```diff
@@ -33,14 +33,14 @@ class robots(BaseModule):
         for l in lines:
             if len(l) > 0:
                 split_l = l.split(": ")
-                if (split_l[0].lower() == "allow" and self.config.get("include_allow") == True) or (
-                    split_l[0].lower() == "disallow" and self.config.get("include_disallow") == True
+                if (split_l[0].lower() == "allow" and self.config.get("include_allow") is True) or (
+                    split_l[0].lower() == "disallow" and self.config.get("include_disallow") is True
                 ):
                     unverified_url = f"{host}{split_l[1].lstrip('/')}".replace(
                         "*", self.helpers.rand_string(4)
                     )
 
-                elif split_l[0].lower() == "sitemap" and self.config.get("include_sitemap") == True:
+                elif split_l[0].lower() == "sitemap" and self.config.get("include_sitemap") is True:
                     unverified_url = split_l[1]
                 else:
                     continue
```
bbot/modules/securitytrails.py
CHANGED
```diff
@@ -15,24 +15,21 @@ class securitytrails(subdomain_enum_apikey):
     options_desc = {"api_key": "SecurityTrails API key"}
 
     base_url = "https://api.securitytrails.com/v1"
+    ping_url = f"{base_url}/ping?apikey={{api_key}}"
 
     async def setup(self):
         self.limit = 100
         return await super().setup()
 
-    async def ping(self):
-        url = f"{self.base_url}/ping?apikey={self.api_key}"
-        r = await self.request_with_fail_count(url)
-        resp_content = getattr(r, "text", "")
-        assert getattr(r, "status_code", 0) == 200, resp_content
-
     async def request_url(self, query):
-        url = f"{self.base_url}/domain/{query}/subdomains?apikey={self.api_key}"
-        response = await self.request_with_fail_count(url)
+        url = f"{self.base_url}/domain/{query}/subdomains?apikey={{api_key}}"
+        response = await self.api_request(url)
         return response
 
-    def parse_results(self, r, query):
+    async def parse_results(self, r, query):
+        results = set()
         j = r.json()
         if isinstance(j, dict):
             for host in j.get("subdomains", []):
-                yield f"{host}.{query}"
+                results.add(f"{host}.{query}")
+        return results
```
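A note on the doubled braces in `ping_url` and `request_url`: inside an f-string, `{{api_key}}` escapes to a literal `{api_key}`, leaving a placeholder in the final URL for the API-key machinery to fill in later. The `.format()` call below is illustrative; where the substitution actually happens is not shown in this diff:

```python
base_url = "https://api.securitytrails.com/v1"
query = "example.com"

# The f-string resolves {base_url} and {query}, but the doubled braces
# survive as a literal {api_key} placeholder
url = f"{base_url}/domain/{query}/subdomains?apikey={{api_key}}"
print(url)
# https://api.securitytrails.com/v1/domain/example.com/subdomains?apikey={api_key}

# Illustrative substitution step:
print(url.format(api_key="SECRET"))
# https://api.securitytrails.com/v1/domain/example.com/subdomains?apikey=SECRET
```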
bbot/modules/securitytxt.py
CHANGED
```diff
@@ -121,7 +121,7 @@ class securitytxt(BaseModule):
                 start, end = match.span()
                 found_url = v[start:end]
 
-                if found_url != url and self._urls == True:
+                if found_url != url and self._urls is True:
                     await self.emit_event(found_url, "URL_UNVERIFIED", parent=event, tags=tags)
 
 
```
bbot/modules/shodan_dns.py
CHANGED
```diff
@@ -16,13 +16,11 @@ class shodan_dns(shodan):
 
     base_url = "https://api.shodan.io"
 
-    async def request_url(self, query):
-        url = f"{self.base_url}/dns/domain/{self.helpers.quote(query)}?key={self.api_key}"
-        response = await self.request_with_fail_count(url)
-        return response
+    async def handle_event(self, event):
+        await self.handle_event_paginated(event)
 
-    def parse_results(self, r, query):
-        json = r.json()
-        if json:
-            for hostname in json.get("subdomains", []):
-                yield f"{hostname}.{query}"
+    def make_url(self, query):
+        return f"{self.base_url}/dns/domain/{self.helpers.quote(query)}?key={{api_key}}&page={{page}}"
+
+    async def parse_results(self, json, query):
+        return [f"{sub}.{query}" for sub in json.get("subdomains", [])]
```
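For context on the `{{page}}` placeholder in `make_url`: `handle_event_paginated` comes from the subdomain_enum template (not shown in this diff) and presumably substitutes successive page numbers into the URL until a page returns no results. A rough, illustrative sketch of that kind of loop, not the template's actual code:

```python
import asyncio


async def fetch_paginated(url_template, fetch, query, max_pages=5):
    # Illustrative only: fill a make_url-style template with successive
    # page numbers until a page comes back empty
    results = []
    for page in range(1, max_pages + 1):
        url = url_template.format(api_key="SECRET", page=page)
        subdomains = await fetch(url)
        if not subdomains:
            break
        results.extend(f"{sub}.{query}" for sub in subdomains)
    return results


async def fake_fetch(url):
    # Stand-in for a real HTTP request
    return ["www", "mail"] if "page=1" in url else []


template = "https://api.shodan.io/dns/domain/example.com?key={api_key}&page={page}"
print(asyncio.run(fetch_paginated(template, fake_fetch, "example.com")))
# ['www.example.com', 'mail.example.com']
```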
bbot/modules/sitedossier.py
CHANGED
bbot/modules/skymem.py
CHANGED
```diff
@@ -24,7 +24,7 @@ class skymem(emailformat):
         _, query = self.helpers.split_domain(event.data)
         # get first page
         url = f"{self.base_url}/srch?q={self.helpers.quote(query)}"
-        r = await self.request_with_fail_count(url)
+        r = await self.api_request(url)
         if not r:
             return
         responses = [r]
@@ -34,7 +34,7 @@
         if domain_ids:
             domain_id = domain_ids[0]
             for page in range(2, 22):
-                r2 = await self.request_with_fail_count(f"{self.base_url}/domain/{domain_id}?p={page}")
+                r2 = await self.api_request(f"{self.base_url}/domain/{domain_id}?p={page}")
                 if not r2:
                     continue
                 responses.append(r2)
```
bbot/modules/social.py
CHANGED
```diff
@@ -25,6 +25,7 @@ class social(BaseModule):
         "discord": (r"discord.gg/([a-zA-Z0-9_-]+)", True),
         "docker": (r"hub.docker.com/[ru]/([a-zA-Z0-9_-]+)", False),
         "huggingface": (r"huggingface.co/([a-zA-Z0-9_-]+)", False),
+        "postman": (r"www.postman.com/([a-zA-Z0-9_-]+)", False),
     }
 
     scope_distance_modifier = 1
@@ -44,7 +45,7 @@
             url = f"https://{url}"
             event_data = {"platform": platform, "url": url, "profile_name": profile_name}
             # only emit if the same event isn't already in the parent chain
-            if not any([e.type == "SOCIAL" and e.data == event_data for e in event.get_parents()]):
+            if not any(e.type == "SOCIAL" and e.data == event_data for e in event.get_parents()):
                 social_event = self.make_event(
                     event_data,
                     "SOCIAL",
```
bbot/modules/subdomaincenter.py
CHANGED
bbot/modules/subdomainradar.py
ADDED
@@ -0,0 +1,160 @@
```python
import time
import asyncio

from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey


class SubdomainRadar(subdomain_enum_apikey):
    watched_events = ["DNS_NAME"]
    produced_events = ["DNS_NAME"]
    flags = ["subdomain-enum", "passive", "safe"]
    meta = {
        "description": "Query the Subdomain API for subdomains",
        "created_date": "2022-07-08",
        "author": "@TheTechromancer",
        "auth_required": True,
    }
    options = {"api_key": "", "group": "fast", "timeout": 120}
    options_desc = {
        "api_key": "SubDomainRadar.io API key",
        "group": "The enumeration group to use. Choose from fast, medium, deep",
        "timeout": "Timeout in seconds",
    }

    base_url = "https://api.subdomainradar.io"
    ping_url = f"{base_url}/profile"
    group_choices = ("fast", "medium", "deep")

    # set this really high so the poll loop finishes as soon as possible
    _qsize = 9999999

    async def setup(self):
        self.group = self.config.get("group", "fast").strip().lower()
        self.timeout = self.config.get("timeout", 120)
        if self.group not in self.group_choices:
            return False, f'Invalid group: "{self.group}", please choose from {",".join(self.group_choices)}'
        success, reason = await self.require_api_key()
        if not success:
            return success, reason
        # convert groups to enumerators
        enumerators = {}
        response = await self.api_request(f"{self.base_url}/enumerators/groups")
        status_code = getattr(response, "status_code", 0)
        if status_code != 200:
            return False, f"Failed to get enumerators: (HTTP status code: {status_code})"
        else:
            try:
                j = response.json()
            except Exception:
                return False, "Failed to get enumerators: failed to parse response as JSON"
            for group in j:
                group_name = group.get("name", "").strip().lower()
                if group_name:
                    group_enumerators = []
                    for enumerator in group.get("enumerators", []):
                        enumerator_name = enumerator.get("display_name", "")
                        if enumerator_name:
                            group_enumerators.append(enumerator_name)
                    if group_enumerators:
                        enumerators[group_name] = group_enumerators

        self.enumerators = enumerators.get(self.group, [])
        if not self.enumerators:
            return False, f'No enumerators found for group: "{self.group}" ({self.enumerators})'

        self.enum_tasks = {}
        self.poll_task = asyncio.create_task(self.task_poll_loop())

        return True

    def prepare_api_request(self, url, kwargs):
        if self.api_key:
            kwargs["headers"] = {"Authorization": f"Bearer {self.api_key}"}
        return url, kwargs

    async def handle_event(self, event):
        query = self.make_query(event)
        # start enumeration task
        url = f"{self.base_url}/enumerate"
        response = await self.api_request(
            url, method="POST", json={"domains": [query], "enumerators": self.enumerators}
        )
        try:
            j = response.json()
        except Exception:
            self.warning(f"Failed to parse response as JSON: {getattr(response, 'text', '')}")
            return
        task_id = j.get("tasks", {}).get(query, "")
        if not task_id:
            self.warning(f"Failed to start enumeration for {query}")
            return
        self.enum_tasks[query] = (task_id, time.time(), event)
        self.debug(f"Started enumeration task for {query}; task id: {task_id}")

    async def task_poll_loop(self):
        # async with self._task_counter.count(f"{self.name}.task_poll_loop()"):
        while 1:
            for query, (task_id, start_time, event) in list(self.enum_tasks.items()):
                url = f"{self.base_url}/tasks/{task_id}"
                response = await self.api_request(url)
                if getattr(response, "status_code", 0) == 200:
                    finished = await self.parse_response(response, query, event)
                    if finished:
                        self.enum_tasks.pop(query)
                        continue
                # if scan is finishing, consider timeout
                if self.scan.status == "FINISHING":
                    if start_time + self.timeout < time.time():
                        self.enum_tasks.pop(query)
                        self.info(f"Enumeration task for {query} timed out")

            if self.scan.status == "FINISHING" and not self.enum_tasks:
                break
            await self.helpers.sleep(5)

    async def parse_response(self, response, query, event):
        j = response.json()
        status = j.get("status", "")
        if status.lower() == "completed":
            for subdomain in j.get("subdomains", []):
                hostname = subdomain.get("subdomain", "")
                if hostname and hostname.endswith(f".{query}") and not hostname == event.data:
                    await self.emit_event(
                        hostname,
                        "DNS_NAME",
                        event,
                        abort_if=self.abort_if,
                        context=f'{{module}} searched SubDomainRadar.io API for "{query}" and found {{event.type}}: {{event.data}}',
                    )
            return True
        return False

    async def finish(self):
        start_time = time.time()
        while self.enum_tasks and not self.poll_task.done():
            elapsed_time = time.time() - start_time
            if elapsed_time >= self.timeout:
                self.warning(f"Timed out waiting for the following tasks to finish: {self.enum_tasks}")
                for query, (task_id, _, _) in list(self.enum_tasks.items()):
                    url = f"{self.base_url}/tasks/{task_id}"
                    self.warning(f" - {query} ({url})")
                break

            self.verbose(
                f"Waiting for enumeration task poll loop to finish ({int(elapsed_time)}/{self.timeout} seconds)"
            )

            try:
                # Wait for the task to complete or for 10 seconds, whichever comes first
                await asyncio.wait_for(asyncio.shield(self.poll_task), timeout=10)
            except asyncio.TimeoutError:
                # This just means our 10-second check has elapsed, not that the task failed
                pass

        # Cancel the poll_task if it's still running
        if not self.poll_task.done():
            self.poll_task.cancel()
            try:
                await self.poll_task
            except asyncio.CancelledError:
                pass
```