bbot 2.0.1.4720rc0__py3-none-any.whl → 2.3.0.5397rc0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of bbot might be problematic.
- bbot/__init__.py +1 -1
- bbot/cli.py +3 -7
- bbot/core/config/files.py +0 -1
- bbot/core/config/logger.py +34 -4
- bbot/core/core.py +21 -4
- bbot/core/engine.py +9 -8
- bbot/core/event/base.py +131 -52
- bbot/core/helpers/bloom.py +10 -3
- bbot/core/helpers/command.py +8 -7
- bbot/core/helpers/depsinstaller/installer.py +31 -13
- bbot/core/helpers/diff.py +10 -10
- bbot/core/helpers/dns/brute.py +7 -4
- bbot/core/helpers/dns/dns.py +1 -2
- bbot/core/helpers/dns/engine.py +4 -6
- bbot/core/helpers/dns/helpers.py +2 -2
- bbot/core/helpers/dns/mock.py +0 -1
- bbot/core/helpers/files.py +1 -1
- bbot/core/helpers/helper.py +7 -4
- bbot/core/helpers/interactsh.py +3 -3
- bbot/core/helpers/libmagic.py +65 -0
- bbot/core/helpers/misc.py +65 -22
- bbot/core/helpers/names_generator.py +17 -3
- bbot/core/helpers/process.py +0 -20
- bbot/core/helpers/regex.py +1 -1
- bbot/core/helpers/regexes.py +12 -6
- bbot/core/helpers/validators.py +1 -2
- bbot/core/helpers/web/client.py +1 -1
- bbot/core/helpers/web/engine.py +1 -2
- bbot/core/helpers/web/web.py +4 -114
- bbot/core/helpers/wordcloud.py +5 -5
- bbot/core/modules.py +36 -27
- bbot/core/multiprocess.py +58 -0
- bbot/core/shared_deps.py +46 -3
- bbot/db/sql/models.py +147 -0
- bbot/defaults.yml +11 -9
- bbot/modules/anubisdb.py +2 -2
- bbot/modules/apkpure.py +63 -0
- bbot/modules/azure_tenant.py +2 -2
- bbot/modules/baddns.py +35 -19
- bbot/modules/baddns_direct.py +92 -0
- bbot/modules/baddns_zone.py +3 -8
- bbot/modules/badsecrets.py +4 -3
- bbot/modules/base.py +195 -51
- bbot/modules/bevigil.py +7 -7
- bbot/modules/binaryedge.py +7 -4
- bbot/modules/bufferoverrun.py +47 -0
- bbot/modules/builtwith.py +6 -10
- bbot/modules/bypass403.py +5 -5
- bbot/modules/c99.py +10 -7
- bbot/modules/censys.py +9 -13
- bbot/modules/certspotter.py +5 -3
- bbot/modules/chaos.py +9 -7
- bbot/modules/code_repository.py +1 -0
- bbot/modules/columbus.py +3 -3
- bbot/modules/crt.py +5 -3
- bbot/modules/deadly/dastardly.py +1 -1
- bbot/modules/deadly/ffuf.py +9 -9
- bbot/modules/deadly/nuclei.py +3 -3
- bbot/modules/deadly/vhost.py +4 -3
- bbot/modules/dehashed.py +1 -1
- bbot/modules/digitorus.py +1 -1
- bbot/modules/dnsbimi.py +145 -0
- bbot/modules/dnscaa.py +3 -3
- bbot/modules/dnsdumpster.py +4 -4
- bbot/modules/dnstlsrpt.py +144 -0
- bbot/modules/docker_pull.py +7 -5
- bbot/modules/dockerhub.py +2 -2
- bbot/modules/dotnetnuke.py +18 -19
- bbot/modules/emailformat.py +1 -1
- bbot/modules/extractous.py +122 -0
- bbot/modules/filedownload.py +9 -7
- bbot/modules/fullhunt.py +7 -4
- bbot/modules/generic_ssrf.py +5 -5
- bbot/modules/github_codesearch.py +3 -2
- bbot/modules/github_org.py +4 -4
- bbot/modules/github_workflows.py +4 -4
- bbot/modules/gitlab.py +2 -5
- bbot/modules/google_playstore.py +93 -0
- bbot/modules/gowitness.py +48 -50
- bbot/modules/hackertarget.py +5 -3
- bbot/modules/host_header.py +5 -5
- bbot/modules/httpx.py +1 -4
- bbot/modules/hunterio.py +3 -9
- bbot/modules/iis_shortnames.py +19 -30
- bbot/modules/internal/cloudcheck.py +27 -12
- bbot/modules/internal/dnsresolve.py +22 -20
- bbot/modules/internal/excavate.py +85 -48
- bbot/modules/internal/speculate.py +41 -32
- bbot/modules/internetdb.py +4 -2
- bbot/modules/ip2location.py +3 -5
- bbot/modules/ipneighbor.py +1 -1
- bbot/modules/ipstack.py +3 -8
- bbot/modules/jadx.py +87 -0
- bbot/modules/leakix.py +11 -10
- bbot/modules/myssl.py +2 -2
- bbot/modules/newsletters.py +2 -2
- bbot/modules/otx.py +5 -3
- bbot/modules/output/asset_inventory.py +7 -7
- bbot/modules/output/base.py +1 -1
- bbot/modules/output/csv.py +1 -1
- bbot/modules/output/http.py +20 -14
- bbot/modules/output/mysql.py +51 -0
- bbot/modules/output/neo4j.py +7 -2
- bbot/modules/output/postgres.py +49 -0
- bbot/modules/output/slack.py +0 -1
- bbot/modules/output/sqlite.py +29 -0
- bbot/modules/output/stdout.py +2 -2
- bbot/modules/output/teams.py +107 -6
- bbot/modules/paramminer_headers.py +5 -8
- bbot/modules/passivetotal.py +13 -13
- bbot/modules/portscan.py +32 -6
- bbot/modules/postman.py +50 -126
- bbot/modules/postman_download.py +220 -0
- bbot/modules/rapiddns.py +3 -8
- bbot/modules/report/asn.py +11 -11
- bbot/modules/robots.py +3 -3
- bbot/modules/securitytrails.py +7 -10
- bbot/modules/securitytxt.py +1 -1
- bbot/modules/shodan_dns.py +7 -9
- bbot/modules/sitedossier.py +1 -1
- bbot/modules/skymem.py +2 -2
- bbot/modules/social.py +2 -1
- bbot/modules/subdomaincenter.py +1 -1
- bbot/modules/subdomainradar.py +160 -0
- bbot/modules/telerik.py +8 -8
- bbot/modules/templates/bucket.py +1 -1
- bbot/modules/templates/github.py +22 -14
- bbot/modules/templates/postman.py +21 -0
- bbot/modules/templates/shodan.py +14 -13
- bbot/modules/templates/sql.py +95 -0
- bbot/modules/templates/subdomain_enum.py +51 -16
- bbot/modules/templates/webhook.py +2 -4
- bbot/modules/trickest.py +8 -37
- bbot/modules/trufflehog.py +10 -12
- bbot/modules/url_manipulation.py +3 -3
- bbot/modules/urlscan.py +1 -1
- bbot/modules/viewdns.py +1 -1
- bbot/modules/virustotal.py +8 -30
- bbot/modules/wafw00f.py +1 -1
- bbot/modules/wayback.py +1 -1
- bbot/modules/wpscan.py +17 -11
- bbot/modules/zoomeye.py +11 -6
- bbot/presets/baddns-thorough.yml +12 -0
- bbot/presets/fast.yml +16 -0
- bbot/presets/kitchen-sink.yml +1 -0
- bbot/presets/spider.yml +4 -0
- bbot/presets/subdomain-enum.yml +7 -7
- bbot/scanner/manager.py +5 -16
- bbot/scanner/preset/args.py +44 -26
- bbot/scanner/preset/environ.py +7 -2
- bbot/scanner/preset/path.py +7 -4
- bbot/scanner/preset/preset.py +36 -23
- bbot/scanner/scanner.py +172 -62
- bbot/scanner/target.py +236 -434
- bbot/scripts/docs.py +1 -1
- bbot/test/bbot_fixtures.py +13 -3
- bbot/test/conftest.py +132 -100
- bbot/test/fastapi_test.py +17 -0
- bbot/test/owasp_mastg.apk +0 -0
- bbot/test/run_tests.sh +4 -4
- bbot/test/test.conf +2 -0
- bbot/test/test_step_1/test_bbot_fastapi.py +82 -0
- bbot/test/test_step_1/test_bloom_filter.py +2 -0
- bbot/test/test_step_1/test_cli.py +138 -64
- bbot/test/test_step_1/test_dns.py +62 -25
- bbot/test/test_step_1/test_engine.py +17 -17
- bbot/test/test_step_1/test_events.py +183 -28
- bbot/test/test_step_1/test_helpers.py +64 -28
- bbot/test/test_step_1/test_manager_deduplication.py +1 -1
- bbot/test/test_step_1/test_manager_scope_accuracy.py +333 -330
- bbot/test/test_step_1/test_modules_basic.py +68 -70
- bbot/test/test_step_1/test_presets.py +184 -96
- bbot/test/test_step_1/test_python_api.py +7 -2
- bbot/test/test_step_1/test_regexes.py +35 -5
- bbot/test/test_step_1/test_scan.py +39 -5
- bbot/test/test_step_1/test_scope.py +4 -3
- bbot/test/test_step_1/test_target.py +243 -145
- bbot/test/test_step_1/test_web.py +14 -8
- bbot/test/test_step_2/module_tests/base.py +15 -7
- bbot/test/test_step_2/module_tests/test_module_anubisdb.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_apkpure.py +71 -0
- bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +0 -1
- bbot/test/test_step_2/module_tests/test_module_azure_realm.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_baddns.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_baddns_direct.py +62 -0
- bbot/test/test_step_2/module_tests/test_module_bevigil.py +29 -2
- bbot/test/test_step_2/module_tests/test_module_binaryedge.py +4 -2
- bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_bucket_azure.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +35 -0
- bbot/test/test_step_2/module_tests/test_module_builtwith.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_bypass403.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_c99.py +126 -0
- bbot/test/test_step_2/module_tests/test_module_censys.py +4 -1
- bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +4 -0
- bbot/test/test_step_2/module_tests/test_module_code_repository.py +11 -1
- bbot/test/test_step_2/module_tests/test_module_columbus.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_credshed.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_dastardly.py +2 -1
- bbot/test/test_step_2/module_tests/test_module_dehashed.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_digitorus.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_discord.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +103 -0
- bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +9 -10
- bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +64 -0
- bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py +0 -8
- bbot/test/test_step_2/module_tests/test_module_excavate.py +17 -37
- bbot/test/test_step_2/module_tests/test_module_extractous.py +54 -0
- bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_filedownload.py +14 -14
- bbot/test/test_step_2/module_tests/test_module_git_clone.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_github_org.py +19 -8
- bbot/test/test_step_2/module_tests/test_module_github_workflows.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_gitlab.py +9 -4
- bbot/test/test_step_2/module_tests/test_module_google_playstore.py +83 -0
- bbot/test/test_step_2/module_tests/test_module_gowitness.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_host_header.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_http.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_httpx.py +10 -8
- bbot/test/test_step_2/module_tests/test_module_hunterio.py +68 -4
- bbot/test/test_step_2/module_tests/test_module_jadx.py +55 -0
- bbot/test/test_step_2/module_tests/test_module_json.py +22 -9
- bbot/test/test_step_2/module_tests/test_module_leakix.py +7 -3
- bbot/test/test_step_2/module_tests/test_module_mysql.py +76 -0
- bbot/test/test_step_2/module_tests/test_module_myssl.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_neo4j.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_newsletters.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_ntlm.py +7 -7
- bbot/test/test_step_2/module_tests/test_module_oauth.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_otx.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +0 -6
- bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +2 -9
- bbot/test/test_step_2/module_tests/test_module_passivetotal.py +3 -1
- bbot/test/test_step_2/module_tests/test_module_portscan.py +9 -8
- bbot/test/test_step_2/module_tests/test_module_postgres.py +74 -0
- bbot/test/test_step_2/module_tests/test_module_postman.py +84 -253
- bbot/test/test_step_2/module_tests/test_module_postman_download.py +439 -0
- bbot/test/test_step_2/module_tests/test_module_rapiddns.py +93 -1
- bbot/test/test_step_2/module_tests/test_module_shodan_dns.py +20 -1
- bbot/test/test_step_2/module_tests/test_module_sitedossier.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_smuggler.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_social.py +11 -1
- bbot/test/test_step_2/module_tests/test_module_speculate.py +2 -6
- bbot/test/test_step_2/module_tests/test_module_splunk.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_sqlite.py +18 -0
- bbot/test/test_step_2/module_tests/test_module_sslcert.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_stdout.py +5 -3
- bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_subdomainradar.py +208 -0
- bbot/test/test_step_2/module_tests/test_module_subdomains.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_teams.py +8 -6
- bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_trufflehog.py +317 -14
- bbot/test/test_step_2/module_tests/test_module_wayback.py +1 -1
- bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py +2 -2
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/METADATA +48 -18
- bbot-2.3.0.5397rc0.dist-info/RECORD +421 -0
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/WHEEL +1 -1
- bbot/modules/unstructured.py +0 -163
- bbot/test/test_step_2/module_tests/test_module_unstructured.py +0 -102
- bbot-2.0.1.4720rc0.dist-info/RECORD +0 -387
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/LICENSE +0 -0
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/entry_points.txt +0 -0
bbot/modules/templates/sql.py
ADDED

@@ -0,0 +1,95 @@
+from contextlib import suppress
+from sqlmodel import SQLModel
+from sqlalchemy.orm import sessionmaker
+from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
+
+from bbot.db.sql.models import Event, Scan, Target
+from bbot.modules.output.base import BaseOutputModule
+
+
+class SQLTemplate(BaseOutputModule):
+    meta = {"description": "SQL output module template"}
+    options = {
+        "database": "bbot",
+        "username": "",
+        "password": "",
+        "host": "127.0.0.1",
+        "port": 0,
+    }
+    options_desc = {
+        "database": "The database to use",
+        "username": "The username to use to connect to the database",
+        "password": "The password to use to connect to the database",
+        "host": "The host to use to connect to the database",
+        "port": "The port to use to connect to the database",
+    }
+
+    protocol = ""
+
+    async def setup(self):
+        self.database = self.config.get("database", "bbot")
+        self.username = self.config.get("username", "")
+        self.password = self.config.get("password", "")
+        self.host = self.config.get("host", "127.0.0.1")
+        self.port = self.config.get("port", 0)
+
+        await self.init_database()
+        return True
+
+    async def handle_event(self, event):
+        event_obj = Event(**event.json()).validated
+
+        async with self.async_session() as session:
+            async with session.begin():
+                # insert event
+                session.add(event_obj)
+
+                # if it's a SCAN event, create/update the scan and target
+                if event_obj.type == "SCAN":
+                    event_data = event_obj.get_data()
+                    if not isinstance(event_data, dict):
+                        raise ValueError(f"Invalid data for SCAN event: {event_data}")
+                    scan = Scan(**event_data).validated
+                    await session.merge(scan)  # Insert or update scan
+
+                    target_data = event_data.get("target", {})
+                    if not isinstance(target_data, dict):
+                        raise ValueError(f"Invalid target for SCAN event: {target_data}")
+                    target = Target(**target_data).validated
+                    await session.merge(target)  # Insert or update target
+
+            await session.commit()
+
+    async def create_database(self):
+        pass
+
+    async def init_database(self):
+        await self.create_database()
+
+        # Now create the engine for the actual database
+        self.engine = create_async_engine(self.connection_string())
+        # Create a session factory bound to the engine
+        self.async_session = sessionmaker(self.engine, expire_on_commit=False, class_=AsyncSession)
+
+        # Use the engine directly to create all tables
+        async with self.engine.begin() as conn:
+            await conn.run_sync(SQLModel.metadata.create_all)
+
+    def connection_string(self, mask_password=False):
+        connection_string = f"{self.protocol}://"
+        if self.username:
+            password = self.password
+            if mask_password:
+                password = "****"
+            connection_string += f"{self.username}:{password}"
+        if self.host:
+            connection_string += f"@{self.host}"
+        if self.port:
+            connection_string += f":{self.port}"
+        if self.database:
+            connection_string += f"/{self.database}"
+        return connection_string
+
+    async def cleanup(self):
+        with suppress(Exception):
+            await self.engine.dispose()
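The template builds its SQLAlchemy connection string from the empty `protocol` attribute, so the concrete SQL output modules added in this release (sqlite, mysql, postgres) presumably only need to supply a dialect and adjust the connection string. A minimal sketch of that pattern, assuming a SQLite backend with the aiosqlite driver; the module name and dialect string below are illustrative, not the shipped bbot modules:

# Hypothetical subclass of the SQLTemplate above -- a sketch, not the actual sqlite output module.
from bbot.modules.templates.sql import SQLTemplate


class sqlite_example(SQLTemplate):
    meta = {"description": "Example: write scan events to a local SQLite file"}
    protocol = "sqlite+aiosqlite"  # assumed async SQLAlchemy dialect

    def connection_string(self, mask_password=False):
        # SQLite needs no credentials or host -- just a file path
        return f"{self.protocol}:///{self.database}"

Keeping the dialect and connection-string details in small subclasses lets the template own the engine, session factory, and table creation for every SQL backend.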
bbot/modules/templates/subdomain_enum.py
CHANGED

@@ -20,8 +20,8 @@ class subdomain_enum(BaseModule):
     # whether to reject wildcard DNS_NAMEs
     reject_wildcards = "strict"
 
-    # set qsize to 10. this helps combat rate limiting by ensuring
-    # until the queue
+    # set qsize to 10. this helps combat rate limiting by ensuring the next query doesn't execute
+    # until the result from the previous queue have been consumed by the scan
     # we don't use 1 because it causes delays due to the asyncio.sleep; 10 gives us reasonable buffer room
     _qsize = 10
 
@@ -31,6 +31,11 @@ class subdomain_enum(BaseModule):
     # "lowest_parent": dedupe by lowest parent (lowest parent of www.api.test.evilcorp.com is api.test.evilcorp.com)
     dedup_strategy = "highest_parent"
 
+    # how many results to request per API call
+    page_size = 100
+    # arguments to pass to api_page_iter
+    api_page_iter_kwargs = {}
+
     @property
     def source_pretty_name(self):
         return f"{self.__class__.__name__} API"
@@ -61,9 +66,30 @@ class subdomain_enum(BaseModule):
                     context=f'{{module}} searched {self.source_pretty_name} for "{query}" and found {{event.type}}: {{event.data}}',
                 )
 
+    async def handle_event_paginated(self, event):
+        query = self.make_query(event)
+        async for result_batch in self.query_paginated(query):
+            for hostname in set(result_batch):
+                try:
+                    hostname = self.helpers.validators.validate_host(hostname)
+                except ValueError as e:
+                    self.verbose(e)
+                    continue
+                if hostname and hostname.endswith(f".{query}") and not hostname == event.data:
+                    await self.emit_event(
+                        hostname,
+                        "DNS_NAME",
+                        event,
+                        abort_if=self.abort_if,
+                        context=f'{{module}} searched {self.source_pretty_name} for "{query}" and found {{event.type}}: {{event.data}}',
+                    )
+
     async def request_url(self, query):
-        url =
-        return await self.
+        url = self.make_url(query)
+        return await self.api_request(url)
+
+    def make_url(self, query):
+        return f"{self.base_url}/subdomains/{self.helpers.quote(query)}"
 
     def make_query(self, event):
         query = event.data
@@ -78,30 +104,26 @@ class subdomain_enum(BaseModule):
                 if self.scan.in_scope(p):
                     query = p
                     break
-
-            return ".".join([s for s in query.split(".") if s != "_wildcard"])
-        except Exception:
-            self.critical(query)
-            raise
+        return ".".join([s for s in query.split(".") if s != "_wildcard"])
 
-    def parse_results(self, r, query=None):
+    async def parse_results(self, r, query=None):
         json = r.json()
         if json:
             for hostname in json:
                 yield hostname
 
-    async def query(self, query,
-        if parse_fn is None:
-            parse_fn = self.parse_results
+    async def query(self, query, request_fn=None, parse_fn=None):
         if request_fn is None:
             request_fn = self.request_url
+        if parse_fn is None:
+            parse_fn = self.parse_results
         try:
             response = await request_fn(query)
             if response is None:
                 self.info(f'Query "{query}" failed (no response)')
                 return []
             try:
-                results = list(parse_fn(response, query))
+                results = list(await parse_fn(response, query))
             except Exception as e:
                 if response:
                     self.info(
@@ -117,10 +139,23 @@ class subdomain_enum(BaseModule):
         except Exception as e:
             self.info(f"Error retrieving results for {query}: {e}", trace=True)
 
+    async def query_paginated(self, query):
+        url = self.make_url(query)
+        agen = self.api_page_iter(url, page_size=self.page_size, **self.api_page_iter_kwargs)
+        try:
+            async for response in agen:
+                subdomains = await self.parse_results(response, query)
+                self.verbose(f'Got {len(subdomains):,} subdomains for "{query}"')
+                if not subdomains:
+                    break
+                yield subdomains
+        finally:
+            agen.aclose()
+
     async def _is_wildcard(self, query):
         rdtypes = ("A", "AAAA", "CNAME")
         if self.helpers.is_dns_name(query):
-            for
+            for wildcard_rdtypes in (await self.helpers.is_wildcard_domain(query, rdtypes=rdtypes)).values():
                 if any(t in wildcard_rdtypes for t in rdtypes):
                     return True
         return False
@@ -134,7 +169,7 @@ class subdomain_enum(BaseModule):
         if any(t.startswith("cloud-") for t in event.tags):
             is_cloud = True
         # reject if it's a cloud resource and not in our target
-        if is_cloud and event not in self.scan.target:
+        if is_cloud and event not in self.scan.target.whitelist:
            return False, "Event is a cloud resource and not a direct target"
        # optionally reject events with wildcards / errors
        if self.reject_wildcards:
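The trickest and virustotal diffs further down use these new hooks; as a rough sketch of the pattern they follow (the module name, base_url, and response shape below are invented for illustration, and the literal {page_size}/{offset} placeholders are assumed to be filled in per page by api_page_iter):

# Hypothetical paginated subdomain module built on the hooks added above -- not a real bbot module.
from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey


class exampleapi(subdomain_enum_apikey):
    options = {"api_key": ""}  # API key handling comes from the apikey template
    base_url = "https://api.example.com/v1"
    page_size = 100  # forwarded to api_page_iter by query_paginated()

    async def handle_event(self, event):
        # delegate to the template: make_url -> api_page_iter -> parse_results -> emit DNS_NAMEs
        await self.handle_event_paginated(event)

    def make_url(self, query):
        # placeholders are left literal here; api_page_iter substitutes them for each page
        return f"{self.base_url}/subdomains/{self.helpers.quote(query)}" + "?limit={page_size}&offset={offset}"

    async def parse_results(self, r, query):
        # return an iterable of hostnames; an empty batch ends pagination
        return set(r.json().get("subdomains", []))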
bbot/modules/templates/webhook.py
CHANGED

@@ -9,7 +9,6 @@ class WebhookOutputModule(BaseOutputModule):
     """
 
     accept_dupes = False
-    good_status_code = 204
     message_size_limit = 2000
     content_key = "content"
     vuln_severities = ["UNKNOWN", "LOW", "MEDIUM", "HIGH", "CRITICAL"]
@@ -61,7 +60,7 @@ class WebhookOutputModule(BaseOutputModule):
     async def filter_event(self, event):
         if event.type == "VULNERABILITY":
             severity = event.data.get("severity", "UNKNOWN")
-            if not
+            if severity not in self.allowed_severities:
                 return False, f"{severity} is below min_severity threshold"
         return True
 
@@ -94,5 +93,4 @@ class WebhookOutputModule(BaseOutputModule):
         return msg
 
     def evaluate_response(self, response):
-
-        return status_code == self.good_status_code
+        return getattr(response, "is_success", False)
bbot/modules/trickest.py
CHANGED

@@ -19,53 +19,24 @@ class Trickest(subdomain_enum_apikey):
     }
 
     base_url = "https://api.trickest.io/solutions/v1/public/solution/a7cba1f1-df07-4a5c-876a-953f178996be"
+    ping_url = f"{base_url}/dataset"
     dataset_id = "a0a49ca9-03bb-45e0-aa9a-ad59082ebdfc"
     page_size = 50
 
-
-
-        url
-        response = await self.helpers.request(url, headers=self.headers)
-        status_code = getattr(response, "status_code", 0)
-        if status_code != 200:
-            response_text = getattr(response, "text", "no response from server")
-            return False, response_text
-        return True
+    def prepare_api_request(self, url, kwargs):
+        kwargs["headers"]["Authorization"] = f"Token {self.api_key}"
+        return url, kwargs
 
     async def handle_event(self, event):
-
-        async for result_batch in self.query(query):
-            for hostname in set(result_batch):
-                try:
-                    hostname = self.helpers.validators.validate_host(hostname)
-                except ValueError as e:
-                    self.verbose(e)
-                    continue
-                if hostname and hostname.endswith(f".{query}") and not hostname == event.data:
-                    await self.emit_event(
-                        hostname,
-                        "DNS_NAME",
-                        event,
-                        abort_if=self.abort_if,
-                        context=f'{{module}} searched {self.source_pretty_name} for "{query}" and found {{event.type}}: {{event.data}}',
-                    )
+        await self.handle_event_paginated(event)
 
-
+    def make_url(self, query):
         url = f"{self.base_url}/view?q=hostname%20~%20%22.{self.helpers.quote(query)}%22"
         url += f"&dataset_id={self.dataset_id}"
         url += "&limit={page_size}&offset={offset}&select=hostname&orderby=hostname"
-
-        try:
-            async for response in agen:
-                subdomains = self.parse_results(response)
-                self.verbose(f'Got {len(subdomains):,} subdomains for "{query}"')
-                if not subdomains:
-                    break
-                yield subdomains
-        finally:
-            agen.aclose()
+        return url
 
-    def parse_results(self, j):
+    async def parse_results(self, j, query):
        results = j.get("results", [])
        subdomains = set()
        for item in results:
bbot/modules/trufflehog.py
CHANGED

@@ -1,5 +1,4 @@
 import json
-from pathlib import Path
 from bbot.modules.base import BaseModule
 
 
@@ -14,7 +13,7 @@ class trufflehog(BaseModule):
     }
 
     options = {
-        "version": "3.
+        "version": "3.84.1",
         "config": "",
         "only_verified": True,
         "concurrency": 8,
@@ -31,7 +30,7 @@ class trufflehog(BaseModule):
         {
             "name": "Download trufflehog",
             "unarchive": {
-                "src": "https://github.com/trufflesecurity/trufflehog/releases/download/v#{BBOT_MODULES_TRUFFLEHOG_VERSION}/trufflehog_#{BBOT_MODULES_TRUFFLEHOG_VERSION}_#{
+                "src": "https://github.com/trufflesecurity/trufflehog/releases/download/v#{BBOT_MODULES_TRUFFLEHOG_VERSION}/trufflehog_#{BBOT_MODULES_TRUFFLEHOG_VERSION}_#{BBOT_OS_PLATFORM}_#{BBOT_CPU_ARCH}.tar.gz",
                 "include": "trufflehog",
                 "dest": "#{BBOT_TOOLS}",
                 "remote_src": True,
@@ -52,7 +51,7 @@ class trufflehog(BaseModule):
         self.github_token = ""
         if self.deleted_forks:
             self.warning(
-
+                "Deleted forks is enabled. Scanning for deleted forks is slooooooowwwww. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours."
             )
         for module_name in ("github", "github_codesearch", "github_org", "git_clone"):
             module_config = self.scan.config.get("modules", {}).get(module_name, {})
@@ -65,7 +64,6 @@ class trufflehog(BaseModule):
         if not self.github_token:
             self.deleted_forks = False
             return None, "A github api_key must be provided to the github modules for deleted forks to be scanned"
-        self.processed = set()
         return True
 
     async def filter_event(self, event):
@@ -78,12 +76,8 @@ class trufflehog(BaseModule):
             else:
                 return False, "Deleted forks is not enabled"
         else:
-
-
-            processed_path = Path(processed)
-            new_path = Path(path)
-            if new_path.is_relative_to(processed_path):
-                return False, "Parent folder has already been processed"
+            if "parsed-folder" in event.tags:
+                return False, "Not accepting parsed-folder events"
         return True
 
     async def handle_event(self, event):
@@ -94,11 +88,12 @@ class trufflehog(BaseModule):
             module = "github-experimental"
         else:
             path = event.data["path"]
-            self.processed.add(path)
             if "git" in event.tags:
                 module = "git"
             elif "docker" in event.tags:
                 module = "docker"
+            elif "postman" in event.tags:
+                module = "postman"
             else:
                 module = "filesystem"
         if event.type == "CODE_REPOSITORY":
@@ -164,6 +159,9 @@ class trufflehog(BaseModule):
         elif module == "docker":
             command.append("docker")
             command.append("--image=file://" + path)
+        elif module == "postman":
+            command.append("postman")
+            command.append("--workspace-paths=" + path)
        elif module == "filesystem":
            command.append("filesystem")
            command.append(path)
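For a postman-tagged filesystem event, the command the module assembles would then look roughly like the sketch below; only the `postman` subcommand and `--workspace-paths` flag appear in the diff above, and the download path and base flags are illustrative assumptions:

# Sketch of the command list built for a downloaded Postman workspace (paths/flags hypothetical).
path = "/tmp/.bbot/postman_workspaces/example_workspace"  # assumed output of postman_download
command = ["trufflehog", "--json"]  # base flags are an assumption
command.append("postman")
command.append("--workspace-paths=" + path)
# roughly: trufflehog --json postman --workspace-paths=/tmp/.bbot/postman_workspaces/example_workspace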
bbot/modules/url_manipulation.py
CHANGED

@@ -69,11 +69,11 @@ class url_manipulation(BaseModule):
 
             if subject_response:
                 subject_content = "".join([str(x) for x in subject_response.headers])
-                if subject_response.text
+                if subject_response.text is not None:
                     subject_content += subject_response.text
 
                 if self.rand_string not in subject_content:
-                    if match
+                    if match is False:
                         if str(subject_response.status_code).startswith("2"):
                             if "body" in reasons:
                                 reported_signature = f"Modified URL: {sig[1]}"
@@ -98,7 +98,7 @@ class url_manipulation(BaseModule):
         return False
 
     def format_signature(self, sig, event):
-        if sig[2]
+        if sig[2] is True:
            cleaned_path = event.parsed_url.path.strip("/")
        else:
            cleaned_path = event.parsed_url.path.lstrip("/")
bbot/modules/urlscan.py
CHANGED
bbot/modules/viewdns.py
CHANGED

@@ -48,7 +48,7 @@ class viewdns(BaseModule):
 
         html = self.helpers.beautifulsoup(content, "html.parser")
         if html is False:
-            self.debug(
+            self.debug("BeautifulSoup returned False")
            return results
        found = set()
        for table_row in html.findAll("tr"):
bbot/modules/virustotal.py
CHANGED

@@ -15,37 +15,15 @@ class virustotal(subdomain_enum_apikey):
     options_desc = {"api_key": "VirusTotal API Key"}
 
     base_url = "https://www.virustotal.com/api/v3"
+    api_page_iter_kwargs = {"json": False, "next_key": lambda r: r.json().get("links", {}).get("next", "")}
 
-
-        self.
-        self.headers = {"x-apikey": self.api_key}
-        return await super().setup()
+    def make_url(self, query):
+        return f"{self.base_url}/domains/{self.helpers.quote(query)}/subdomains"
 
-
-
-        return
+    def prepare_api_request(self, url, kwargs):
+        kwargs["headers"]["x-apikey"] = self.api_key
+        return url, kwargs
 
-    def parse_results(self, r, query):
-        results = set()
+    async def parse_results(self, r, query):
         text = getattr(r, "text", "")
-
-            match = match.lower()
-            if match.endswith(query):
-                results.add(match)
-        return results
-
-    async def query(self, query):
-        results = set()
-        url = f"{self.base_url}/domains/{self.helpers.quote(query)}/subdomains"
-        agen = self.helpers.api_page_iter(
-            url, json=False, headers=self.headers, next_key=lambda r: r.json().get("links", {}).get("next", "")
-        )
-        try:
-            async for response in agen:
-                r = self.parse_results(response, query)
-                if not r:
-                    break
-                results.update(r)
-        finally:
-            agen.aclose()
-        return results
+        return await self.scan.extract_in_scope_hostnames(text)
bbot/modules/wafw00f.py
CHANGED

@@ -52,7 +52,7 @@ class wafw00f(BaseModule):
                     context=f"{{module}} scanned {url} and identified {{event.type}}: {waf}",
                 )
             else:
-                if self.config.get("generic_detect")
+                if self.config.get("generic_detect") is True:
                    generic = await self.helpers.run_in_executor(WW.genericdetect)
                    if generic:
                        waf = "generic detection"
bbot/modules/wayback.py
CHANGED

@@ -10,7 +10,7 @@ class wayback(subdomain_enum):
     meta = {
         "description": "Query archive.org's API for subdomains",
         "created_date": "2022-04-01",
-        "author": "@
+        "author": "@liquidsec",
    }
    options = {"urls": False, "garbage_threshold": 10}
    options_desc = {
bbot/modules/wpscan.py
CHANGED

@@ -14,26 +14,26 @@ class wpscan(BaseModule):
 
     options = {
         "api_key": "",
-        "enumerate": "vp,vt,
+        "enumerate": "vp,vt,cb,dbe",
         "threads": 5,
-        "request_timeout":
-        "connection_timeout":
+        "request_timeout": 5,
+        "connection_timeout": 2,
         "disable_tls_checks": True,
         "force": False,
     }
     options_desc = {
         "api_key": "WPScan API Key",
-        "enumerate": "Enumeration Process see wpscan help documentation (default: vp,vt,
+        "enumerate": "Enumeration Process see wpscan help documentation (default: vp,vt,cb,dbe)",
         "threads": "How many wpscan threads to spawn (default is 5)",
-        "request_timeout": "The request timeout in seconds (default
-        "connection_timeout": "The connection timeout in seconds (default
+        "request_timeout": "The request timeout in seconds (default 5)",
+        "connection_timeout": "The connection timeout in seconds (default 2)",
         "disable_tls_checks": "Disables the SSL/TLS certificate verification (Default True)",
         "force": "Do not check if the target is running WordPress or returns a 403",
     }
     deps_apt = ["curl", "make", "gcc"]
     deps_ansible = [
         {
-            "name": "Install Ruby Deps (Debian
+            "name": "Install Ruby Deps (Debian)",
             "package": {"name": ["ruby-rubygems", "ruby-dev"], "state": "present"},
             "become": True,
             "when": "ansible_facts['os_family'] == 'Debian'",
@@ -48,7 +48,13 @@ class wpscan(BaseModule):
             "name": "Install Ruby Deps (Fedora)",
             "package": {"name": ["rubygems", "ruby-devel"], "state": "present"},
             "become": True,
-            "when": "ansible_facts['os_family'] == '
+            "when": "ansible_facts['os_family'] == 'RedHat'",
+        },
+        {
+            "name": "Install Ruby Deps (Alpine)",
+            "package": {"name": ["ruby-dev", "ruby-bundler"], "state": "present"},
+            "become": True,
+            "when": "ansible_facts['os_family'] == 'Alpine'",
         },
         {
             "name": "Install wpscan gem",
@@ -61,11 +67,11 @@ class wpscan(BaseModule):
         self.processed = set()
         self.ignore_events = ["xmlrpc", "readme"]
         self.api_key = self.config.get("api_key", "")
-        self.enumerate = self.config.get("enumerate", "vp,vt,
+        self.enumerate = self.config.get("enumerate", "vp,vt,cb,dbe")
         self.proxy = self.scan.web_config.get("http_proxy", "")
         self.threads = self.config.get("threads", 5)
-        self.request_timeout = self.config.get("request_timeout",
-        self.connection_timeout = self.config.get("connection_timeout",
+        self.request_timeout = self.config.get("request_timeout", 5)
+        self.connection_timeout = self.config.get("connection_timeout", 2)
        self.disable_tls_checks = self.config.get("disable_tls_checks", True)
        self.force = self.config.get("force", False)
        return True
bbot/modules/zoomeye.py
CHANGED

@@ -22,13 +22,16 @@ class zoomeye(subdomain_enum_apikey):
 
     async def setup(self):
         self.max_pages = self.config.get("max_pages", 20)
-        self.headers = {"API-KEY": self.config.get("api_key", "")}
         self.include_related = self.config.get("include_related", False)
         return await super().setup()
 
+    def prepare_api_request(self, url, kwargs):
+        kwargs["headers"]["API-KEY"] = self.api_key
+        return url, kwargs
+
     async def ping(self):
         url = f"{self.base_url}/resources-info"
-        r = await self.
+        r = await self.api_request(url)
         assert int(r.json()["quota_info"]["remain_total_quota"]) > 0, "No quota remaining"
 
     async def handle_event(self, event):
@@ -54,10 +57,10 @@ class zoomeye(subdomain_enum_apikey):
         query_type = 0 if self.include_related else 1
         url = f"{self.base_url}/domain/search?q={self.helpers.quote(query)}&type={query_type}&page=" + "{page}"
         i = 0
-        agen = self.
+        agen = self.api_page_iter(url)
         try:
             async for j in agen:
-                r = list(self.parse_results(j))
+                r = list(await self.parse_results(j))
                 if r:
                     results.update(set(r))
                 if not r or i >= (self.max_pages - 1):
@@ -67,6 +70,8 @@ class zoomeye(subdomain_enum_apikey):
             agen.aclose()
         return results
 
-    def parse_results(self, r):
+    async def parse_results(self, r):
+        results = set()
         for entry in r.get("list", []):
-
+            results.add(entry["name"])
+        return results
bbot/presets/fast.yml
ADDED

@@ -0,0 +1,16 @@
+description: Scan only the provided targets as fast as possible - no extra discovery
+
+exclude_modules:
+  - excavate
+
+config:
+  # only scan the exact targets specified
+  scope:
+    strict: true
+  # speed up dns resolution by doing A/AAAA only - not MX/NS/SRV/etc
+  dns:
+    minimal: true
+  # essential speculation only
+  modules:
+    speculate:
+      essential_only: true
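The new preset is loaded by name like any other; a short sketch using bbot's Python API, where the target domain is a placeholder and the presets= keyword follows the documented Scanner interface:

# Sketch: running a scan with the new "fast" preset via the Python API.
import asyncio
from bbot.scanner import Scanner


async def main():
    scan = Scanner("evilcorp.com", presets=["fast"])  # preset name matches bbot/presets/fast.yml
    async for event in scan.async_start():
        print(event)


asyncio.run(main())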
bbot/presets/kitchen-sink.yml
CHANGED