bbot 2.0.1.4720rc0__py3-none-any.whl → 2.3.0.5401rc0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- bbot/__init__.py +1 -1
- bbot/cli.py +3 -7
- bbot/core/config/files.py +0 -1
- bbot/core/config/logger.py +34 -4
- bbot/core/core.py +21 -4
- bbot/core/engine.py +9 -8
- bbot/core/event/base.py +131 -52
- bbot/core/helpers/bloom.py +10 -3
- bbot/core/helpers/command.py +8 -7
- bbot/core/helpers/depsinstaller/installer.py +31 -13
- bbot/core/helpers/diff.py +10 -10
- bbot/core/helpers/dns/brute.py +7 -4
- bbot/core/helpers/dns/dns.py +1 -2
- bbot/core/helpers/dns/engine.py +4 -6
- bbot/core/helpers/dns/helpers.py +2 -2
- bbot/core/helpers/dns/mock.py +0 -1
- bbot/core/helpers/files.py +1 -1
- bbot/core/helpers/helper.py +7 -4
- bbot/core/helpers/interactsh.py +3 -3
- bbot/core/helpers/libmagic.py +65 -0
- bbot/core/helpers/misc.py +65 -22
- bbot/core/helpers/names_generator.py +17 -3
- bbot/core/helpers/process.py +0 -20
- bbot/core/helpers/regex.py +1 -1
- bbot/core/helpers/regexes.py +12 -6
- bbot/core/helpers/validators.py +1 -2
- bbot/core/helpers/web/client.py +1 -1
- bbot/core/helpers/web/engine.py +1 -2
- bbot/core/helpers/web/web.py +4 -114
- bbot/core/helpers/wordcloud.py +5 -5
- bbot/core/modules.py +36 -27
- bbot/core/multiprocess.py +58 -0
- bbot/core/shared_deps.py +46 -3
- bbot/db/sql/models.py +147 -0
- bbot/defaults.yml +12 -10
- bbot/modules/anubisdb.py +2 -2
- bbot/modules/apkpure.py +63 -0
- bbot/modules/azure_tenant.py +2 -2
- bbot/modules/baddns.py +35 -19
- bbot/modules/baddns_direct.py +92 -0
- bbot/modules/baddns_zone.py +3 -8
- bbot/modules/badsecrets.py +4 -3
- bbot/modules/base.py +195 -51
- bbot/modules/bevigil.py +7 -7
- bbot/modules/binaryedge.py +7 -4
- bbot/modules/bufferoverrun.py +47 -0
- bbot/modules/builtwith.py +6 -10
- bbot/modules/bypass403.py +5 -5
- bbot/modules/c99.py +10 -7
- bbot/modules/censys.py +9 -13
- bbot/modules/certspotter.py +5 -3
- bbot/modules/chaos.py +9 -7
- bbot/modules/code_repository.py +1 -0
- bbot/modules/columbus.py +3 -3
- bbot/modules/crt.py +5 -3
- bbot/modules/deadly/dastardly.py +1 -1
- bbot/modules/deadly/ffuf.py +9 -9
- bbot/modules/deadly/nuclei.py +3 -3
- bbot/modules/deadly/vhost.py +4 -3
- bbot/modules/dehashed.py +1 -1
- bbot/modules/digitorus.py +1 -1
- bbot/modules/dnsbimi.py +145 -0
- bbot/modules/dnscaa.py +3 -3
- bbot/modules/dnsdumpster.py +4 -4
- bbot/modules/dnstlsrpt.py +144 -0
- bbot/modules/docker_pull.py +7 -5
- bbot/modules/dockerhub.py +2 -2
- bbot/modules/dotnetnuke.py +20 -21
- bbot/modules/emailformat.py +1 -1
- bbot/modules/extractous.py +122 -0
- bbot/modules/filedownload.py +9 -7
- bbot/modules/fullhunt.py +7 -4
- bbot/modules/generic_ssrf.py +5 -5
- bbot/modules/github_codesearch.py +3 -2
- bbot/modules/github_org.py +4 -4
- bbot/modules/github_workflows.py +4 -4
- bbot/modules/gitlab.py +2 -5
- bbot/modules/google_playstore.py +93 -0
- bbot/modules/gowitness.py +48 -50
- bbot/modules/hackertarget.py +5 -3
- bbot/modules/host_header.py +5 -5
- bbot/modules/httpx.py +1 -4
- bbot/modules/hunterio.py +3 -9
- bbot/modules/iis_shortnames.py +19 -30
- bbot/modules/internal/cloudcheck.py +29 -12
- bbot/modules/internal/dnsresolve.py +22 -22
- bbot/modules/internal/excavate.py +97 -59
- bbot/modules/internal/speculate.py +41 -32
- bbot/modules/internetdb.py +4 -2
- bbot/modules/ip2location.py +3 -5
- bbot/modules/ipneighbor.py +1 -1
- bbot/modules/ipstack.py +3 -8
- bbot/modules/jadx.py +87 -0
- bbot/modules/leakix.py +11 -10
- bbot/modules/myssl.py +2 -2
- bbot/modules/newsletters.py +2 -2
- bbot/modules/otx.py +5 -3
- bbot/modules/output/asset_inventory.py +7 -7
- bbot/modules/output/base.py +1 -1
- bbot/modules/output/csv.py +1 -1
- bbot/modules/output/http.py +20 -14
- bbot/modules/output/mysql.py +51 -0
- bbot/modules/output/neo4j.py +7 -2
- bbot/modules/output/postgres.py +49 -0
- bbot/modules/output/slack.py +0 -1
- bbot/modules/output/sqlite.py +29 -0
- bbot/modules/output/stdout.py +2 -2
- bbot/modules/output/teams.py +107 -6
- bbot/modules/paramminer_headers.py +8 -11
- bbot/modules/passivetotal.py +13 -13
- bbot/modules/portscan.py +32 -6
- bbot/modules/postman.py +50 -126
- bbot/modules/postman_download.py +220 -0
- bbot/modules/rapiddns.py +3 -8
- bbot/modules/report/asn.py +18 -11
- bbot/modules/robots.py +3 -3
- bbot/modules/securitytrails.py +7 -10
- bbot/modules/securitytxt.py +1 -1
- bbot/modules/shodan_dns.py +7 -9
- bbot/modules/sitedossier.py +1 -1
- bbot/modules/skymem.py +2 -2
- bbot/modules/social.py +2 -1
- bbot/modules/subdomaincenter.py +1 -1
- bbot/modules/subdomainradar.py +160 -0
- bbot/modules/telerik.py +8 -8
- bbot/modules/templates/bucket.py +1 -1
- bbot/modules/templates/github.py +22 -14
- bbot/modules/templates/postman.py +21 -0
- bbot/modules/templates/shodan.py +14 -13
- bbot/modules/templates/sql.py +95 -0
- bbot/modules/templates/subdomain_enum.py +51 -16
- bbot/modules/templates/webhook.py +2 -4
- bbot/modules/trickest.py +8 -37
- bbot/modules/trufflehog.py +10 -12
- bbot/modules/url_manipulation.py +3 -3
- bbot/modules/urlscan.py +1 -1
- bbot/modules/viewdns.py +1 -1
- bbot/modules/virustotal.py +8 -30
- bbot/modules/wafw00f.py +1 -1
- bbot/modules/wayback.py +1 -1
- bbot/modules/wpscan.py +17 -11
- bbot/modules/zoomeye.py +11 -6
- bbot/presets/baddns-thorough.yml +12 -0
- bbot/presets/fast.yml +16 -0
- bbot/presets/kitchen-sink.yml +1 -2
- bbot/presets/spider.yml +4 -0
- bbot/presets/subdomain-enum.yml +7 -7
- bbot/presets/web/dotnet-audit.yml +0 -1
- bbot/scanner/manager.py +5 -16
- bbot/scanner/preset/args.py +46 -26
- bbot/scanner/preset/environ.py +7 -2
- bbot/scanner/preset/path.py +7 -4
- bbot/scanner/preset/preset.py +36 -23
- bbot/scanner/scanner.py +172 -62
- bbot/scanner/target.py +236 -434
- bbot/scripts/docs.py +1 -1
- bbot/test/bbot_fixtures.py +13 -3
- bbot/test/conftest.py +132 -100
- bbot/test/fastapi_test.py +17 -0
- bbot/test/owasp_mastg.apk +0 -0
- bbot/test/run_tests.sh +4 -4
- bbot/test/test.conf +2 -0
- bbot/test/test_step_1/test__module__tests.py +0 -1
- bbot/test/test_step_1/test_bbot_fastapi.py +79 -0
- bbot/test/test_step_1/test_bloom_filter.py +2 -1
- bbot/test/test_step_1/test_cli.py +138 -64
- bbot/test/test_step_1/test_dns.py +61 -27
- bbot/test/test_step_1/test_engine.py +17 -19
- bbot/test/test_step_1/test_events.py +183 -30
- bbot/test/test_step_1/test_helpers.py +64 -29
- bbot/test/test_step_1/test_manager_deduplication.py +1 -1
- bbot/test/test_step_1/test_manager_scope_accuracy.py +333 -330
- bbot/test/test_step_1/test_modules_basic.py +68 -70
- bbot/test/test_step_1/test_presets.py +183 -100
- bbot/test/test_step_1/test_python_api.py +7 -2
- bbot/test/test_step_1/test_regexes.py +35 -5
- bbot/test/test_step_1/test_scan.py +39 -5
- bbot/test/test_step_1/test_scope.py +4 -3
- bbot/test/test_step_1/test_target.py +242 -145
- bbot/test/test_step_1/test_web.py +14 -10
- bbot/test/test_step_2/module_tests/base.py +15 -7
- bbot/test/test_step_2/module_tests/test_module_anubisdb.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_apkpure.py +71 -0
- bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +0 -1
- bbot/test/test_step_2/module_tests/test_module_azure_realm.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_baddns.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_baddns_direct.py +62 -0
- bbot/test/test_step_2/module_tests/test_module_bevigil.py +29 -2
- bbot/test/test_step_2/module_tests/test_module_binaryedge.py +4 -2
- bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_bucket_azure.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +35 -0
- bbot/test/test_step_2/module_tests/test_module_builtwith.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_bypass403.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_c99.py +126 -0
- bbot/test/test_step_2/module_tests/test_module_censys.py +4 -1
- bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +4 -0
- bbot/test/test_step_2/module_tests/test_module_code_repository.py +11 -1
- bbot/test/test_step_2/module_tests/test_module_columbus.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_credshed.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_dastardly.py +2 -1
- bbot/test/test_step_2/module_tests/test_module_dehashed.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_digitorus.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_discord.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +103 -0
- bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +9 -10
- bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +64 -0
- bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py +0 -8
- bbot/test/test_step_2/module_tests/test_module_excavate.py +28 -48
- bbot/test/test_step_2/module_tests/test_module_extractous.py +54 -0
- bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_filedownload.py +14 -14
- bbot/test/test_step_2/module_tests/test_module_git_clone.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_github_org.py +19 -8
- bbot/test/test_step_2/module_tests/test_module_github_workflows.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_gitlab.py +9 -4
- bbot/test/test_step_2/module_tests/test_module_google_playstore.py +83 -0
- bbot/test/test_step_2/module_tests/test_module_gowitness.py +4 -6
- bbot/test/test_step_2/module_tests/test_module_host_header.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_http.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_httpx.py +10 -8
- bbot/test/test_step_2/module_tests/test_module_hunterio.py +68 -4
- bbot/test/test_step_2/module_tests/test_module_jadx.py +55 -0
- bbot/test/test_step_2/module_tests/test_module_json.py +22 -9
- bbot/test/test_step_2/module_tests/test_module_leakix.py +7 -3
- bbot/test/test_step_2/module_tests/test_module_mysql.py +76 -0
- bbot/test/test_step_2/module_tests/test_module_myssl.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_neo4j.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_newsletters.py +16 -16
- bbot/test/test_step_2/module_tests/test_module_ntlm.py +8 -7
- bbot/test/test_step_2/module_tests/test_module_oauth.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_otx.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +0 -6
- bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +2 -9
- bbot/test/test_step_2/module_tests/test_module_passivetotal.py +3 -1
- bbot/test/test_step_2/module_tests/test_module_pgp.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_portscan.py +9 -8
- bbot/test/test_step_2/module_tests/test_module_postgres.py +74 -0
- bbot/test/test_step_2/module_tests/test_module_postman.py +84 -253
- bbot/test/test_step_2/module_tests/test_module_postman_download.py +439 -0
- bbot/test/test_step_2/module_tests/test_module_rapiddns.py +93 -1
- bbot/test/test_step_2/module_tests/test_module_shodan_dns.py +20 -1
- bbot/test/test_step_2/module_tests/test_module_sitedossier.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_smuggler.py +14 -14
- bbot/test/test_step_2/module_tests/test_module_social.py +11 -1
- bbot/test/test_step_2/module_tests/test_module_speculate.py +4 -8
- bbot/test/test_step_2/module_tests/test_module_splunk.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_sqlite.py +18 -0
- bbot/test/test_step_2/module_tests/test_module_sslcert.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_stdout.py +5 -3
- bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_subdomainradar.py +208 -0
- bbot/test/test_step_2/module_tests/test_module_subdomains.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_teams.py +8 -6
- bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_trufflehog.py +317 -14
- bbot/test/test_step_2/module_tests/test_module_viewdns.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_wayback.py +1 -1
- bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py +2 -2
- bbot/wordlists/devops_mutations.txt +1 -1
- bbot/wordlists/ffuf_shortname_candidates.txt +1 -1
- bbot/wordlists/nameservers.txt +1 -1
- bbot/wordlists/paramminer_headers.txt +1 -1
- bbot/wordlists/paramminer_parameters.txt +1 -1
- bbot/wordlists/raft-small-extensions-lowercase_CLEANED.txt +1 -1
- bbot/wordlists/valid_url_schemes.txt +1 -1
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/METADATA +48 -18
- bbot-2.3.0.5401rc0.dist-info/RECORD +421 -0
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/WHEEL +1 -1
- bbot/modules/unstructured.py +0 -163
- bbot/test/test_step_2/module_tests/test_module_unstructured.py +0 -102
- bbot-2.0.1.4720rc0.dist-info/RECORD +0 -387
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/LICENSE +0 -0
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5401rc0.dist-info}/entry_points.txt +0 -0
bbot/modules/internal/speculate.py
CHANGED
@@ -32,10 +32,11 @@ class speculate(BaseInternalModule):
         "author": "@liquidsec",
     }
 
-    options = {"max_hosts": 65536, "ports": "80,443"}
+    options = {"max_hosts": 65536, "ports": "80,443", "essential_only": False}
     options_desc = {
         "max_hosts": "Max number of IP_RANGE hosts to convert into IP_ADDRESS events",
         "ports": "The set of ports to speculate on",
+        "essential_only": "Only enable essential speculate features (no extra discovery)",
     }
     scope_distance_modifier = 1
     _priority = 4
@@ -44,14 +45,15 @@ class speculate(BaseInternalModule):
 
     async def setup(self):
         scan_modules = [m for m in self.scan.modules.values() if m._type == "scan"]
-        self.open_port_consumers = any(
+        self.open_port_consumers = any("OPEN_TCP_PORT" in m.watched_events for m in scan_modules)
         # only consider active portscanners (still speculate if only passive ones are enabled)
         self.portscanner_enabled = any(
-
+            "portscan" in m.flags and "active" in m.flags for m in self.scan.modules.values()
         )
         self.emit_open_ports = self.open_port_consumers and not self.portscanner_enabled
         self.range_to_ip = True
         self.dns_disable = self.scan.config.get("dns", {}).get("disable", False)
+        self.essential_only = self.config.get("essential_only", False)
         self.org_stubs_seen = set()
 
         port_string = self.config.get("ports", "80,443")
@@ -63,18 +65,26 @@
         if not self.portscanner_enabled:
             self.info(f"No portscanner enabled. Assuming open ports: {', '.join(str(x) for x in self.ports)}")
 
-        target_len = len(self.scan.target)
+        target_len = len(self.scan.target.seeds)
         if target_len > self.config.get("max_hosts", 65536):
             if not self.portscanner_enabled:
                 self.hugewarning(
                     f"Selected target ({target_len:,} hosts) is too large, skipping IP_RANGE --> IP_ADDRESS speculation"
                 )
-                self.hugewarning(
+                self.hugewarning('Enabling the "portscan" module is highly recommended')
             self.range_to_ip = False
 
         return True
 
     async def handle_event(self, event):
+        ### BEGIN ESSENTIAL SPECULATION ###
+        # These features are required for smooth operation of bbot
+        # I.e. they are not "osinty" or intended to discover anything, they only compliment other modules
+
+        # we speculate on distance-1 stuff too, because distance-1 open ports are needed by certain modules like sslcert
+        event_in_scope_distance = event.scope_distance <= (self.scan.scope_search_distance + 1)
+        speculate_open_ports = self.emit_open_ports and event_in_scope_distance
+
         # generate individual IP addresses from IP range
         if event.type == "IP_RANGE" and self.range_to_ip:
             net = ipaddress.ip_network(event.data)
@@ -89,28 +99,46 @@
                     context=f"speculate converted range into individual IP_ADDRESS: {ip}",
                 )
 
+        # IP_ADDRESS / DNS_NAME --> OPEN_TCP_PORT
+        if speculate_open_ports:
+            # don't act on unresolved DNS_NAMEs
+            usable_dns = False
+            if event.type == "DNS_NAME":
+                if self.dns_disable or ("a-record" in event.tags or "aaaa-record" in event.tags):
+                    usable_dns = True
+
+            if event.type == "IP_ADDRESS" or usable_dns:
+                for port in self.ports:
+                    await self.emit_event(
+                        self.helpers.make_netloc(event.data, port),
+                        "OPEN_TCP_PORT",
+                        parent=event,
+                        internal=True,
+                        context="speculated {event.type}: {event.data}",
+                    )
+
+        ### END ESSENTIAL SPECULATION ###
+        if self.essential_only:
+            return
+
         # parent domains
         if event.type.startswith("DNS_NAME"):
             parent = self.helpers.parent_domain(event.host_original)
             if parent != event.data:
                 await self.emit_event(
-                    parent, "DNS_NAME", parent=event, context=
+                    parent, "DNS_NAME", parent=event, context="speculated parent {event.type}: {event.data}"
                 )
 
-        # we speculate on distance-1 stuff too, because distance-1 open ports are needed by certain modules like sslcert
-        event_in_scope_distance = event.scope_distance <= (self.scan.scope_search_distance + 1)
-        speculate_open_ports = self.emit_open_ports and event_in_scope_distance
-
         # URL --> OPEN_TCP_PORT
-
+        event_is_url = event.type == "URL"
+        if event_is_url or (event.type == "URL_UNVERIFIED" and self.open_port_consumers):
            # only speculate port from a URL if it wouldn't be speculated naturally from the host
            if event.host and (event.port not in self.ports or not speculate_open_ports):
                await self.emit_event(
                    self.helpers.make_netloc(event.host, event.port),
                    "OPEN_TCP_PORT",
                    parent=event,
-                    internal=
-                    quick=(event.type == "URL"),
+                    internal=not event_is_url,  # if the URL is verified, the port is definitely open
                    context=f"speculated {{event.type}} from {event.type}: {{event.data}}",
                )
 
@@ -144,25 +172,6 @@
                     context="speculated {event.type}: {event.data}",
                 )
 
-        # IP_ADDRESS / DNS_NAME --> OPEN_TCP_PORT
-        if speculate_open_ports:
-            # don't act on unresolved DNS_NAMEs
-            usable_dns = False
-            if event.type == "DNS_NAME":
-                if self.dns_disable or ("a-record" in event.tags or "aaaa-record" in event.tags):
-                    usable_dns = True
-
-            if event.type == "IP_ADDRESS" or usable_dns:
-                for port in self.ports:
-                    await self.emit_event(
-                        self.helpers.make_netloc(event.data, port),
-                        "OPEN_TCP_PORT",
-                        parent=event,
-                        internal=True,
-                        quick=True,
-                        context="speculated {event.type}: {event.data}",
-                    )
-
         # ORG_STUB from TLD, SOCIAL, AZURE_TENANT
         org_stubs = set()
         if event.type == "DNS_NAME" and event.scope_distance == 0:
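The new essential_only option gates everything below the "END ESSENTIAL SPECULATION" marker, so the module only back-fills the OPEN_TCP_PORT events other modules need and skips the extra discovery (parent domains, URL ports, ORG_STUBs). A minimal usage sketch via the Python API, assuming the usual modules.<name>.<option> config layout; the option name comes from the diff, everything else here is illustrative:

import asyncio

from bbot.scanner import Scanner

# Illustrative only: restrict speculate to its essential features.
scan = Scanner(
    "evilcorp.com",
    config={"modules": {"speculate": {"essential_only": True, "ports": "80,443"}}},
)

async def main():
    async for event in scan.async_start():
        print(event)

asyncio.run(main())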
bbot/modules/internetdb.py
CHANGED
@@ -48,6 +48,9 @@ class internetdb(BaseModule):
         "show_open_ports": "Display OPEN_TCP_PORT events in output, even if they didn't lead to an interesting discovery"
     }
 
+    # we get lots of 404s, that's normal
+    _api_failure_abort_threshold = 9999999999
+
     _qsize = 500
 
     base_url = "https://internetdb.shodan.io"
@@ -64,7 +67,7 @@
         if ip is None:
             return
         url = f"{self.base_url}/{ip}"
-        r = await self.
+        r = await self.api_request(url)
         if r is None:
             self.debug(f"No response for {event.data}")
             return
@@ -113,7 +116,6 @@
                 "OPEN_TCP_PORT",
                 parent=event,
                 internal=(not self.show_open_ports),
-                quick=True,
                 context=f'{{module}} queried Shodan\'s InternetDB API for "{query_host}" and found {{event.type}}: {{event.data}}',
             )
         vulns = data.get("vulns", [])
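internetdb raises _api_failure_abort_threshold to an effectively infinite value because Shodan's InternetDB answers HTTP 404 for any IP it has no data on, and those 404s would otherwise count toward the shared "too many API failures, give up" limit. A rough illustration of that kind of counter (hypothetical names; the real BaseModule bookkeeping may differ):

# Hypothetical sketch of a consecutive-failure abort counter.
class FailureCounter:
    def __init__(self, abort_threshold=5):
        self.abort_threshold = abort_threshold
        self.failures = 0

    def record(self, status_code: int) -> bool:
        """Return True if the caller should abort."""
        if status_code >= 400:
            self.failures += 1
        else:
            self.failures = 0
        return self.failures >= self.abort_threshold

counter = FailureCounter(abort_threshold=9999999999)  # mirrors internetdb's override
assert counter.record(404) is False  # 404s never trip the abort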
bbot/modules/ip2location.py
CHANGED
@@ -32,12 +32,10 @@ class IP2Location(BaseModule):
 
     async def ping(self):
         url = self.build_url("8.8.8.8")
-
-        resp_content = getattr(r, "text", "")
-        assert getattr(r, "status_code", 0) == 200, resp_content
+        await super().ping(url)
 
     def build_url(self, data):
-        url = f"{self.base_url}/?key={
+        url = f"{self.base_url}/?key={{api_key}}&ip={data}&format=json&source=bbot"
         if self.lang:
             url = f"{url}&lang={self.lang}"
         return url
@@ -45,7 +43,7 @@ class IP2Location(BaseModule):
     async def handle_event(self, event):
         try:
             url = self.build_url(event.data)
-            result = await self.
+            result = await self.api_request(url)
             if result:
                 geo_data = result.json()
                 if not geo_data:
bbot/modules/ipneighbor.py
CHANGED
@@ -31,7 +31,7 @@ class ipneighbor(BaseModule):
         netmask = main_ip.max_prefixlen - min(main_ip.max_prefixlen, self.num_bits)
         network = ipaddress.ip_network(f"{main_ip}/{netmask}", strict=False)
         subnet_hash = hash(network)
-        if not
+        if subnet_hash not in self.processed:
             self.processed.add(subnet_hash)
             for ip in network:
                 if ip != main_ip:
bbot/modules/ipstack.py
CHANGED
@@ -23,20 +23,15 @@ class Ipstack(BaseModule):
     suppress_dupes = False
 
     base_url = "http://api.ipstack.com"
+    ping_url = f"{base_url}/check?access_key={{api_key}}"
 
     async def setup(self):
         return await self.require_api_key()
 
-    async def ping(self):
-        url = f"{self.base_url}/check?access_key={self.api_key}"
-        r = await self.request_with_fail_count(url)
-        resp_content = getattr(r, "text", "")
-        assert getattr(r, "status_code", 0) == 200, resp_content
-
     async def handle_event(self, event):
         try:
-            url = f"{self.base_url}/{event.data}?access_key={
-            result = await self.
+            url = f"{self.base_url}/{event.data}?access_key={{api_key}}"
+            result = await self.api_request(url)
             if result:
                 geo_data = result.json()
                 if not geo_data:
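Both ipstack and ip2location now embed a literal {api_key} placeholder in their URLs (written as {{api_key}} inside the f-strings) instead of interpolating self.api_key directly; presumably the shared api_request/ping machinery substitutes the configured key at request time. A tiny sketch of how the placeholder survives the f-string and could later be filled in (the substitution step is an assumption, not shown in this diff):

base_url = "http://api.ipstack.com"
ping_url = f"{base_url}/check?access_key={{api_key}}"  # double braces -> literal "{api_key}"
assert ping_url == "http://api.ipstack.com/check?access_key={api_key}"

# Assumed substitution step performed by the API helper:
resolved = ping_url.format(api_key="MY_KEY")
assert resolved == "http://api.ipstack.com/check?access_key=MY_KEY"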
bbot/modules/jadx.py
ADDED
@@ -0,0 +1,87 @@
+from pathlib import Path
+from subprocess import CalledProcessError
+from bbot.modules.internal.base import BaseModule
+
+
+class jadx(BaseModule):
+    watched_events = ["FILESYSTEM"]
+    produced_events = ["FILESYSTEM"]
+    flags = ["passive", "safe"]
+    meta = {
+        "description": "Decompile APKs and XAPKs using JADX",
+        "created_date": "2024-11-04",
+        "author": "@domwhewell-sage",
+    }
+    options = {
+        "threads": 4,
+    }
+    options_desc = {
+        "threads": "Maximum jadx threads for extracting apk's, default: 4",
+    }
+    deps_common = ["java"]
+    deps_ansible = [
+        {
+            "name": "Create jadx directory",
+            "file": {"path": "#{BBOT_TOOLS}/jadx", "state": "directory", "mode": "0755"},
+        },
+        {
+            "name": "Download jadx",
+            "unarchive": {
+                "src": "https://github.com/skylot/jadx/releases/download/v1.5.0/jadx-1.5.0.zip",
+                "include": ["lib/jadx-1.5.0-all.jar", "bin/jadx"],
+                "dest": "#{BBOT_TOOLS}/jadx",
+                "remote_src": True,
+            },
+        },
+    ]
+
+    allowed_file_types = ["java archive", "android application package"]
+
+    async def setup(self):
+        self.threads = self.config.get("threads", 4)
+        return True
+
+    async def filter_event(self, event):
+        if "file" in event.tags:
+            if event.data["magic_description"].lower() not in self.allowed_file_types:
+                return False, f"Jadx is not able to decompile this file type: {event.data['magic_description']}"
+        else:
+            return False, "Event is not a file"
+        return True
+
+    async def handle_event(self, event):
+        path = Path(event.data["path"])
+        output_dir = path.parent / path.name.replace(".", "_")
+        self.helpers.mkdir(output_dir)
+        success = await self.decompile_apk(path, output_dir)
+
+        # If jadx was able to decompile the java archive, emit an event
+        if success:
+            await self.emit_event(
+                {"path": str(output_dir)},
+                "FILESYSTEM",
+                tags="folder",
+                parent=event,
+                context=f'extracted "{path}" to: {output_dir}',
+            )
+        else:
+            output_dir.rmdir()
+
+    async def decompile_apk(self, path, output_dir):
+        command = [
+            f"{self.scan.helpers.tools_dir}/jadx/bin/jadx",
+            "--threads-count",
+            self.threads,
+            "--output-dir",
+            str(output_dir),
+            str(path),
+        ]
+        try:
+            output = await self.run_process(command, check=True)
+        except CalledProcessError as e:
+            self.warning(f"Error decompiling {path}. STDOUT: {e.stdout} STDERR: {repr(e.stderr)}")
+            return False
+        if not (output_dir / "resources").exists() and not (output_dir / "sources").exists():
+            self.warning(f"JADX was unable to decompile {path}: (STDOUT: {output.stdout} STDERR: {output.stderr})")
+            return False
+        return True
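The module shells out to the jadx binary that its ansible tasks download; decompile_apk() is essentially a wrapped CLI call. A standalone sketch of the equivalent invocation, assuming jadx is simply on PATH instead of under BBOT's tools directory:

import subprocess
from pathlib import Path

apk = Path("owasp_mastg.apk")  # sample APK name borrowed from bbot's test data
output_dir = apk.parent / apk.name.replace(".", "_")
output_dir.mkdir(exist_ok=True)

# Same flags the module builds: thread count, output directory, then the APK path.
subprocess.run(
    ["jadx", "--threads-count", "4", "--output-dir", str(output_dir), str(apk)],
    check=True,
)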
bbot/modules/leakix.py
CHANGED
@@ -15,31 +15,32 @@ class leakix(subdomain_enum_apikey):
     }
 
     base_url = "https://leakix.net"
+    ping_url = f"{base_url}/host/1.1.1.1"
 
     async def setup(self):
         ret = await super(subdomain_enum_apikey, self).setup()
-        self.headers = {"Accept": "application/json"}
         self.api_key = self.config.get("api_key", "")
         if self.api_key:
-            self.headers["api-key"] = self.api_key
             return await self.require_api_key()
         return ret
 
-
-
-
-
-
+    def prepare_api_request(self, url, kwargs):
+        if self.api_key:
+            kwargs["headers"]["api-key"] = self.api_key
+        kwargs["headers"]["Accept"] = "application/json"
+        return url, kwargs
 
     async def request_url(self, query):
         url = f"{self.base_url}/api/subdomains/{self.helpers.quote(query)}"
-        response = await self.
+        response = await self.api_request(url)
         return response
 
-    def parse_results(self, r, query=None):
+    async def parse_results(self, r, query=None):
+        results = set()
         json = r.json()
         if json:
             for entry in json:
                 subdomain = entry.get("subdomain", "")
                 if subdomain:
-
+                    results.add(subdomain)
+        return results
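leakix drops its old per-module self.headers bookkeeping in favor of the new prepare_api_request() hook; judging by the signature, the shared API layer passes each outgoing request's url and kwargs through it so auth headers get attached centrally. A standalone sketch of that contract (the plumbing around it is an assumption; only the hook logic comes from the diff):

def prepare_api_request(url, kwargs, api_key=""):
    headers = kwargs.setdefault("headers", {})
    if api_key:
        headers["api-key"] = api_key
    headers["Accept"] = "application/json"
    return url, kwargs

url, kwargs = prepare_api_request("https://leakix.net/api/subdomains/example.com", {}, api_key="<key>")
assert kwargs["headers"] == {"api-key": "<key>", "Accept": "application/json"}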
bbot/modules/myssl.py
CHANGED
@@ -15,9 +15,9 @@ class myssl(subdomain_enum):
 
     async def request_url(self, query):
         url = f"{self.base_url}?domain={self.helpers.quote(query)}"
-        return await self.
+        return await self.api_request(url)
 
-    def parse_results(self, r, query):
+    async def parse_results(self, r, query):
         results = set()
         json = r.json()
         if json and isinstance(json, dict):
bbot/modules/newsletters.py
CHANGED
@@ -46,11 +46,11 @@ class newsletters(BaseModule):
         body = _event.data["body"]
         soup = self.helpers.beautifulsoup(body, "html.parser")
         if soup is False:
-            self.debug(
+            self.debug("BeautifulSoup returned False")
             return
         result = self.find_type(soup)
         if result:
-            description =
+            description = "Found a Newsletter Submission Form that could be used for email bombing attacks"
             data = {"host": str(_event.host), "description": description, "url": _event.data["url"]}
             await self.emit_event(
                 data,
bbot/modules/otx.py
CHANGED
@@ -15,12 +15,14 @@ class otx(subdomain_enum):
 
     def request_url(self, query):
         url = f"{self.base_url}/api/v1/indicators/domain/{self.helpers.quote(query)}/passive_dns"
-        return self.
+        return self.api_request(url)
 
-    def parse_results(self, r, query):
+    async def parse_results(self, r, query):
+        results = set()
         j = r.json()
         if isinstance(j, dict):
             for entry in j.get("passive_dns", []):
                 subdomain = entry.get("hostname", "")
                 if subdomain:
-
+                    results.add(subdomain)
+        return results
bbot/modules/output/asset_inventory.py
CHANGED
@@ -91,15 +91,15 @@ class asset_inventory(CSV):
         self.assets[hostkey].absorb_event(event)
 
     async def report(self):
-        stats =
-        totals =
+        stats = {}
+        totals = {}
 
         def increment_stat(stat, value):
             try:
                 totals[stat] += 1
             except KeyError:
                 totals[stat] = 1
-            if not
+            if stat not in stats:
                 stats[stat] = {}
             try:
                 stats[stat][value] += 1
@@ -259,17 +259,17 @@ class Asset:
         # ips
         self.ip_addresses = set(_make_ip_list(row.get("IP (External)", "")))
         self.ip_addresses.update(set(_make_ip_list(row.get("IP (Internal)", ""))))
-        # If user
+        # If user requests a recheck dont import the following fields to force them to be rechecked
         if not self.recheck:
             # ports
             ports = [i.strip() for i in row.get("Open Ports", "").split(",")]
-            self.ports.update(
+            self.ports.update({i for i in ports if i and is_port(i)})
             # findings
             findings = [i.strip() for i in row.get("Findings", "").splitlines()]
-            self.findings.update(
+            self.findings.update({i for i in findings if i})
             # technologies
             technologies = [i.strip() for i in row.get("Technologies", "").splitlines()]
-            self.technologies.update(
+            self.technologies.update({i for i in technologies if i})
             # risk rating
             risk_rating = row.get("Risk Rating", "").strip()
             if risk_rating and risk_rating.isdigit() and int(risk_rating) > self.risk_rating:
bbot/modules/output/base.py
CHANGED
@@ -24,7 +24,7 @@ class BaseOutputModule(BaseModule):
         if event.type in ("FINISHED",):
             return True, "its type is FINISHED"
         if self.errored:
-            return False,
+            return False, "module is in error state"
         # exclude non-watched types
         if not any(t in self.get_watched_events() for t in ("*", event.type)):
             return False, "its type is not in watched_events"
bbot/modules/output/csv.py
CHANGED
@@ -64,7 +64,7 @@ class CSV(BaseOutputModule):
                 ),
                 "Source Module": str(getattr(event, "module_sequence", "")),
                 "Scope Distance": str(getattr(event, "scope_distance", "")),
-                "Event Tags": ",".join(sorted(
+                "Event Tags": ",".join(sorted(getattr(event, "tags", []))),
                 "Discovery Path": " --> ".join(discovery_path),
             }
         )
bbot/modules/output/http.py
CHANGED
@@ -1,4 +1,3 @@
-from bbot.errors import WebError
 from bbot.modules.output.base import BaseOutputModule
 
 
@@ -52,16 +51,23 @@ class HTTP(BaseOutputModule):
 
     async def handle_event(self, event):
         while 1:
-
-
-
-
-
-
-
-
-
-
-
-
-
+            response = await self.helpers.request(
+                url=self.url,
+                method=self.method,
+                auth=self.auth,
+                headers=self.headers,
+                json=event.json(siem_friendly=self.siem_friendly),
+            )
+            is_success = False if response is None else response.is_success
+            if not is_success:
+                status_code = getattr(response, "status_code", 0)
+                self.warning(f"Error sending {event} (HTTP status code: {status_code}), retrying...")
+                body = getattr(response, "text", "")
+                self.debug(body)
+                if status_code == 429:
+                    sleep_interval = 10
+                else:
+                    sleep_interval = 1
+                await self.helpers.sleep(sleep_interval)
+                continue
+            break
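The rewritten handle_event() loops until the request succeeds, backing off harder when the endpoint rate-limits. The backoff rule, restated on its own as a small sketch:

def retry_sleep_interval(status_code: int) -> int:
    # HTTP 429 means rate limited, so wait 10 seconds; any other failure retries after 1 second.
    return 10 if status_code == 429 else 1

assert retry_sleep_interval(429) == 10
assert retry_sleep_interval(500) == 1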
bbot/modules/output/mysql.py
ADDED
@@ -0,0 +1,51 @@
+from bbot.modules.templates.sql import SQLTemplate
+
+
+class MySQL(SQLTemplate):
+    watched_events = ["*"]
+    meta = {"description": "Output scan data to a MySQL database"}
+    options = {
+        "username": "root",
+        "password": "bbotislife",
+        "host": "localhost",
+        "port": 3306,
+        "database": "bbot",
+    }
+    options_desc = {
+        "username": "The username to connect to MySQL",
+        "password": "The password to connect to MySQL",
+        "host": "The server running MySQL",
+        "port": "The port to connect to MySQL",
+        "database": "The database name to connect to",
+    }
+    deps_pip = ["sqlmodel", "aiomysql"]
+    protocol = "mysql+aiomysql"
+
+    async def create_database(self):
+        from sqlalchemy import text
+        from sqlalchemy.ext.asyncio import create_async_engine
+
+        # Create the engine for the initial connection to the server
+        initial_engine = create_async_engine(self.connection_string().rsplit("/", 1)[0])
+
+        async with initial_engine.connect() as conn:
+            # Check if the database exists
+            result = await conn.execute(text(f"SHOW DATABASES LIKE '{self.database}'"))
+            database_exists = result.scalar() is not None
+
+            # Create the database if it does not exist
+            if not database_exists:
+                # Use aiomysql directly to create the database
+                import aiomysql
+
+                raw_conn = await aiomysql.connect(
+                    user=self.username,
+                    password=self.password,
+                    host=self.host,
+                    port=self.port,
+                )
+                try:
+                    async with raw_conn.cursor() as cursor:
+                        await cursor.execute(f"CREATE DATABASE {self.database}")
+                finally:
+                    await raw_conn.ensure_closed()
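create_database() needs to reach the MySQL server before the target database exists, so it trims the database name off the connection string for the initial engine. A small sketch of that trick, assuming SQLTemplate.connection_string() produces a standard SQLAlchemy URL from the options above (the exact format is not shown in this diff):

# Assumed URL shape, built from the module's protocol and default options.
conn = "mysql+aiomysql://root:bbotislife@localhost:3306/bbot"

# Dropping everything after the last "/" leaves a server-only URL,
# which the initial engine uses before running CREATE DATABASE.
server_only = conn.rsplit("/", 1)[0]
assert server_only == "mysql+aiomysql://root:bbotislife@localhost:3306"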
bbot/modules/output/neo4j.py
CHANGED
@@ -1,10 +1,15 @@
 import json
+import logging
 from contextlib import suppress
 from neo4j import AsyncGraphDatabase
 
 from bbot.modules.output.base import BaseOutputModule
 
 
+# silence annoying neo4j logger
+logging.getLogger("neo4j").setLevel(logging.CRITICAL)
+
+
 class neo4j(BaseOutputModule):
     """
     # start Neo4j in the background with docker
@@ -48,7 +53,7 @@ class neo4j(BaseOutputModule):
                 ),
             )
            self.session = self.driver.session()
-            await self.
+            await self.session.run("Match () Return 1 Limit 1")
        except Exception as e:
            return False, f"Error setting up Neo4j: {e}"
        return True
@@ -110,7 +115,7 @@ class neo4j(BaseOutputModule):
 
        cypher = f"""UNWIND $events AS event
        MERGE (_:{event_type} {{ id: event.id }})
-        SET _ += event
+        SET _ += properties(event)
        RETURN event.data as event_data, event.id as event_id, elementId(_) as neo4j_id"""
        neo4j_ids = {}
        # insert events
bbot/modules/output/postgres.py
ADDED
@@ -0,0 +1,49 @@
+from bbot.modules.templates.sql import SQLTemplate
+
+
+class Postgres(SQLTemplate):
+    watched_events = ["*"]
+    meta = {"description": "Output scan data to a SQLite database"}
+    options = {
+        "username": "postgres",
+        "password": "bbotislife",
+        "host": "localhost",
+        "port": 5432,
+        "database": "bbot",
+    }
+    options_desc = {
+        "username": "The username to connect to Postgres",
+        "password": "The password to connect to Postgres",
+        "host": "The server running Postgres",
+        "port": "The port to connect to Postgres",
+        "database": "The database name to connect to",
+    }
+    deps_pip = ["sqlmodel", "asyncpg"]
+    protocol = "postgresql+asyncpg"
+
+    async def create_database(self):
+        import asyncpg
+        from sqlalchemy import text
+        from sqlalchemy.ext.asyncio import create_async_engine
+
+        # Create the engine for the initial connection to the server
+        initial_engine = create_async_engine(self.connection_string().rsplit("/", 1)[0])
+
+        async with initial_engine.connect() as conn:
+            # Check if the database exists
+            result = await conn.execute(text(f"SELECT 1 FROM pg_database WHERE datname = '{self.database}'"))
+            database_exists = result.scalar() is not None
+
+            # Create the database if it does not exist
+            if not database_exists:
+                # Use asyncpg directly to create the database
+                raw_conn = await asyncpg.connect(
+                    user=self.username,
+                    password=self.password,
+                    host=self.host,
+                    port=self.port,
+                )
+                try:
+                    await raw_conn.execute(f"CREATE DATABASE {self.database}")
+                finally:
+                    await raw_conn.close()
bbot/modules/output/slack.py
CHANGED