bbot 2.6.0.6840rc0__py3-none-any.whl → 2.7.2.7424rc0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bbot/__init__.py +1 -1
- bbot/cli.py +22 -8
- bbot/core/engine.py +1 -1
- bbot/core/event/__init__.py +2 -2
- bbot/core/event/base.py +138 -110
- bbot/core/flags.py +1 -0
- bbot/core/helpers/bloom.py +6 -7
- bbot/core/helpers/depsinstaller/installer.py +21 -2
- bbot/core/helpers/dns/dns.py +0 -1
- bbot/core/helpers/dns/engine.py +0 -2
- bbot/core/helpers/files.py +2 -2
- bbot/core/helpers/git.py +17 -0
- bbot/core/helpers/helper.py +6 -5
- bbot/core/helpers/misc.py +8 -23
- bbot/core/helpers/ntlm.py +0 -2
- bbot/core/helpers/regex.py +1 -1
- bbot/core/helpers/regexes.py +25 -8
- bbot/core/helpers/web/web.py +2 -1
- bbot/core/modules.py +22 -60
- bbot/defaults.yml +4 -2
- bbot/modules/apkpure.py +1 -1
- bbot/modules/baddns.py +1 -1
- bbot/modules/baddns_direct.py +1 -1
- bbot/modules/baddns_zone.py +1 -1
- bbot/modules/badsecrets.py +1 -1
- bbot/modules/base.py +123 -38
- bbot/modules/bucket_amazon.py +1 -1
- bbot/modules/bucket_digitalocean.py +1 -1
- bbot/modules/bucket_firebase.py +1 -1
- bbot/modules/bucket_google.py +1 -1
- bbot/modules/{bucket_azure.py → bucket_microsoft.py} +2 -2
- bbot/modules/builtwith.py +4 -2
- bbot/modules/dnsbimi.py +1 -4
- bbot/modules/dnsbrute.py +6 -1
- bbot/modules/dnsdumpster.py +35 -52
- bbot/modules/dnstlsrpt.py +0 -6
- bbot/modules/docker_pull.py +1 -1
- bbot/modules/emailformat.py +17 -1
- bbot/modules/ffuf.py +4 -1
- bbot/modules/ffuf_shortnames.py +6 -3
- bbot/modules/filedownload.py +7 -4
- bbot/modules/git_clone.py +47 -22
- bbot/modules/gitdumper.py +4 -14
- bbot/modules/github_workflows.py +6 -5
- bbot/modules/gitlab_com.py +31 -0
- bbot/modules/gitlab_onprem.py +84 -0
- bbot/modules/gowitness.py +0 -6
- bbot/modules/graphql_introspection.py +5 -2
- bbot/modules/httpx.py +2 -0
- bbot/modules/iis_shortnames.py +0 -7
- bbot/modules/internal/cloudcheck.py +65 -72
- bbot/modules/internal/unarchive.py +9 -3
- bbot/modules/lightfuzz/lightfuzz.py +6 -2
- bbot/modules/lightfuzz/submodules/esi.py +42 -0
- bbot/modules/medusa.py +4 -7
- bbot/modules/nuclei.py +1 -1
- bbot/modules/otx.py +9 -2
- bbot/modules/output/base.py +3 -11
- bbot/modules/paramminer_headers.py +10 -7
- bbot/modules/portfilter.py +2 -0
- bbot/modules/postman_download.py +1 -1
- bbot/modules/retirejs.py +232 -0
- bbot/modules/securitytxt.py +0 -3
- bbot/modules/sslcert.py +2 -2
- bbot/modules/subdomaincenter.py +1 -16
- bbot/modules/telerik.py +7 -2
- bbot/modules/templates/bucket.py +24 -4
- bbot/modules/templates/gitlab.py +98 -0
- bbot/modules/trufflehog.py +6 -3
- bbot/modules/wafw00f.py +2 -2
- bbot/presets/web/lightfuzz-heavy.yml +1 -1
- bbot/presets/web/lightfuzz-medium.yml +1 -1
- bbot/presets/web/lightfuzz-superheavy.yml +1 -1
- bbot/scanner/manager.py +44 -37
- bbot/scanner/scanner.py +12 -4
- bbot/scripts/benchmark_report.py +433 -0
- bbot/test/benchmarks/__init__.py +2 -0
- bbot/test/benchmarks/test_bloom_filter_benchmarks.py +105 -0
- bbot/test/benchmarks/test_closest_match_benchmarks.py +76 -0
- bbot/test/benchmarks/test_event_validation_benchmarks.py +438 -0
- bbot/test/benchmarks/test_excavate_benchmarks.py +291 -0
- bbot/test/benchmarks/test_ipaddress_benchmarks.py +143 -0
- bbot/test/benchmarks/test_weighted_shuffle_benchmarks.py +70 -0
- bbot/test/test_step_1/test_bbot_fastapi.py +2 -2
- bbot/test/test_step_1/test_events.py +22 -21
- bbot/test/test_step_1/test_helpers.py +1 -0
- bbot/test/test_step_1/test_manager_scope_accuracy.py +45 -0
- bbot/test/test_step_1/test_modules_basic.py +40 -15
- bbot/test/test_step_1/test_python_api.py +2 -2
- bbot/test/test_step_1/test_regexes.py +21 -4
- bbot/test/test_step_1/test_scan.py +7 -8
- bbot/test/test_step_1/test_web.py +46 -0
- bbot/test/test_step_2/module_tests/base.py +6 -1
- bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +52 -18
- bbot/test/test_step_2/module_tests/test_module_bucket_google.py +1 -1
- bbot/test/test_step_2/module_tests/{test_module_bucket_azure.py → test_module_bucket_microsoft.py} +7 -5
- bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +19 -31
- bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +2 -1
- bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +3 -5
- bbot/test/test_step_2/module_tests/test_module_emailformat.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_emails.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_excavate.py +57 -4
- bbot/test/test_step_2/module_tests/test_module_github_workflows.py +10 -1
- bbot/test/test_step_2/module_tests/test_module_gitlab_com.py +66 -0
- bbot/test/test_step_2/module_tests/{test_module_gitlab.py → test_module_gitlab_onprem.py} +4 -69
- bbot/test/test_step_2/module_tests/test_module_lightfuzz.py +71 -3
- bbot/test/test_step_2/module_tests/test_module_nuclei.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_otx.py +3 -0
- bbot/test/test_step_2/module_tests/test_module_portfilter.py +2 -0
- bbot/test/test_step_2/module_tests/test_module_retirejs.py +161 -0
- bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_trufflehog.py +10 -1
- {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/METADATA +10 -7
- {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/RECORD +117 -106
- {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/WHEEL +1 -1
- {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info/licenses}/LICENSE +98 -58
- bbot/modules/censys.py +0 -98
- bbot/modules/gitlab.py +0 -141
- bbot/modules/zoomeye.py +0 -77
- bbot/test/test_step_2/module_tests/test_module_censys.py +0 -83
- bbot/test/test_step_2/module_tests/test_module_zoomeye.py +0 -35
- {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/entry_points.txt +0 -0
bbot/test/test_step_1/test_manager_scope_accuracy.py
@@ -848,3 +848,48 @@ async def test_manager_scope_tagging(bbot_scanner):
     assert not distance_tags
 
     await scan._cleanup()
+
+
+@pytest.mark.asyncio
+async def test_scope_accuracy_with_special_urls(bbot_scanner, bbot_httpserver):
+    """
+    This is a regression test for https://github.com/blacklanternsecurity/bbot/issues/2785
+
+    The original bug was that the "special URL" filtering logic (for Javascript URLs etc.)
+    was causing special URLs to be rejected by critical internal modules like `_scan_egress`, leading to the output of unwanted URLs.
+    """
+    bbot_httpserver.expect_request(uri="/v2/users/spacex").respond_with_data(response_data="")
+    bbot_httpserver.expect_request(uri="/u/spacex").respond_with_data(response_data="<a href='http://127.0.0.1:8888/asdf.js'/>")
+
+    scan = bbot_scanner("ORG:spacex", modules=["httpx", "social", "dockerhub"], config={"speculate": True, "excavate": True})
+
+    await scan._prep()
+    scan.modules["dockerhub"].site_url = "http://127.0.0.1:8888"
+    scan.modules["dockerhub"].api_url = "http://127.0.0.1:8888/v2"
+
+    from bbot.modules.base import BaseModule
+
+    class DummyModule(BaseModule):
+        _name = "dummy_module"
+        watched_events = ["*"]
+        scope_distance_modifier = 10
+        accept_dupes = True
+        accept_url_special = True
+        events = []
+
+        async def handle_event(self, event):
+            self.events.append(event)
+
+    dummy_module = DummyModule(scan)
+    scan.modules["dummy_module"] = dummy_module
+
+    events = [e async for e in scan.async_start()]
+
+    # there are actually 2 URL events. They are both from the same URL, but one was extracted by the full URL regex, and the other by the src/href= regex.
+    # however, they should be deduped by scan_ingress.
+    bad_url_events = [e for e in dummy_module.events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/asdf.js"]
+    assert len(bad_url_events) == 1
+    # they should both be internal
+    assert all(e.internal is True for e in bad_url_events)
+    # but they shouldn't be output at all
+    assert not any(e.type == "URL_UNVERIFIED" for e in events)
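The regression test above hinges on the `accept_url_special` attribute, which a module can set to receive "special" URLs (such as .js links) that are otherwise filtered out. Below is a minimal sketch of that opt-in pattern; only the attribute names are taken from the diff, while the module name, event filter, and logging are illustrative.

# Illustrative only: a module that opts in to "special" URLs (e.g. Javascript
# links) that most modules never see. The attribute names mirror the
# DummyModule in the test above; the module name and logging are made up.
from bbot.modules.base import BaseModule


class js_url_watcher(BaseModule):
    _name = "js_url_watcher"
    watched_events = ["URL_UNVERIFIED"]
    produced_events = []
    accept_url_special = True  # receive special URLs instead of having them filtered out
    accept_dupes = True

    async def handle_event(self, event):
        if event.data.endswith(".js"):
            self.info(f"saw special URL: {event.data}")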
bbot/test/test_step_1/test_modules_basic.py
@@ -29,7 +29,7 @@ async def test_modules_basic_checks(events, httpx_mock):
     localhost._internal = True
     result, reason = base_output_module_1._event_precheck(localhost)
     assert result is False
-    assert reason == "
+    assert reason == "event is internal and output modules don't accept internal events"
     localhost._internal = False
     result, reason = base_output_module_1._event_precheck(localhost)
     assert result is True
@@ -57,7 +57,7 @@ async def test_modules_basic_checks(events, httpx_mock):
     localhost._internal = True
     result, reason = base_output_module_2._event_precheck(localhost)
     assert result is False
-    assert reason == "
+    assert reason == "event is internal and output modules don't accept internal events"
     localhost._internal = False
     result, reason = base_output_module_2._event_precheck(localhost)
     assert result is True
@@ -66,7 +66,7 @@ async def test_modules_basic_checks(events, httpx_mock):
     localhost._omit = True
     result, reason = base_output_module_2._event_precheck(localhost)
    assert result is False
-    assert reason == "
+    assert reason == "its type is omitted in the config"
     # normal event should be accepted
     url_unverified = scan.make_event("http://127.0.0.1", "URL_UNVERIFIED", parent=scan.root_event)
     result, reason = base_output_module_2._event_precheck(url_unverified)
@@ -76,18 +76,18 @@ async def test_modules_basic_checks(events, httpx_mock):
     await scan.egress_module.handle_event(url_unverified)
     result, reason = base_output_module_2._event_precheck(url_unverified)
     assert result is False
-    assert reason == "
-
-
-
-
-
-
-
-
-
-
-    assert
+    assert reason == "its type is omitted in the config"
+
+    egress_module = scan.egress_module
+    url = scan.make_event("http://evilcorp.com", "URL_UNVERIFIED", parent=scan.root_event, tags=["target"])
+    assert url._omit is False
+    # targets should not be omitted
+    await egress_module.handle_event(url)
+    assert url._omit is False
+    # non-targets should be omitted
+    url = scan.make_event("http://evilcorp.com", "URL_UNVERIFIED", parent=scan.root_event)
+    await egress_module.handle_event(url)
+    assert url._omit is True
 
     # common event filtering tests
     for module_class in (BaseModule, BaseOutputModule, BaseReportModule, BaseInternalModule):
@@ -342,6 +342,31 @@ async def test_modules_basic_perdomainonly(bbot_scanner, monkeypatch):
     await per_domain_scan._cleanup()
 
 
+@pytest.mark.asyncio
+async def test_modules_basic_setup_deps(bbot_scanner):
+    from bbot.modules.base import BaseModule
+
+    class dummy(BaseModule):
+        _name = "dummy"
+        deps_ran = False
+        setup_ran = False
+
+        async def setup_deps(self):
+            self.deps_ran = True
+            return True
+
+        async def setup(self):
+            self.setup_ran = True
+            return True
+
+    scan = bbot_scanner()
+    scan.modules["dummy"] = dummy(scan)
+    await scan.setup_modules(deps_only=True)
+    assert scan.modules["dummy"].deps_ran
+    assert not scan.modules["dummy"].setup_ran
+    await scan._cleanup()
+
+
 @pytest.mark.asyncio
 async def test_modules_basic_stats(helpers, events, bbot_scanner, httpx_mock, monkeypatch):
     from bbot.modules.base import BaseModule
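The new `test_modules_basic_setup_deps` above exercises a `setup_deps()` hook that runs separately from `setup()` when `scan.setup_modules(deps_only=True)` is called. The following is a hedged sketch of how a module might use that split; the wordlist handling and module name are invented purely for illustration.

# Illustrative only: the setup_deps()/setup() split mirrors the dummy module in
# the test above; the wordlist details are made up for the example.
import tempfile
from pathlib import Path

from bbot.modules.base import BaseModule


class wordlist_example(BaseModule):
    _name = "wordlist_example"
    watched_events = ["DNS_NAME"]
    produced_events = []

    # a made-up location for the example wordlist
    wordlist_path = Path(tempfile.gettempdir()) / "bbot_example_wordlist.txt"

    async def setup_deps(self):
        # heavy one-time preparation goes here; it can be run on its own via
        # scan.setup_modules(deps_only=True), exactly as the new test does
        if not self.wordlist_path.exists():
            self.wordlist_path.write_text("www\nmail\ndev\n")
        return True

    async def setup(self):
        # lightweight per-scan setup
        self.prefixes = self.wordlist_path.read_text().splitlines() if self.wordlist_path.exists() else []
        return True

    async def handle_event(self, event):
        self.debug(f"{event.data}: {len(self.prefixes)} candidate prefixes loaded")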
bbot/test/test_step_1/test_python_api.py
@@ -10,7 +10,7 @@ async def test_python_api():
     events1 = []
     async for event in scan1.async_start():
         events1.append(event)
-    assert any("127.0.0.1"
+    assert any(e.type == "IP_ADDRESS" and e.data == "127.0.0.1" for e in events1)
     # make sure output files work
     scan2 = Scanner("127.0.0.1", output_modules=["json"], scan_name="python_api_test")
     await scan2.async_start_without_generator()
@@ -69,7 +69,7 @@ def test_python_api_sync():
     events1 = []
     for event in scan1.start():
         events1.append(event)
-    assert any("127.0.0.1"
+    assert any(e.type == "IP_ADDRESS" and e.data == "127.0.0.1" for e in events1)
     # make sure output files work
     scan2 = Scanner("127.0.0.1", output_modules=["json"], scan_name="python_api_test")
     scan2.start_without_generator()
bbot/test/test_step_1/test_regexes.py
@@ -6,9 +6,6 @@ from bbot.core.helpers import regexes
 from bbot.errors import ValidationError
 from bbot.core.event.helpers import EventSeed
 
-# NOTE: :2001:db8:: will currently cause an exception...
-# e.g. raised unknown error: split_port() failed to parse netloc ":2001:db8::"
-
 
 def test_ip_regexes():
     bad_ip = [
@@ -23,6 +20,15 @@ def test_ip_regexes():
         "2001:db8:g::",  # includes non-hex character,
         "2001.db8.80",  # weird dot separated thing that might actually resolve as a DNS_NAME
         "9e:3e:53:29:43:64",  # MAC address, poor regex patterning will often detect these.
+        "2001:db8:1:2:3:4:5",  # only 7 groups, no zero-compression
+        "2001:db8:1:2:3:4:5:6:7",  # too many groups
+        "2001:db8::1::1",  # multiple ::
+        "2001:db8::zzzz",  # non-hex character
+        "2001:db8::12345",  # hex value too long
+        ":2001:db8::1",  # starts with :
+        ":2001:db8::",  # starts with :
+        "cafe:80",  # looks like open port
+        "12:34:56:78:9A:BC",  # mac address
     ]
 
     good_ip = [
@@ -46,6 +52,17 @@ def test_ip_regexes():
         "1::1",
         "ffff::ffff",
         "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff",
+        "2001:db8::ff00:42:8329",
+        "2001:0db8:0000:0000:0000:0000:0000:0001",
+        "2001:db8:0:0:0:0:0:1",
+        "2001:db8::1",
+        "2001:db8::dead:beef",
+        "2001:db8:1:2:3:4:5:6",
+        "2001:db8:1:2:3:4:5:ffff",
+        "::",
+        "::ffff",
+        "::dead:beef",
+        "::DEAD:BEEF",
     ]
 
     ip_address_regexes = regexes.event_type_regexes["IP_ADDRESS"]
@@ -61,7 +78,7 @@ def test_ip_regexes():
             if ip.startswith("["):
                 assert ip == "[2001:db8::]:80"
             else:
-                assert ip
+                assert ip in ("cafe:80", "203.0.113.0:80")
             continue
         if event_type == "DNS_NAME":
             if ip.startswith("2001"):
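The new IPv6 vectors added to `bad_ip` and `good_ip` can be cross-checked against Python's standard `ipaddress` module, which agrees with the expectations encoded in the test. This snippet is a standalone sanity check, not part of the package.

# Cross-check of a few of the new test vectors using the stdlib; not part of
# bbot itself, just a quick way to confirm which strings are valid IPv6.
import ipaddress

bad = ["2001:db8:1:2:3:4:5", "2001:db8::1::1", "2001:db8::zzzz", ":2001:db8::1", "cafe:80"]
good = ["2001:db8::ff00:42:8329", "2001:db8:0:0:0:0:0:1", "::", "::dead:beef", "::DEAD:BEEF"]

for s in bad:
    try:
        ipaddress.ip_address(s)
        raise AssertionError(f"{s!r} unexpectedly parsed as an IP")
    except ValueError:
        pass  # expected: not a valid address

for s in good:
    assert ipaddress.ip_address(s).version == 6, f"{s!r} should parse as IPv6"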
bbot/test/test_step_1/test_scan.py
@@ -111,7 +111,6 @@ async def test_task_scan_handle_event_timeout(bbot_scanner):
     class LongBatchModule(BaseModule):
         watched_events = ["IP_ADDRESS"]
         handled_event = False
-        canceled = False
         _name = "long_batch"
         _batch_size = 2
 
@@ -147,24 +146,18 @@ async def test_task_scan_handle_event_timeout(bbot_scanner):
 
 @pytest.mark.asyncio
 async def test_url_extension_handling(bbot_scanner):
-    scan = bbot_scanner(config={"url_extension_blacklist": ["css"]
+    scan = bbot_scanner(config={"url_extension_blacklist": ["css"]})
     await scan._prep()
     assert scan.url_extension_blacklist == {"css"}
-    assert scan.url_extension_httpx_only == {"js"}
     good_event = scan.make_event("https://evilcorp.com/a.txt", "URL", tags=["status-200"], parent=scan.root_event)
     bad_event = scan.make_event("https://evilcorp.com/a.css", "URL", tags=["status-200"], parent=scan.root_event)
-    httpx_event = scan.make_event("https://evilcorp.com/a.js", "URL", tags=["status-200"], parent=scan.root_event)
     assert "blacklisted" not in bad_event.tags
-    assert "httpx-only" not in httpx_event.tags
     result = await scan.ingress_module.handle_event(good_event)
     assert result is None
     result, reason = await scan.ingress_module.handle_event(bad_event)
     assert result is False
     assert reason == "event is blacklisted"
     assert "blacklisted" in bad_event.tags
-    result = await scan.ingress_module.handle_event(httpx_event)
-    assert result is None
-    assert "httpx-only" in httpx_event.tags
 
     await scan._cleanup()
 
@@ -284,3 +277,9 @@ async def test_exclude_cdn(bbot_scanner, monkeypatch):
         "www.evilcorp.com:80",
         "www.evilcorp.com:443",
     }
+
+
+async def test_scan_name(bbot_scanner):
+    scan = bbot_scanner("evilcorp.com", name="test_scan_name")
+    assert scan.name == "test_scan_name"
+    assert scan.preset.scan_name == "test_scan_name"
bbot/test/test_step_1/test_web.py
@@ -498,3 +498,49 @@ async def test_http_sendcookies(bbot_scanner, bbot_httpserver):
     assert r1 is not None, "Request to self-signed SSL server went through even with ssl_verify=True"
     assert "bar" in r1.text
     await scan1._cleanup()
+
+
+@pytest.mark.asyncio
+async def test_api_download_api_key_cycle(bbot_scanner, bbot_httpserver):
+    from werkzeug.wrappers import Response
+    from bbot.modules.base import BaseModule
+
+    endpoint = "/api_download_cycle_one_test"
+    url = bbot_httpserver.url_for(endpoint)
+
+    seen_auth = []
+    n_request = 0
+
+    # First key should trigger 500, second key should succeed with 200
+    def handler(request):
+        nonlocal n_request
+        n_request += 1
+        auth = request.headers.get("Authorization", "")
+        seen_auth.append(auth)
+        if auth == "Bearer k1":
+            if n_request == 1:
+                return Response("ok_k1", status=200)
+            return Response("fail_k1", status=500)
+        elif auth == "Bearer k2":
+            return Response("ok_k2", status=200)
+        return Response("unexpected_key", status=400)
+
+    bbot_httpserver.expect_request(uri=endpoint).respond_with_handler(handler)
+
+    scan = bbot_scanner("127.0.0.1")
+    module = BaseModule(scan)
+    module.api_key = ["k1", "k2"]
+
+    filename = await module.api_download(url)
+    assert filename is not None
+    with open(filename) as f:
+        assert f.read() == "ok_k1"
+
+    assert seen_auth == ["Bearer k1"]
+
+    filename = await module.api_download(url)
+
+    # verify the requests occurred in expected order with expected API keys
+    assert seen_auth == ["Bearer k1", "Bearer k1", "Bearer k2"]
+
+    await scan._cleanup()
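`test_api_download_api_key_cycle` above demonstrates API-key cycling: when `module.api_key` is a list, a failed download is retried with the next key. The sketch below condenses the same flow outside pytest; the target, URL, and key values are placeholders.

# Condensed illustration of the key-cycling behavior exercised by the test
# above; the target, URL and key values are placeholders.
import asyncio

from bbot.scanner import Scanner
from bbot.modules.base import BaseModule


async def main():
    scan = Scanner("127.0.0.1")
    module = BaseModule(scan)
    # a list of keys enables cycling: if a request fails with the current key,
    # api_download() retries with the next one
    module.api_key = ["first-key", "second-key"]
    filename = await module.api_download("https://api.example.com/export")
    print(filename)
    await scan._cleanup()


asyncio.run(main())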
bbot/test/test_step_2/module_tests/base.py
@@ -61,6 +61,7 @@ class ModuleTestBase:
             config=self.config,
             whitelist=module_test_base.whitelist,
             blacklist=module_test_base.blacklist,
+            force_start=getattr(module_test_base, "force_start", False),
         )
         self.events = []
         self.log = logging.getLogger(f"bbot.test.{module_test_base.name}")
@@ -108,10 +109,14 @@ class ModuleTestBase:
         self.log.debug("Executing setup_after_prep()")
         await self.setup_after_prep(module_test)
         self.log.debug("Starting scan")
-
+        await self._execute_scan(module_test)
         self.log.debug(f"Finished {module_test.name} module test")
         yield module_test
 
+    async def _execute_scan(self, module_test):
+        """Execute the scan and collect events. Can be overridden by benchmark classes."""
+        module_test.events = [e async for e in module_test.scan.async_start()]
+
     @pytest.mark.asyncio
     async def test_module_run(self, module_test):
         from bbot.core.helpers.misc import execute_sync_or_async
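The new `_execute_scan()` hook on `ModuleTestBase` is explicitly meant to be overridable by benchmark classes. Below is a hypothetical override that times the scan; only the hook itself comes from the diff, while the class name, targets, and timing logic are invented.

# Hypothetical benchmark-style subclass; only the _execute_scan() hook comes
# from the diff above, the timing wrapper and class name are invented.
import time

from .base import ModuleTestBase


class BenchmarkExampleModule(ModuleTestBase):
    targets = ["127.0.0.1:8888"]
    modules_overrides = ["httpx"]

    async def _execute_scan(self, module_test):
        # same event collection as ModuleTestBase, wrapped with a stopwatch
        start = time.monotonic()
        module_test.events = [e async for e in module_test.scan.async_start()]
        module_test.elapsed = time.monotonic() - start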
bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py
@@ -21,6 +21,8 @@ class Bucket_Amazon_Base(ModuleTestBase):
     random_bucket_2 = f"{random_bucket_name_2}.s3-ap-southeast-2.amazonaws.com"
     random_bucket_3 = f"{random_bucket_name_3}.s3.amazonaws.com"
 
+    nonexistent_is_404 = True
+
     open_bucket_body = """<?xml version="1.0" encoding="UTF-8"?>
 <ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Name>vpn-static</Name><Prefix></Prefix><Marker></Marker><MaxKeys>1000</MaxKeys><IsTruncated>false</IsTruncated><Contents><Key>style.css</Key><LastModified>2017-03-18T06:41:59.000Z</LastModified><ETag>"bf9e72bdab09b785f05ff0395023cc35"</ETag><Size>429</Size><StorageClass>STANDARD</StorageClass></Contents></ListBucketResult>"""
 
@@ -66,30 +68,62 @@ class Bucket_Amazon_Base(ModuleTestBase):
             url=self.url_3,
             text="",
         )
-
+        if self.nonexistent_is_404:
+            module_test.httpx_mock.add_response(url=re.compile(".*"), text="", status_code=404)
 
     def check(self, module_test, events):
-
-        assert
-
+        storage_buckets = [e for e in events if e.type == "STORAGE_BUCKET"]
+        assert len(storage_buckets) == 3
+        assert 1 == len(
+            [
+                e
+                for e in storage_buckets
+                if e.data["name"] == random_bucket_name_1
+                and str(e.module) == "cloudcheck"
+                and f"cloud-{self.provider}" in e.tags
+                and f"{self.provider}-domain" in e.tags
+            ]
+        )
+        assert 1 == len(
+            [
+                e
+                for e in storage_buckets
+                if e.data["name"] == random_bucket_name_2
+                and str(e.module) == "cloudcheck"
+                and f"cloud-{self.provider}" in e.tags
+                and f"{self.provider}-domain" in e.tags
+            ]
+        )
+        assert 1 == len(
+            [
+                e
+                for e in storage_buckets
+                if e.data["name"] == random_bucket_name_3
+                and str(e.module) == str(self.module_name)
+                and f"cloud-{module_test.module.cloudcheck_provider_name.lower()}" in e.tags
+                and f"{module_test.module.cloudcheck_provider_name.lower()}-domain" in e.tags
+            ]
         )
         # make sure open buckets were found
         if module_test.module.supports_open_check:
-            assert
-
-
-
-
-
-
-
-
+            assert 1 == len(
+                [
+                    e
+                    for e in events
+                    if e.type == "FINDING"
+                    and str(e.module) == self.module_name
+                    and e.data.get("url") == f"https://{self.random_bucket_2}/"
+                ]
+            ), f'open bucket not found for module "{self.module_name}"'
         # make sure bucket mutations were found
-            assert
-
-
-
-
+        assert 1 == len(
+            [
+                e
+                for e in events
+                if e.type == "STORAGE_BUCKET"
+                and str(e.module) == self.module_name
+                and f"{random_bucket_name_3}" in e.data["url"]
+            ]
         ), f'bucket (dev mutation: {self.random_bucket_3}) not found for module "{self.module_name}"'
 
 
bbot/test/test_step_2/module_tests/test_module_bucket_google.py
@@ -22,6 +22,6 @@ class TestBucket_Google(Bucket_Amazon_Base):
     """
 
     def url_setup(self):
-        self.url_1 = f"{random_bucket_name_1}.storage.googleapis.com"
+        self.url_1 = f"https://{random_bucket_name_1}.storage.googleapis.com"
         self.url_2 = f"https://www.googleapis.com/storage/v1/b/{random_bucket_name_2}/iam/testPermissions?&permissions=storage.buckets.get&permissions=storage.buckets.list&permissions=storage.buckets.create&permissions=storage.buckets.delete&permissions=storage.buckets.setIamPolicy&permissions=storage.objects.get&permissions=storage.objects.list&permissions=storage.objects.create&permissions=storage.objects.delete&permissions=storage.objects.setIamPolicy"
         self.url_3 = f"https://www.googleapis.com/storage/v1/b/{random_bucket_name_3}"
bbot/test/test_step_2/module_tests/{test_module_bucket_azure.py → test_module_bucket_microsoft.py}
RENAMED
@@ -2,21 +2,23 @@ from .test_module_bucket_amazon import *
 from .base import ModuleTestBase
 
 
-class
-    provider = "
+class TestBucket_Microsoft(Bucket_Amazon_Base):
+    provider = "microsoft"
     random_bucket_1 = f"{random_bucket_name_1}.blob.core.windows.net"
     random_bucket_2 = f"{random_bucket_name_2}.blob.core.windows.net"
     random_bucket_3 = f"{random_bucket_name_3}.blob.core.windows.net"
 
+    nonexistent_is_404 = False
+
     def url_setup(self):
         self.url_1 = f"https://{self.random_bucket_1}"
         self.url_2 = f"https://{self.random_bucket_2}"
         self.url_3 = f"https://{self.random_bucket_3}/{random_bucket_name_3}?restype=container"
 
 
-class
+class TestBucket_Microsoft_NoDup(ModuleTestBase):
     targets = ["tesla.com"]
-    module_name = "
+    module_name = "bucket_microsoft"
     config_overrides = {"cloudcheck": True}
 
     async def setup_before_prep(self, module_test):
@@ -42,7 +44,7 @@ class TestBucket_Azure_NoDup(ModuleTestBase):
         )
 
 
-class
+class TestBucket_Microsoft_NoDup(TestBucket_Microsoft_NoDup):
     """
     This tests _suppress_chain_dupes functionality to make sure it works as expected
     """
bbot/test/test_step_2/module_tests/test_module_cloudcheck.py
@@ -13,29 +13,12 @@ class TestCloudCheck(ModuleTestBase):
         scan = Scanner(config={"cloudcheck": True})
         await scan._prep()
         module = scan.modules["cloudcheck"]
-
-        # make sure we have all the providers
-        provider_names = (
-            "amazon",
-            "google",
-            "azure",
-            "digitalocean",
-            "oracle",
-            "akamai",
-            "cloudflare",
-            "github",
-            "zoho",
-            "fastly",
-        )
-        for provider_name in provider_names:
-            assert provider_name in providers
+        from cloudcheck import providers
 
-
-        assert
-        amazon_range = next(iter(amazon_ranges))
-        amazon_address = amazon_range.broadcast_address
+        # make sure we have at least one provider
+        assert providers.Amazon.name == "Amazon"
 
-        ip_event = scan.make_event(
+        ip_event = scan.make_event("8.8.8.8", parent=scan.root_event)
         aws_event1 = scan.make_event("amazonaws.com", parent=scan.root_event)
         aws_event2 = scan.make_event("asdf.amazonaws.com", parent=scan.root_event)
         aws_event3 = scan.make_event("asdfamazonaws.com", parent=scan.root_event)
@@ -44,16 +27,20 @@ class TestCloudCheck(ModuleTestBase):
         other_event1 = scan.make_event("cname.evilcorp.com", parent=scan.root_event)
         other_event2 = scan.make_event("cname2.evilcorp.com", parent=scan.root_event)
         other_event3 = scan.make_event("cname3.evilcorp.com", parent=scan.root_event)
-        other_event2._resolved_hosts = {
+        other_event2._resolved_hosts = {"8.8.8.8"}
         other_event3._resolved_hosts = {"asdf.amazonaws.com"}
 
-        for event in (ip_event,
+        for event in (ip_event, other_event2):
+            await module.handle_event(ip_event)
+            assert "cloud-google" in ip_event.tags
+            assert "google-ip" in ip_event.tags
+
+        for event in (aws_event1, aws_event2, aws_event4, other_event3):
             await module.handle_event(event)
             assert "cloud-amazon" in event.tags, f"{event} was not properly cloud-tagged"
 
-        assert "
-        assert "
-        assert "cloud-cname" in other_event3.tags
+        assert "amazon-domain" in aws_event1.tags
+        assert "amazon-cname" in other_event3.tags
 
         for event in (aws_event3, other_event1):
             await module.handle_event(event)
@@ -70,13 +57,10 @@ class TestCloudCheck(ModuleTestBase):
         for event in (google_event1, google_event2, google_event3):
             await module.handle_event(event)
             assert "cloud-google" in event.tags, f"{event} was not properly cloud-tagged"
-        assert "cloud-storage-bucket" in google_event3.tags
 
         await scan._cleanup()
 
     def check(self, module_test, events):
-        for e in events:
-            self.log.debug(e)
         assert 2 == len([e for e in events if e.type == "STORAGE_BUCKET"])
         assert 1 == len(
             [
@@ -84,8 +68,10 @@ class TestCloudCheck(ModuleTestBase):
                 for e in events
                 if e.type == "STORAGE_BUCKET"
                 and e.data["name"] == "asdf"
+                and str(e.module) == "cloudcheck"
                 and "cloud-amazon" in e.tags
-                and "
+                and "amazon-domain" in e.tags
+                and e.scope_distance == 1
             ]
         )
         assert 1 == len(
@@ -94,7 +80,9 @@ class TestCloudCheck(ModuleTestBase):
                 for e in events
                 if e.type == "STORAGE_BUCKET"
                 and e.data["name"] == "asdf2"
+                and str(e.module) == "cloudcheck"
                 and "cloud-google" in e.tags
-                and "
+                and "google-domain" in e.tags
+                and e.scope_distance == 0
             ]
         )
bbot/test/test_step_2/module_tests/test_module_dnsbimi.py
@@ -6,11 +6,12 @@ raw_bimi_txt_default = (
 raw_bimi_txt_nondefault = '"v=BIMI1; l=https://nondefault.thirdparty.tld/brand/logo.svg;a=https://nondefault.thirdparty.tld/brand/certificate.pem;"'
 
 
-class
+class TestDnsbimi(ModuleTestBase):
     targets = ["test.localdomain"]
     modules_overrides = ["dnsbimi", "speculate"]
     config_overrides = {
         "modules": {"dnsbimi": {"emit_raw_dns_records": True, "selectors": "default,nondefault"}},
+        "omit_event_types": ["HTTP_RESPONSE", "RAW_TEXT", "DNS_NAME_UNRESOLVED", "FILESYSTEM", "WEB_PARAMETER"],
     }
 
     async def setup_after_prep(self, module_test):