bbot 2.2.0.5263rc0-py3-none-any.whl → 2.2.0.5309rc0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of bbot might be problematic.
- bbot/__init__.py +1 -1
- bbot/cli.py +1 -1
- bbot/core/engine.py +2 -2
- bbot/core/event/base.py +23 -2
- bbot/core/helpers/bloom.py +8 -1
- bbot/core/helpers/depsinstaller/installer.py +8 -5
- bbot/core/helpers/dns/helpers.py +2 -2
- bbot/core/helpers/helper.py +4 -3
- bbot/core/helpers/misc.py +29 -5
- bbot/core/helpers/regexes.py +2 -1
- bbot/core/helpers/web/web.py +1 -1
- bbot/defaults.yml +3 -0
- bbot/modules/anubisdb.py +1 -1
- bbot/modules/baddns.py +1 -1
- bbot/modules/bevigil.py +2 -2
- bbot/modules/binaryedge.py +1 -1
- bbot/modules/bufferoverrun.py +2 -3
- bbot/modules/builtwith.py +2 -2
- bbot/modules/c99.py +4 -2
- bbot/modules/certspotter.py +4 -2
- bbot/modules/chaos.py +4 -2
- bbot/modules/columbus.py +1 -1
- bbot/modules/crt.py +4 -2
- bbot/modules/digitorus.py +1 -1
- bbot/modules/dnscaa.py +3 -3
- bbot/modules/fullhunt.py +1 -1
- bbot/modules/hackertarget.py +4 -2
- bbot/modules/internal/excavate.py +2 -3
- bbot/modules/internal/speculate.py +34 -24
- bbot/modules/leakix.py +6 -5
- bbot/modules/myssl.py +1 -1
- bbot/modules/otx.py +4 -2
- bbot/modules/passivetotal.py +4 -2
- bbot/modules/rapiddns.py +2 -7
- bbot/modules/securitytrails.py +4 -2
- bbot/modules/shodan_dns.py +1 -1
- bbot/modules/subdomaincenter.py +1 -1
- bbot/modules/templates/subdomain_enum.py +3 -3
- bbot/modules/trickest.py +1 -1
- bbot/modules/virustotal.py +2 -7
- bbot/modules/zoomeye.py +5 -3
- bbot/presets/fast.yml +16 -0
- bbot/presets/spider.yml +4 -0
- bbot/scanner/manager.py +1 -2
- bbot/scanner/preset/args.py +20 -4
- bbot/scanner/preset/path.py +3 -1
- bbot/scanner/preset/preset.py +18 -12
- bbot/scanner/scanner.py +7 -2
- bbot/scanner/target.py +236 -434
- bbot/test/bbot_fixtures.py +5 -2
- bbot/test/conftest.py +95 -83
- bbot/test/test_step_1/test_bloom_filter.py +2 -0
- bbot/test/test_step_1/test_cli.py +36 -0
- bbot/test/test_step_1/test_dns.py +2 -1
- bbot/test/test_step_1/test_events.py +16 -3
- bbot/test/test_step_1/test_helpers.py +17 -0
- bbot/test/test_step_1/test_modules_basic.py +0 -3
- bbot/test/test_step_1/test_presets.py +51 -38
- bbot/test/test_step_1/test_python_api.py +4 -0
- bbot/test/test_step_1/test_scan.py +8 -2
- bbot/test/test_step_1/test_target.py +227 -129
- bbot/test/test_step_1/test_web.py +3 -0
- bbot/test/test_step_2/module_tests/test_module_dastardly.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py +0 -6
- bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_leakix.py +5 -1
- {bbot-2.2.0.5263rc0.dist-info → bbot-2.2.0.5309rc0.dist-info}/METADATA +4 -4
- {bbot-2.2.0.5263rc0.dist-info → bbot-2.2.0.5309rc0.dist-info}/RECORD +71 -70
- {bbot-2.2.0.5263rc0.dist-info → bbot-2.2.0.5309rc0.dist-info}/LICENSE +0 -0
- {bbot-2.2.0.5263rc0.dist-info → bbot-2.2.0.5309rc0.dist-info}/WHEEL +0 -0
- {bbot-2.2.0.5263rc0.dist-info → bbot-2.2.0.5309rc0.dist-info}/entry_points.txt +0 -0
bbot/test/bbot_fixtures.py
CHANGED

@@ -15,8 +15,8 @@ from werkzeug.wrappers import Request
 from bbot.errors import * # noqa: F401
 from bbot.core import CORE
 from bbot.scanner import Preset
-from bbot.core.helpers.misc import mkdir, rand_string
 from bbot.core.helpers.async_helpers import get_event_loop
+from bbot.core.helpers.misc import mkdir, rand_string, get_python_constraints


 log = logging.getLogger(f"bbot.test.fixtures")

@@ -229,4 +229,7 @@ def install_all_python_deps():
     deps_pip = set()
     for module in DEFAULT_PRESET.module_loader.preloaded().values():
         deps_pip.update(set(module.get("deps", {}).get("pip", [])))
-    [line removed; content not shown in this view]
+
+    constraint_file = tempwordlist(get_python_constraints())
+
+    subprocess.run([sys.executable, "-m", "pip", "install", "--constraint", constraint_file] + list(deps_pip))
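
The constraint file keeps module dependency installs from disturbing the packages already pinned in the test environment. A minimal sketch of the idea, assuming only what the test above shows (get_python_constraints() yielding pip requirement strings and tempwordlist() writing them to a temp file); this is not bbot's implementation:

import subprocess
import sys
import tempfile
from importlib.metadata import distributions

# pin every package currently installed in this environment
constraints = sorted(f"{dist.metadata['Name']}=={dist.version}" for dist in distributions())

# write the pins to a temporary file and hand it to pip via --constraint,
# so installing extra module deps cannot upgrade or downgrade existing packages
with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False) as f:
    f.write("\n".join(constraints))
    constraint_file = f.name

# "example-module-dep" is a placeholder package name, not a real bbot dependency
subprocess.run([sys.executable, "-m", "pip", "install", "--constraint", constraint_file, "example-module-dep"])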
bbot/test/conftest.py
CHANGED

@@ -94,9 +94,21 @@ def bbot_httpserver_ssl():
     server.clear()


-[3 lines removed; content not shown in this view]
+def should_mock(request):
+    return not request.url.host in ["127.0.0.1", "localhost", "raw.githubusercontent.com"] + interactsh_servers
+
+
+def pytest_collection_modifyitems(config, items):
+    # make sure all tests have the httpx_mock marker
+    for item in items:
+        item.add_marker(
+            pytest.mark.httpx_mock(
+                should_mock=should_mock,
+                assert_all_requests_were_expected=False,
+                assert_all_responses_were_requested=False,
+                can_send_already_matched_responses=True,
+            )
+        )


 @pytest.fixture

@@ -239,80 +251,80 @@ def pytest_terminal_summary(terminalreporter, exitstatus, config): # pragma: no cover


 # BELOW: debugging for frozen/hung tests
-[74 lines removed; content not shown in this view]
+import psutil
+import traceback
+import inspect
+
+
+def _print_detailed_info(): # pragma: no cover
+    """
+    Debugging pytests hanging
+    """
+    print("=== Detailed Thread and Process Information ===\n")
+    try:
+        print("=== Threads ===")
+        for thread in threading.enumerate():
+            print(f"Thread Name: {thread.name}")
+            print(f"Thread ID: {thread.ident}")
+            print(f"Is Alive: {thread.is_alive()}")
+            print(f"Daemon: {thread.daemon}")
+
+            if hasattr(thread, "_target"):
+                target = thread._target
+                if target:
+                    qualname = (
+                        f"{target.__module__}.{target.__qualname__}"
+                        if hasattr(target, "__qualname__")
+                        else str(target)
+                    )
+                    print(f"Target Function: {qualname}")
+
+                    if hasattr(thread, "_args"):
+                        args = thread._args
+                        kwargs = thread._kwargs if hasattr(thread, "_kwargs") else {}
+                        arg_spec = inspect.getfullargspec(target)
+
+                        all_args = list(args) + [f"{k}={v}" for k, v in kwargs.items()]
+
+                        if inspect.ismethod(target) and arg_spec.args[0] == "self":
+                            arg_spec.args.pop(0)
+
+                        named_args = list(zip(arg_spec.args, all_args))
+                        if arg_spec.varargs:
+                            named_args.extend((f"*{arg_spec.varargs}", arg) for arg in all_args[len(arg_spec.args) :])
+
+                        print("Arguments:")
+                        for name, value in named_args:
+                            print(f" {name}: {value}")
+                else:
+                    print("Target Function: None")
+            else:
+                print("Target Function: Unknown")
+
+            print()
+
+        print("=== Processes ===")
+        current_process = psutil.Process()
+        for child in current_process.children(recursive=True):
+            print(f"Process ID: {child.pid}")
+            print(f"Name: {child.name()}")
+            print(f"Status: {child.status()}")
+            print(f"CPU Times: {child.cpu_times()}")
+            print(f"Memory Info: {child.memory_info()}")
+            print()
+
+        print("=== Current Process ===")
+        print(f"Process ID: {current_process.pid}")
+        print(f"Name: {current_process.name()}")
+        print(f"Status: {current_process.status()}")
+        print(f"CPU Times: {current_process.cpu_times()}")
+        print(f"Memory Info: {current_process.memory_info()}")
+        print()
+
+    except Exception as e:
+        print(f"An error occurred: {str(e)}")
+        print("Traceback:")
+        traceback.print_exc()


 @pytest.hookimpl(tryfirst=True, hookwrapper=True)

@@ -330,11 +342,11 @@ def pytest_sessionfinish(session, exitstatus):
     yield

     # temporarily suspend stdout capture and print detailed thread info
-[3 lines removed; content not shown in this view]
+    capmanager = session.config.pluginmanager.get_plugin("capturemanager")
+    if capmanager:
+        capmanager.suspend_global_capture(in_=True)

-[line removed; content not shown in this view]
+    _print_detailed_info()

-[2 lines removed; content not shown in this view]
+    if capmanager:
+        capmanager.resume_global_capture()
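
conftest.py now configures pytest-httpx globally by stamping an httpx_mock marker onto every collected test, with should_mock() exempting local servers, raw.githubusercontent.com, and the interactsh hosts from mocking. A minimal per-test sketch using the same marker-based options (these option names are assumed here only from their use above):

import httpx
import pytest

@pytest.mark.httpx_mock(assert_all_responses_were_requested=False, can_send_already_matched_responses=True)
def test_single_case(httpx_mock):
    # queue a canned JSON response for this one test only
    httpx_mock.add_response(url="https://api.example.com/ping", json={"ok": True})
    assert httpx.get("https://api.example.com/ping").json() == {"ok": True}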

bbot/test/test_step_1/test_cli.py
CHANGED

@@ -535,6 +535,13 @@ def test_cli_module_validation(monkeypatch, caplog):
         ]
     )

+    # bad target
+    caplog.clear()
+    assert not caplog.text
+    monkeypatch.setattr("sys.argv", ["bbot", "-t", "asdf:::sdf"])
+    cli.main()
+    assert 'Unable to autodetect event type from "asdf:::sdf"' in caplog.text
+
     # incorrect flag
     caplog.clear()
     assert not caplog.text

@@ -626,6 +633,35 @@ config:
     stdout_preset = yaml.safe_load(captured.out)
     assert stdout_preset["config"]["web"]["http_proxy"] == "http://proxy2"

+    # --fast-mode
+    monkeypatch.setattr("sys.argv", ["bbot", "--current-preset"])
+    cli.main()
+    captured = capsys.readouterr()
+    stdout_preset = yaml.safe_load(captured.out)
+    assert list(stdout_preset) == ["description"]
+
+    monkeypatch.setattr("sys.argv", ["bbot", "--fast", "--current-preset"])
+    cli.main()
+    captured = capsys.readouterr()
+    stdout_preset = yaml.safe_load(captured.out)
+    stdout_preset.pop("description")
+    assert stdout_preset == {
+        "config": {
+            "scope": {"strict": True},
+            "dns": {"minimal": True},
+            "modules": {"speculate": {"essential_only": True}},
+        },
+        "exclude_modules": ["excavate"],
+    }
+
+    # --proxy
+    monkeypatch.setattr("sys.argv", ["bbot", "--proxy", "http://127.0.0.1:8080", "--current-preset"])
+    cli.main()
+    captured = capsys.readouterr()
+    stdout_preset = yaml.safe_load(captured.out)
+    stdout_preset.pop("description")
+    assert stdout_preset == {"config": {"web": {"http_proxy": "http://127.0.0.1:8080"}}}
+
     # cli config overrides all presets
     monkeypatch.setattr(
         "sys.argv",
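
--fast maps to the new fast.yml preset added in this release (alongside spider.yml). Judging only by the assertions above, a rough Python-API equivalent would look like the following sketch; the real fast.yml may lay things out differently:

from bbot.scanner import Preset, Scanner

fast_like = Preset.from_dict(
    {
        "exclude_modules": ["excavate"],
        "config": {
            "scope": {"strict": True},
            "dns": {"minimal": True},
            "modules": {"speculate": {"essential_only": True}},
        },
    }
)
scan = Scanner("evilcorp.com", preset=fast_like)  # the same knobs the --fast CLI flag flips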

bbot/test/test_step_1/test_dns.py
CHANGED

@@ -106,7 +106,8 @@ async def test_dns_resolution(bbot_scanner):
     assert "2606:4700:4700::1111" in await dnsengine.resolve("one.one.one.one", type="AAAA")
     assert "one.one.one.one" in await dnsengine.resolve("1.1.1.1")
     for rdtype in ("NS", "SOA", "MX", "TXT"):
-[line removed; content not shown in this view]
+        results = await dnsengine.resolve("google.com", type=rdtype)
+        assert len(results) > 0

     # batch resolution
     batch_results = [r async for r in dnsengine.resolve_batch(["1.1.1.1", "one.one.one.one"])]

bbot/test/test_step_1/test_events.py
CHANGED

@@ -42,6 +42,7 @@ async def test_events(events, helpers):
     # ip tests
     assert events.ipv4 == scan.make_event("8.8.8.8", dummy=True)
     assert "8.8.8.8" in events.ipv4
+    assert events.ipv4.host_filterable == "8.8.8.8"
     assert "8.8.8.8" == events.ipv4
     assert "8.8.8.8" in events.netv4
     assert "8.8.8.9" not in events.ipv4

@@ -59,11 +60,19 @@
     assert events.emoji not in events.ipv4
     assert events.emoji not in events.netv6
     assert events.netv6 not in events.emoji
-[line removed; content not shown in this view]
+    ipv6_event = scan.make_event(" [DEaD::c0De]:88", "DNS_NAME", dummy=True)
+    assert "dead::c0de" == ipv6_event
+    assert ipv6_event.host_filterable == "dead::c0de"
+    range_to_ip = scan.make_event("1.2.3.4/32", dummy=True)
+    assert range_to_ip.type == "IP_ADDRESS"
+    range_to_ip = scan.make_event("dead::beef/128", dummy=True)
+    assert range_to_ip.type == "IP_ADDRESS"

     # hostname tests
     assert events.domain.host == "publicapis.org"
+    assert events.domain.host_filterable == "publicapis.org"
     assert events.subdomain.host == "api.publicapis.org"
+    assert events.subdomain.host_filterable == "api.publicapis.org"
     assert events.domain.host_stem == "publicapis"
     assert events.subdomain.host_stem == "api.publicapis"
     assert "api.publicapis.org" in events.domain

@@ -86,7 +95,11 @@
         assert "port" not in e.json()

     # url tests
-[line removed; content not shown in this view]
+    url_no_trailing_slash = scan.make_event("http://evilcorp.com", dummy=True)
+    url_trailing_slash = scan.make_event("http://evilcorp.com/", dummy=True)
+    assert url_no_trailing_slash == url_trailing_slash
+    assert url_no_trailing_slash.host_filterable == "http://evilcorp.com/"
+    assert url_trailing_slash.host_filterable == "http://evilcorp.com/"
     assert events.url_unverified.host == "api.publicapis.org"
     assert events.url_unverified in events.domain
     assert events.url_unverified in events.subdomain

@@ -129,6 +142,7 @@
     assert events.http_response.port == 80
     assert events.http_response.parsed_url.scheme == "http"
     assert events.http_response.with_port().geturl() == "http://example.com:80/"
+    assert events.http_response.host_filterable == "http://example.com/"

     http_response = scan.make_event(
         {

@@ -484,7 +498,6 @@
     json_event = db_event.json()
     assert isinstance(json_event["uuid"], str)
     assert json_event["uuid"] == str(db_event.uuid)
-    print(f"{json_event} / {db_event.uuid} / {db_event.parent_uuid} / {scan.root_event.uuid}")
     assert json_event["parent_uuid"] == str(scan.root_event.uuid)
     assert json_event["scope_distance"] == 1
     assert json_event["data"] == "evilcorp.com:80"
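
The host_filterable assertions exercise the new property added in bbot/core/event/base.py (+23 -2). A rough approximation of the behavior the tests imply, not bbot's actual code: URL-style events keep their scheme and a normalized path, everything else reduces to a bare, lowercased host.

from urllib.parse import urlparse, urlunparse

def host_filterable_approx(event_data):
    # URLs: keep scheme + netloc, normalize an empty path to "/"
    if event_data.startswith(("http://", "https://")):
        parsed = urlparse(event_data)
        return urlunparse((parsed.scheme, parsed.netloc, parsed.path or "/", "", "", ""))
    # everything else: strip whitespace, lowercase, drop ports and IPv6 brackets
    host = event_data.strip().lower()
    if host.startswith("["):            # "[dead::c0de]:88" -> "dead::c0de"
        host = host[1:].split("]", 1)[0]
    elif host.count(":") == 1:          # "example.com:80" -> "example.com"
        host = host.split(":", 1)[0]
    return host

assert host_filterable_approx("8.8.8.8") == "8.8.8.8"
assert host_filterable_approx(" [DEaD::c0De]:88") == "dead::c0de"
assert host_filterable_approx("http://evilcorp.com") == "http://evilcorp.com/"
assert host_filterable_approx("http://evilcorp.com/") == "http://evilcorp.com/"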

bbot/test/test_step_1/test_helpers.py
CHANGED

@@ -93,8 +93,23 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver):
         ipaddress.ip_network("0.0.0.0/0"),
     ]
     assert helpers.is_ip("127.0.0.1")
+    assert helpers.is_ip("127.0.0.1", include_network=True)
+    assert helpers.is_ip("127.0.0.1", version=4)
+    assert not helpers.is_ip("127.0.0.1", version=6)
     assert not helpers.is_ip("127.0.0.0.1")

+    assert helpers.is_ip("dead::beef")
+    assert helpers.is_ip("dead::beef", include_network=True)
+    assert not helpers.is_ip("dead::beef", version=4)
+    assert helpers.is_ip("dead::beef", version=6)
+    assert not helpers.is_ip("dead:::beef")
+
+    assert not helpers.is_ip("1.2.3.4/24")
+    assert helpers.is_ip("1.2.3.4/24", include_network=True)
+    assert not helpers.is_ip("1.2.3.4/24", version=4)
+    assert helpers.is_ip("1.2.3.4/24", include_network=True, version=4)
+    assert not helpers.is_ip("1.2.3.4/24", include_network=True, version=6)
+
     assert not helpers.is_ip_type("127.0.0.1")
     assert helpers.is_ip_type(ipaddress.ip_address("127.0.0.1"))
     assert not helpers.is_ip_type(ipaddress.ip_address("127.0.0.1"), network=True)

@@ -104,6 +119,8 @@
     assert not helpers.is_ip_type(ipaddress.ip_network("127.0.0.0/8"), network=False)

     assert helpers.is_dns_name("evilcorp.com")
+    assert not helpers.is_dns_name("evilcorp.com:80")
+    assert not helpers.is_dns_name("http://evilcorp.com:80")
     assert helpers.is_dns_name("evilcorp")
     assert not helpers.is_dns_name("evilcorp", include_local=False)
     assert helpers.is_dns_name("ドメイン.テスト")
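
The new include_network and version arguments to helpers.is_ip() come from the bbot/core/helpers/misc.py changes (+29 -5). A behavioral sketch consistent with the assertions above (an approximation, not bbot's implementation):

import ipaddress

def is_ip_approx(value, include_network=False, version=None):
    # accept single addresses; with include_network=True also accept CIDR networks
    try:
        parsed = ipaddress.ip_address(value)
    except ValueError:
        if not include_network:
            return False
        try:
            parsed = ipaddress.ip_network(value, strict=False)
        except ValueError:
            return False
    # optionally require a specific IP version
    return version is None or parsed.version == version

assert is_ip_approx("1.2.3.4/24", include_network=True, version=4)
assert not is_ip_approx("1.2.3.4/24")            # networks are rejected by default
assert not is_ip_approx("dead::beef", version=4)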

bbot/test/test_step_1/test_modules_basic.py
CHANGED

@@ -10,9 +10,6 @@ from bbot.modules.internal.base import BaseInternalModule

 @pytest.mark.asyncio
 async def test_modules_basic_checks(events, httpx_mock):
-    for http_method in ("GET", "CONNECT", "HEAD", "POST", "PUT", "TRACE", "DEBUG", "PATCH", "DELETE", "OPTIONS"):
-        httpx_mock.add_response(method=http_method, url=re.compile(r".*"), json={"test": "test"})
-
     from bbot.scanner import Scanner

     scan = Scanner(config={"omit_event_types": ["URL_UNVERIFIED"]})

bbot/test/test_step_1/test_presets.py
CHANGED

@@ -86,12 +86,15 @@ def test_preset_yaml(clean_default_config):
         debug=False,
         silent=True,
         config={"preset_test_asdf": 1},
-        strict_scope=False,
     )
     preset1 = preset1.bake()
-    assert "evilcorp.com" in preset1.target
+    assert "evilcorp.com" in preset1.target.seeds
+    assert "evilcorp.ce" not in preset1.target.seeds
+    assert "asdf.www.evilcorp.ce" in preset1.target.seeds
     assert "evilcorp.ce" in preset1.whitelist
+    assert "asdf.evilcorp.ce" in preset1.whitelist
     assert "test.www.evilcorp.ce" in preset1.blacklist
+    assert "asdf.test.www.evilcorp.ce" in preset1.blacklist
     assert "sslcert" in preset1.scan_modules
     assert preset1.whitelisted("evilcorp.ce")
     assert preset1.whitelisted("www.evilcorp.ce")

@@ -171,12 +174,14 @@ def test_preset_scope():

     # test target merging
     scan = Scanner("1.2.3.4", preset=Preset.from_dict({"target": ["evilcorp.com"]}))
-    assert set([str(h) for h in scan.preset.target.seeds.hosts]) == {"1.2.3.4", "evilcorp.com"}
-    assert set([e.data for e in scan.target]) == {"1.2.3.4", "evilcorp.com"}
+    assert set([str(h) for h in scan.preset.target.seeds.hosts]) == {"1.2.3.4/32", "evilcorp.com"}
+    assert set([e.data for e in scan.target.seeds]) == {"1.2.3.4", "evilcorp.com"}
+    assert set([e.data for e in scan.target.whitelist]) == {"1.2.3.4", "evilcorp.com"}

     blank_preset = Preset()
     blank_preset = blank_preset.bake()
-    assert not blank_preset.target
+    assert not blank_preset.target.seeds
+    assert not blank_preset.target.whitelist
     assert blank_preset.strict_scope == False

     preset1 = Preset(

@@ -188,10 +193,11 @@
     preset1_baked = preset1.bake()

     # make sure target logic works as expected
-    assert "evilcorp.com" in preset1_baked.target
-    assert "
-    assert "asdf.
-    assert not "evilcorp.
+    assert "evilcorp.com" in preset1_baked.target.seeds
+    assert not "evilcorp.com" in preset1_baked.target.whitelist
+    assert "asdf.evilcorp.com" in preset1_baked.target.seeds
+    assert not "asdf.evilcorp.com" in preset1_baked.target.whitelist
+    assert "asdf.evilcorp.ce" in preset1_baked.whitelist
     assert "evilcorp.ce" in preset1_baked.whitelist
     assert "test.www.evilcorp.ce" in preset1_baked.blacklist
     assert not "evilcorp.ce" in preset1_baked.blacklist

@@ -210,7 +216,7 @@
         "evilcorp.org",
         whitelist=["evilcorp.de"],
         blacklist=["test.www.evilcorp.de"],
-[line removed; content not shown in this view]
+        config={"scope": {"strict": True}},
     )

     preset1.merge(preset3)

@@ -218,17 +224,21 @@
     preset1_baked = preset1.bake()

     # targets should be merged
-    assert "evilcorp.com" in preset1_baked.target
-    assert "www.evilcorp.ce" in preset1_baked.target
-    assert "evilcorp.org" in preset1_baked.target
+    assert "evilcorp.com" in preset1_baked.target.seeds
+    assert "www.evilcorp.ce" in preset1_baked.target.seeds
+    assert "evilcorp.org" in preset1_baked.target.seeds
     # strict scope is enabled
-    assert not "asdf.evilcorp.
-    assert not "asdf.
+    assert not "asdf.www.evilcorp.ce" in preset1_baked.target.seeds
+    assert not "asdf.evilcorp.org" in preset1_baked.target.seeds
+    assert not "asdf.evilcorp.com" in preset1_baked.target.seeds
+    assert not "asdf.www.evilcorp.ce" in preset1_baked.target.seeds
     assert "evilcorp.ce" in preset1_baked.whitelist
     assert "evilcorp.de" in preset1_baked.whitelist
     assert not "asdf.evilcorp.de" in preset1_baked.whitelist
     assert not "asdf.evilcorp.ce" in preset1_baked.whitelist
     # blacklist should be merged, strict scope does not apply
+    assert "test.www.evilcorp.ce" in preset1_baked.blacklist
+    assert "test.www.evilcorp.de" in preset1_baked.blacklist
     assert "asdf.test.www.evilcorp.ce" in preset1_baked.blacklist
     assert "asdf.test.www.evilcorp.de" in preset1_baked.blacklist
     assert not "asdf.test.www.evilcorp.org" in preset1_baked.blacklist

@@ -264,14 +274,14 @@
     }
     assert preset_whitelist_baked.to_dict(include_target=True) == {
         "target": ["evilcorp.org"],
-        "whitelist": ["1.2.3.0/24", "evilcorp.net"],
-        "blacklist": ["evilcorp.co.uk"],
+        "whitelist": ["1.2.3.0/24", "http://evilcorp.net/"],
+        "blacklist": ["bob@evilcorp.co.uk", "evilcorp.co.uk:443"],
         "config": {"modules": {"secretsdb": {"api_key": "deadbeef", "otherthing": "asdf"}}},
     }
     assert preset_whitelist_baked.to_dict(include_target=True, redact_secrets=True) == {
         "target": ["evilcorp.org"],
-        "whitelist": ["1.2.3.0/24", "evilcorp.net"],
-        "blacklist": ["evilcorp.co.uk"],
+        "whitelist": ["1.2.3.0/24", "http://evilcorp.net/"],
+        "blacklist": ["bob@evilcorp.co.uk", "evilcorp.co.uk:443"],
         "config": {"modules": {"secretsdb": {"otherthing": "asdf"}}},
     }


@@ -279,7 +289,8 @@ def test_preset_scope():
     assert not preset_nowhitelist_baked.in_scope("www.evilcorp.de")
     assert not preset_nowhitelist_baked.in_scope("1.2.3.4/24")

-    assert "www.evilcorp.org" in preset_whitelist_baked.target
+    assert "www.evilcorp.org" in preset_whitelist_baked.target.seeds
+    assert not "www.evilcorp.org" in preset_whitelist_baked.target.whitelist
     assert "1.2.3.4" in preset_whitelist_baked.whitelist
     assert not preset_whitelist_baked.in_scope("www.evilcorp.org")
     assert not preset_whitelist_baked.in_scope("www.evilcorp.de")

@@ -292,17 +303,17 @@
     assert preset_whitelist_baked.whitelisted("1.2.3.4/28")
     assert preset_whitelist_baked.whitelisted("1.2.3.4/24")

-    assert set([e.data for e in preset_nowhitelist_baked.
-    assert set([e.data for e in preset_whitelist_baked.target]) == {"evilcorp.org"}
+    assert set([e.data for e in preset_nowhitelist_baked.seeds]) == {"evilcorp.com"}
     assert set([e.data for e in preset_nowhitelist_baked.whitelist]) == {"evilcorp.com"}
-    assert set([e.data for e in preset_whitelist_baked.
+    assert set([e.data for e in preset_whitelist_baked.seeds]) == {"evilcorp.org"}
+    assert set([e.data for e in preset_whitelist_baked.whitelist]) == {"1.2.3.0/24", "http://evilcorp.net/"}

     preset_nowhitelist.merge(preset_whitelist)
     preset_nowhitelist_baked = preset_nowhitelist.bake()
-    assert set([e.data for e in preset_nowhitelist_baked.
-    assert set([e.data for e in preset_nowhitelist_baked.whitelist]) == {"1.2.3.0/24", "evilcorp.net"}
-    assert "www.evilcorp.org" in preset_nowhitelist_baked.
-    assert "www.evilcorp.com" in preset_nowhitelist_baked.
+    assert set([e.data for e in preset_nowhitelist_baked.seeds]) == {"evilcorp.com", "evilcorp.org"}
+    assert set([e.data for e in preset_nowhitelist_baked.whitelist]) == {"1.2.3.0/24", "http://evilcorp.net/"}
+    assert "www.evilcorp.org" in preset_nowhitelist_baked.seeds
+    assert "www.evilcorp.com" in preset_nowhitelist_baked.seeds
     assert "1.2.3.4" in preset_nowhitelist_baked.whitelist
     assert not preset_nowhitelist_baked.in_scope("www.evilcorp.org")
     assert not preset_nowhitelist_baked.in_scope("www.evilcorp.com")

@@ -314,10 +325,12 @@
     preset_whitelist = Preset("evilcorp.org", whitelist=["1.2.3.4/24"])
     preset_whitelist.merge(preset_nowhitelist)
     preset_whitelist_baked = preset_whitelist.bake()
-    assert set([e.data for e in preset_whitelist_baked.
+    assert set([e.data for e in preset_whitelist_baked.seeds]) == {"evilcorp.com", "evilcorp.org"}
     assert set([e.data for e in preset_whitelist_baked.whitelist]) == {"1.2.3.0/24"}
-    assert "www.evilcorp.org" in preset_whitelist_baked.
-    assert "www.evilcorp.com" in preset_whitelist_baked.
+    assert "www.evilcorp.org" in preset_whitelist_baked.seeds
+    assert "www.evilcorp.com" in preset_whitelist_baked.seeds
+    assert not "www.evilcorp.org" in preset_whitelist_baked.target.whitelist
+    assert not "www.evilcorp.com" in preset_whitelist_baked.target.whitelist
     assert "1.2.3.4" in preset_whitelist_baked.whitelist
     assert not preset_whitelist_baked.in_scope("www.evilcorp.org")
     assert not preset_whitelist_baked.in_scope("www.evilcorp.com")

@@ -329,18 +342,18 @@
     preset_nowhitelist2 = Preset("evilcorp.de")
     preset_nowhitelist1_baked = preset_nowhitelist1.bake()
     preset_nowhitelist2_baked = preset_nowhitelist2.bake()
-    assert set([e.data for e in preset_nowhitelist1_baked.
-    assert set([e.data for e in preset_nowhitelist2_baked.
+    assert set([e.data for e in preset_nowhitelist1_baked.seeds]) == {"evilcorp.com"}
+    assert set([e.data for e in preset_nowhitelist2_baked.seeds]) == {"evilcorp.de"}
     assert set([e.data for e in preset_nowhitelist1_baked.whitelist]) == {"evilcorp.com"}
     assert set([e.data for e in preset_nowhitelist2_baked.whitelist]) == {"evilcorp.de"}
     preset_nowhitelist1.merge(preset_nowhitelist2)
     preset_nowhitelist1_baked = preset_nowhitelist1.bake()
-    assert set([e.data for e in preset_nowhitelist1_baked.
-    assert set([e.data for e in preset_nowhitelist2_baked.
+    assert set([e.data for e in preset_nowhitelist1_baked.seeds]) == {"evilcorp.com", "evilcorp.de"}
+    assert set([e.data for e in preset_nowhitelist2_baked.seeds]) == {"evilcorp.de"}
     assert set([e.data for e in preset_nowhitelist1_baked.whitelist]) == {"evilcorp.com", "evilcorp.de"}
     assert set([e.data for e in preset_nowhitelist2_baked.whitelist]) == {"evilcorp.de"}
-    assert "www.evilcorp.com" in preset_nowhitelist1_baked.
-    assert "www.evilcorp.de" in preset_nowhitelist1_baked.
+    assert "www.evilcorp.com" in preset_nowhitelist1_baked.seeds
+    assert "www.evilcorp.de" in preset_nowhitelist1_baked.seeds
     assert "www.evilcorp.com" in preset_nowhitelist1_baked.target.seeds
     assert "www.evilcorp.de" in preset_nowhitelist1_baked.target.seeds
     assert "www.evilcorp.com" in preset_nowhitelist1_baked.whitelist

@@ -357,8 +370,8 @@ def test_preset_scope():
     preset_nowhitelist2.merge(preset_nowhitelist1)
     preset_nowhitelist1_baked = preset_nowhitelist1.bake()
     preset_nowhitelist2_baked = preset_nowhitelist2.bake()
-    assert set([e.data for e in preset_nowhitelist1_baked.
-    assert set([e.data for e in preset_nowhitelist2_baked.
+    assert set([e.data for e in preset_nowhitelist1_baked.seeds]) == {"evilcorp.com"}
+    assert set([e.data for e in preset_nowhitelist2_baked.seeds]) == {"evilcorp.com", "evilcorp.de"}
     assert set([e.data for e in preset_nowhitelist1_baked.whitelist]) == {"evilcorp.com"}
     assert set([e.data for e in preset_nowhitelist2_baked.whitelist]) == {"evilcorp.com", "evilcorp.de"}

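
These changes track the rewritten target handling (bbot/scanner/target.py, +236 -434): a baked preset now exposes distinct seeds and whitelist collections under target rather than one flat target. A short usage sketch inferred from the assertions above (illustrative, not exhaustive):

from bbot.scanner import Preset

preset = Preset(
    "evilcorp.com",
    whitelist=["evilcorp.ce"],
    blacklist=["test.www.evilcorp.ce"],
).bake()

assert "evilcorp.com" in preset.target.seeds          # seeds: what the scan starts from
assert "evilcorp.ce" in preset.whitelist              # whitelist: what counts as in-scope
assert "test.www.evilcorp.ce" in preset.blacklist     # blacklist: never touched
assert not "evilcorp.com" in preset.target.whitelist  # seeding no longer implies whitelisting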

bbot/test/test_step_1/test_python_api.py
CHANGED

@@ -84,6 +84,10 @@ def test_python_api_sync():
 def test_python_api_validation():
     from bbot.scanner import Scanner, Preset

+    # invalid target
+    with pytest.raises(ValidationError) as error:
+        Scanner("asdf:::asdf")
+    assert str(error.value) == 'Unable to autodetect event type from "asdf:::asdf"'
     # invalid module
     with pytest.raises(ValidationError) as error:
         Scanner(modules=["asdf"])
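
Outside pytest, the same early target validation surfaces as an exception when the Scanner is constructed. A minimal sketch, assuming ValidationError is exported from bbot.errors (as the wildcard import in bbot_fixtures.py suggests):

from bbot.errors import ValidationError
from bbot.scanner import Scanner

try:
    Scanner("asdf:::asdf")
except ValidationError as e:
    print(e)  # Unable to autodetect event type from "asdf:::asdf"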

bbot/test/test_step_1/test_scan.py
CHANGED

@@ -1,3 +1,5 @@
+from ipaddress import ip_network
+
 from ..bbot_fixtures import *



@@ -12,6 +14,7 @@ async def test_scan(
         "1.1.1.0",
         "1.1.1.1/31",
         "evilcorp.com",
+        "test.evilcorp.com",
         blacklist=["1.1.1.1/28", "www.evilcorp.com"],
         modules=["ipneighbor"],
     )

@@ -31,8 +34,11 @@
     assert not scan0.in_scope("test.www.evilcorp.com")
     assert not scan0.in_scope("www.evilcorp.co.uk")
     j = scan0.json
-    assert set(j["target"]["seeds"]) == {"1.1.1.0", "1.1.1.0/31", "evilcorp.com"}
-[line removed; content not shown in this view]
+    assert set(j["target"]["seeds"]) == {"1.1.1.0", "1.1.1.0/31", "evilcorp.com", "test.evilcorp.com"}
+    # we preserve the original whitelist inputs
+    assert set(j["target"]["whitelist"]) == {"1.1.1.0", "1.1.1.0/31", "evilcorp.com", "test.evilcorp.com"}
+    # but in the background they are collapsed
+    assert scan0.target.whitelist.hosts == {ip_network("1.1.1.0/31"), "evilcorp.com"}
     assert set(j["target"]["blacklist"]) == {"1.1.1.0/28", "www.evilcorp.com"}
     assert "ipneighbor" in j["preset"]["modules"]

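
The last two assertions show the other side of the target rewrite: the original whitelist inputs are preserved for reporting (scan.json), while internally redundant entries collapse into the broadest covering network or parent domain. A rough sketch of that collapsing idea (an approximation, not bbot's implementation):

import ipaddress

def collapse_hosts(hosts):
    networks, names = [], []
    for h in hosts:
        try:
            networks.append(ipaddress.ip_network(h, strict=False))
        except ValueError:
            names.append(h.lower())
    # drop any network already contained in a broader one
    slim_networks = [
        n for n in networks
        if not any(o != n and o.version == n.version and n.subnet_of(o) for o in networks)
    ]
    # drop any hostname that is a subdomain of another entry
    slim_names = [d for d in names if not any(d != o and d.endswith("." + o) for o in names)]
    return set(slim_networks) | set(slim_names)

assert collapse_hosts(["1.1.1.0", "1.1.1.1/31", "evilcorp.com", "test.evilcorp.com"]) == {
    ipaddress.ip_network("1.1.1.0/31"),
    "evilcorp.com",
}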