bbot 2.6.0.6840rc0__py3-none-any.whl → 2.7.2.7424rc0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bbot/__init__.py +1 -1
- bbot/cli.py +22 -8
- bbot/core/engine.py +1 -1
- bbot/core/event/__init__.py +2 -2
- bbot/core/event/base.py +138 -110
- bbot/core/flags.py +1 -0
- bbot/core/helpers/bloom.py +6 -7
- bbot/core/helpers/depsinstaller/installer.py +21 -2
- bbot/core/helpers/dns/dns.py +0 -1
- bbot/core/helpers/dns/engine.py +0 -2
- bbot/core/helpers/files.py +2 -2
- bbot/core/helpers/git.py +17 -0
- bbot/core/helpers/helper.py +6 -5
- bbot/core/helpers/misc.py +8 -23
- bbot/core/helpers/ntlm.py +0 -2
- bbot/core/helpers/regex.py +1 -1
- bbot/core/helpers/regexes.py +25 -8
- bbot/core/helpers/web/web.py +2 -1
- bbot/core/modules.py +22 -60
- bbot/defaults.yml +4 -2
- bbot/modules/apkpure.py +1 -1
- bbot/modules/baddns.py +1 -1
- bbot/modules/baddns_direct.py +1 -1
- bbot/modules/baddns_zone.py +1 -1
- bbot/modules/badsecrets.py +1 -1
- bbot/modules/base.py +123 -38
- bbot/modules/bucket_amazon.py +1 -1
- bbot/modules/bucket_digitalocean.py +1 -1
- bbot/modules/bucket_firebase.py +1 -1
- bbot/modules/bucket_google.py +1 -1
- bbot/modules/{bucket_azure.py → bucket_microsoft.py} +2 -2
- bbot/modules/builtwith.py +4 -2
- bbot/modules/dnsbimi.py +1 -4
- bbot/modules/dnsbrute.py +6 -1
- bbot/modules/dnsdumpster.py +35 -52
- bbot/modules/dnstlsrpt.py +0 -6
- bbot/modules/docker_pull.py +1 -1
- bbot/modules/emailformat.py +17 -1
- bbot/modules/ffuf.py +4 -1
- bbot/modules/ffuf_shortnames.py +6 -3
- bbot/modules/filedownload.py +7 -4
- bbot/modules/git_clone.py +47 -22
- bbot/modules/gitdumper.py +4 -14
- bbot/modules/github_workflows.py +6 -5
- bbot/modules/gitlab_com.py +31 -0
- bbot/modules/gitlab_onprem.py +84 -0
- bbot/modules/gowitness.py +0 -6
- bbot/modules/graphql_introspection.py +5 -2
- bbot/modules/httpx.py +2 -0
- bbot/modules/iis_shortnames.py +0 -7
- bbot/modules/internal/cloudcheck.py +65 -72
- bbot/modules/internal/unarchive.py +9 -3
- bbot/modules/lightfuzz/lightfuzz.py +6 -2
- bbot/modules/lightfuzz/submodules/esi.py +42 -0
- bbot/modules/medusa.py +4 -7
- bbot/modules/nuclei.py +1 -1
- bbot/modules/otx.py +9 -2
- bbot/modules/output/base.py +3 -11
- bbot/modules/paramminer_headers.py +10 -7
- bbot/modules/portfilter.py +2 -0
- bbot/modules/postman_download.py +1 -1
- bbot/modules/retirejs.py +232 -0
- bbot/modules/securitytxt.py +0 -3
- bbot/modules/sslcert.py +2 -2
- bbot/modules/subdomaincenter.py +1 -16
- bbot/modules/telerik.py +7 -2
- bbot/modules/templates/bucket.py +24 -4
- bbot/modules/templates/gitlab.py +98 -0
- bbot/modules/trufflehog.py +6 -3
- bbot/modules/wafw00f.py +2 -2
- bbot/presets/web/lightfuzz-heavy.yml +1 -1
- bbot/presets/web/lightfuzz-medium.yml +1 -1
- bbot/presets/web/lightfuzz-superheavy.yml +1 -1
- bbot/scanner/manager.py +44 -37
- bbot/scanner/scanner.py +12 -4
- bbot/scripts/benchmark_report.py +433 -0
- bbot/test/benchmarks/__init__.py +2 -0
- bbot/test/benchmarks/test_bloom_filter_benchmarks.py +105 -0
- bbot/test/benchmarks/test_closest_match_benchmarks.py +76 -0
- bbot/test/benchmarks/test_event_validation_benchmarks.py +438 -0
- bbot/test/benchmarks/test_excavate_benchmarks.py +291 -0
- bbot/test/benchmarks/test_ipaddress_benchmarks.py +143 -0
- bbot/test/benchmarks/test_weighted_shuffle_benchmarks.py +70 -0
- bbot/test/test_step_1/test_bbot_fastapi.py +2 -2
- bbot/test/test_step_1/test_events.py +22 -21
- bbot/test/test_step_1/test_helpers.py +1 -0
- bbot/test/test_step_1/test_manager_scope_accuracy.py +45 -0
- bbot/test/test_step_1/test_modules_basic.py +40 -15
- bbot/test/test_step_1/test_python_api.py +2 -2
- bbot/test/test_step_1/test_regexes.py +21 -4
- bbot/test/test_step_1/test_scan.py +7 -8
- bbot/test/test_step_1/test_web.py +46 -0
- bbot/test/test_step_2/module_tests/base.py +6 -1
- bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +52 -18
- bbot/test/test_step_2/module_tests/test_module_bucket_google.py +1 -1
- bbot/test/test_step_2/module_tests/{test_module_bucket_azure.py → test_module_bucket_microsoft.py} +7 -5
- bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +19 -31
- bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +2 -1
- bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +3 -5
- bbot/test/test_step_2/module_tests/test_module_emailformat.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_emails.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_excavate.py +57 -4
- bbot/test/test_step_2/module_tests/test_module_github_workflows.py +10 -1
- bbot/test/test_step_2/module_tests/test_module_gitlab_com.py +66 -0
- bbot/test/test_step_2/module_tests/{test_module_gitlab.py → test_module_gitlab_onprem.py} +4 -69
- bbot/test/test_step_2/module_tests/test_module_lightfuzz.py +71 -3
- bbot/test/test_step_2/module_tests/test_module_nuclei.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_otx.py +3 -0
- bbot/test/test_step_2/module_tests/test_module_portfilter.py +2 -0
- bbot/test/test_step_2/module_tests/test_module_retirejs.py +161 -0
- bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_trufflehog.py +10 -1
- {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/METADATA +10 -7
- {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/RECORD +117 -106
- {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/WHEEL +1 -1
- {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info/licenses}/LICENSE +98 -58
- bbot/modules/censys.py +0 -98
- bbot/modules/gitlab.py +0 -141
- bbot/modules/zoomeye.py +0 -77
- bbot/test/test_step_2/module_tests/test_module_censys.py +0 -83
- bbot/test/test_step_2/module_tests/test_module_zoomeye.py +0 -35
- {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/entry_points.txt +0 -0
bbot/__init__.py
CHANGED
bbot/cli.py
CHANGED

@@ -7,7 +7,7 @@ import multiprocessing
 from bbot.errors import *
 from bbot import __version__
 from bbot.logger import log_to_stderr
-from bbot.core.helpers.misc import chain_lists
+from bbot.core.helpers.misc import chain_lists, rm_rf


 if multiprocessing.current_process().name == "MainProcess":

@@ -173,13 +173,27 @@ async def _main():

         # --install-all-deps
         if options.install_all_deps:
-
-
-
-
-
+            preloaded_modules = preset.module_loader.preloaded()
+            scan_modules = [k for k, v in preloaded_modules.items() if str(v.get("type", "")) == "scan"]
+            output_modules = [k for k, v in preloaded_modules.items() if str(v.get("type", "")) == "output"]
+            log.verbose("Creating dummy scan with all modules + output modules for deps installation")
+            dummy_scan = Scanner(preset=preset, modules=scan_modules, output_modules=output_modules)
+            dummy_scan.helpers.depsinstaller.force_deps = True
+            log.info("Installing module dependencies")
+            await dummy_scan.load_modules()
+            log.verbose("Running module setups")
+            succeeded, hard_failed, soft_failed = await dummy_scan.setup_modules(deps_only=True)
+            # remove any leftovers from the dummy scan
+            rm_rf(dummy_scan.home, ignore_errors=True)
+            rm_rf(dummy_scan.temp_dir, ignore_errors=True)
+            if succeeded:
+                log.success(
+                    f"Successfully installed dependencies for {len(succeeded):,} modules: {','.join(succeeded)}"
+                )
+            if soft_failed or hard_failed:
+                failed = soft_failed + hard_failed
+                log.warning(f"Failed to install dependencies for {len(failed):,} modules: {', '.join(failed)}")
                 return False
-            log.hugesuccess(f"Successfully installed dependencies for the following modules: {', '.join(succeeded)}")
             return True

         scan_name = str(scan.name)

@@ -201,7 +215,7 @@ async def _main():
         if not scan.preset.strict_scope:
             for event in scan.target.seeds.event_seeds:
                 if event.type == "DNS_NAME":
-                    cloudcheck_result = scan.helpers.cloudcheck(event.host)
+                    cloudcheck_result = await scan.helpers.cloudcheck.lookup(event.host)
                     if cloudcheck_result:
                         scan.hugewarning(
                             f'YOUR TARGET CONTAINS A CLOUD DOMAIN: "{event.host}". You\'re in for a wild ride!'
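The --install-all-deps change above builds its module lists by filtering the preloaded module metadata on its "type" field. A minimal illustration of that filter, using a made-up preloaded dict in place of preset.module_loader.preloaded():

# Hypothetical stand-in for preset.module_loader.preloaded()
preloaded_modules = {
    "httpx": {"type": "scan"},
    "dnsbrute": {"type": "scan"},
    "json": {"type": "output"},
}

scan_modules = [k for k, v in preloaded_modules.items() if str(v.get("type", "")) == "scan"]
output_modules = [k for k, v in preloaded_modules.items() if str(v.get("type", "")) == "output"]
print(scan_modules)    # ['httpx', 'dnsbrute']
print(output_modules)  # ['json']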
bbot/core/engine.py
CHANGED

@@ -636,7 +636,7 @@ class EngineServer(EngineBase):
         """
         if tasks:
             try:
-                done,
+                done, _ = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED, timeout=timeout)
                 return done
             except BaseException as e:
                 if isinstance(e, (TimeoutError, asyncio.exceptions.TimeoutError)):
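For context, a self-contained sketch of the asyncio.wait() pattern restored above: wait until the first task completes (or the timeout expires) and return the set of finished tasks. The sleep durations and timeout are arbitrary.

import asyncio

async def main():
    # two dummy tasks; the shorter one finishes first
    tasks = {asyncio.create_task(asyncio.sleep(d, result=d)) for d in (0.1, 1.0)}
    done, _ = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED, timeout=5)
    print([t.result() for t in done])  # [0.1]
    for t in tasks - done:  # cancel whatever is still pending so the demo exits cleanly
        t.cancel()

asyncio.run(main())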
bbot/core/event/__init__.py
CHANGED

@@ -1,3 +1,3 @@
-from .base import make_event, is_event, event_from_json
+from .base import make_event, update_event, is_event, event_from_json

-__all__ = ["make_event", "is_event", "event_from_json"]
+__all__ = ["make_event", "update_event", "is_event", "event_from_json"]
bbot/core/event/base.py
CHANGED

@@ -789,26 +789,32 @@ class BaseEvent:

     def __contains__(self, other):
         """
-
-
-
-
+        Membership checks for Events.
+
+        Supports:
+        - some_event in other_event (event vs event)
+        - "host:port" in other_event (string coerced to an event)
         """
-
-
-
-
+        # Fast path: already an Event
+        if is_event(other):
+            other_event = other
+        else:
+            try:
+                other_event = make_event(other, dummy=True)
+            except ValidationError:
+                return False
+
         # if hashes match
-        if
+        if other_event == self:
             return True
-        # if hosts match
-        if self.host and
-            if self.host ==
+        # if hosts match (including subnet / domain containment)
+        if self.host and other_event.host:
+            if self.host == other_event.host:
                 return True
             # hostnames and IPs
             radixtarget = RadixTarget()
             radixtarget.insert(self.host)
-            return bool(radixtarget.search(
+            return bool(radixtarget.search(other_event.host))
         return False

     def json(self, mode="json", siem_friendly=False):

@@ -996,10 +1002,14 @@ class BaseEvent:
         return self.priority > getattr(other, "priority", (0,))

     def __eq__(self, other):
-
-
-
-
+        """
+        Event equality is **only** defined between Event instances.
+
+        Equality is based on the event hash (derived from its id). Comparisons to
+        non-Event types raise a ValueError to make incorrect comparisons explicit.
+        """
+        if not is_event(other):
+            raise ValueError("Event equality is only defined between Event instances")
         return hash(self) == hash(other)

     def __hash__(self):

@@ -1748,6 +1758,55 @@ class MOBILE_APP(DictEvent):
         return self.data["url"]


+def update_event(
+    event,
+    parent=None,
+    context=None,
+    module=None,
+    scan=None,
+    tags=None,
+    internal=None,
+):
+    """
+    Updates an existing event object with additional metadata.
+
+    Parameters:
+        event (BaseEvent): The event object to update.
+        parent (BaseEvent, optional): New parent event.
+        context (str, optional): Discovery context to set.
+        module (str or BaseModule, optional): Module that discovered the event.
+        scan (Scan, optional): BBOT Scan object associated with the event.
+        tags (Union[str, List[str]], optional): Tags to merge into the event.
+        internal (Any, optional): Marks the event as internal if True.
+
+    Returns:
+        BaseEvent: The updated event object.
+    """
+    if not is_event(event):
+        raise ValidationError(f"update_event() expects an Event, got {type(event)}")
+
+    # allow tags to be either a string or an array
+    if not tags:
+        tags = []
+    elif isinstance(tags, str):
+        tags = [tags]
+    tags = set(tags)
+
+    if scan is not None and not event.scan:
+        event.scan = scan
+    if module is not None:
+        event.module = module
+    if parent is not None:
+        event.parent = parent
+    if context is not None:
+        event.discovery_context = context
+    if internal is True:
+        event.internal = True
+    if tags:
+        event.tags = tags.union(event.tags)
+    return event
+
+
 def make_event(
     data,
     event_type=None,

@@ -1761,14 +1820,13 @@ def make_event(
     internal=None,
 ):
     """
-    Creates and returns a new event object
+    Creates and returns a new event object.

-    This function serves as a factory for creating new event objects
-
-    it updates the event based on the additional parameters provided.
+    This function serves as a factory for creating new event objects from raw data.
+    If you need to modify an existing event, use ``update_event()`` instead.

     Parameters:
-        data (Union[str, dict
+        data (Union[str, dict]): The primary data for the event.
         event_type (str, optional): Type of the event, e.g., 'IP_ADDRESS'. Auto-detected if not provided.
         parent (BaseEvent, optional): Parent event leading to this event's discovery.
         context (str, optional): Description of circumstances leading to event's discovery.

@@ -1781,32 +1839,20 @@ def make_event(
         internal (Any, optional): Makes the event internal if set to True. Defaults to None.

     Returns:
-        BaseEvent: A new
+        BaseEvent: A new event object.

     Raises:
         ValidationError: Raised when there's an error in event data or type sanitization.
-
-    Examples:
-        If inside a module, e.g. from within its `handle_event()`:
-        >>> self.make_event("1.2.3.4", parent=event)
-        IP_ADDRESS("1.2.3.4", module=portscan, tags={'ipv4', 'distance-1'})
-
-        If you're outside a module but you have a scan object:
-        >>> scan.make_event("1.2.3.4", parent=scan.root_event)
-        IP_ADDRESS("1.2.3.4", module=None, tags={'ipv4', 'distance-1'})
-
-        If you're outside a scan and just messing around:
-        >>> from bbot.core.event.base import make_event
-        >>> make_event("1.2.3.4", dummy=True)
-        IP_ADDRESS("1.2.3.4", module=None, tags={'ipv4'})
-
-    Note:
-        When working within a module's `handle_event()`, use the instance method
-        `self.make_event()` instead of calling this function directly.
     """
     if not data:
         raise ValidationError("No data provided")

+    # do not allow passing an existing event here – use update_event() instead
+    if is_event(data):
+        raise ValidationError(
+            "make_event() does not accept an existing event object. Use update_event(event, ...) to modify an event."
+        )
+
     # allow tags to be either a string or an array
     if not tags:
         tags = []

@@ -1814,76 +1860,58 @@ def make_event(
         tags = [tags]
     tags = set(tags)

-    # if
-    if
-
-
-
-        if
-
-
-
-
-
-
-
-        if
-            event.tags = tags.union(event.tags)
-        event_type = data.type
-        return event
-    else:
-        # if event_type is not provided, autodetect it
-        if event_type is None:
-            event_seed = EventSeed(data)
-            event_type = event_seed.type
-            data = event_seed.data
-            if not dummy:
-                log.debug(f'Autodetected event type "{event_type}" based on data: "{data}"')
-
-        event_type = str(event_type).strip().upper()
-
-        # Catch these common whoopsies
-        if event_type in ("DNS_NAME", "IP_ADDRESS"):
-            # DNS_NAME <--> EMAIL_ADDRESS confusion
-            if validators.soft_validate(data, "email"):
-                event_type = "EMAIL_ADDRESS"
-            else:
-                # DNS_NAME <--> IP_ADDRESS confusion
-                try:
-                    data = validators.validate_host(data)
-                except Exception as e:
-                    log.trace(traceback.format_exc())
-                    raise ValidationError(f'Error sanitizing event data "{data}" for type "{event_type}": {e}')
-                data_is_ip = is_ip(data)
-                if event_type == "DNS_NAME" and data_is_ip:
-                    event_type = "IP_ADDRESS"
-                elif event_type == "IP_ADDRESS" and not data_is_ip:
-                    event_type = "DNS_NAME"
-        # USERNAME <--> EMAIL_ADDRESS confusion
-        if event_type == "USERNAME" and validators.soft_validate(data, "email"):
+    # if event_type is not provided, autodetect it
+    if event_type is None:
+        event_seed = EventSeed(data)
+        event_type = event_seed.type
+        data = event_seed.data
+        if not dummy:
+            log.debug(f'Autodetected event type "{event_type}" based on data: "{data}"')
+
+    event_type = str(event_type).strip().upper()
+
+    # Catch these common whoopsies
+    if event_type in ("DNS_NAME", "IP_ADDRESS"):
+        # DNS_NAME <--> EMAIL_ADDRESS confusion
+        if validators.soft_validate(data, "email"):
             event_type = "EMAIL_ADDRESS"
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        else:
+            # DNS_NAME <--> IP_ADDRESS confusion
+            try:
+                data = validators.validate_host(data)
+            except Exception as e:
+                log.trace(traceback.format_exc())
+                raise ValidationError(f'Error sanitizing event data "{data}" for type "{event_type}": {e}')
+            data_is_ip = is_ip(data)
+            if event_type == "DNS_NAME" and data_is_ip:
+                event_type = "IP_ADDRESS"
+            elif event_type == "IP_ADDRESS" and not data_is_ip:
+                event_type = "DNS_NAME"
+    # USERNAME <--> EMAIL_ADDRESS confusion
+    if event_type == "USERNAME" and validators.soft_validate(data, "email"):
+        event_type = "EMAIL_ADDRESS"
+        tags.add("affiliate")
+    # Convert single-host IP_RANGE to IP_ADDRESS
+    if event_type == "IP_RANGE":
+        with suppress(Exception):
+            net = ipaddress.ip_network(data, strict=False)
+            if net.prefixlen == net.max_prefixlen:
+                event_type = "IP_ADDRESS"
+                data = net.network_address
+
+    event_class = globals().get(event_type, DefaultEvent)
+    return event_class(
+        data,
+        event_type=event_type,
+        parent=parent,
+        context=context,
+        module=module,
+        scan=scan,
+        tags=tags,
+        confidence=confidence,
+        _dummy=dummy,
+        _internal=internal,
+    )


 def event_from_json(j, siem_friendly=False):
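Taken together, these changes split event construction from event mutation: make_event() only accepts raw data, and update_event() is now the way to attach metadata to an existing event. A hedged usage sketch based on the docstrings above (the data value is illustrative, and exact tag contents depend on the event type):

from bbot.core.event import make_event, update_event

event = make_event("1.2.3.4", dummy=True)      # factory: raw data -> new dummy event
event = update_event(event, tags="affiliate")  # mutation: merge metadata into it
assert "affiliate" in event.tags

# Passing an existing event back into make_event() now raises ValidationError,
# per the guard added above.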
bbot/core/flags.py
CHANGED

@@ -6,6 +6,7 @@ flag_descriptions = {
     "cloud-enum": "Enumerates cloud resources",
     "code-enum": "Find public code repositories and search them for secrets etc.",
     "deadly": "Highly aggressive",
+    "download": "Modules that download files, apps, or repositories",
     "email-enum": "Enumerates email addresses",
     "iis-shortnames": "Scans for IIS Shortname vulnerability",
     "passive": "Never connects to target systems",
bbot/core/helpers/bloom.py
CHANGED

@@ -1,6 +1,7 @@
 import os
 import mmh3
 import mmap
+import xxhash


 class BloomFilter:

@@ -55,14 +56,12 @@ class BloomFilter:
         if not isinstance(item, str):
             item = str(item)
         item = item.encode("utf-8")
-        return [abs(hash(item)) % self.size, abs(mmh3.hash(item)) % self.size, abs(self._fnv1a_hash(item)) % self.size]

-
-
-
-
-
-        return hash
+        return [
+            abs(hash(item)) % self.size,
+            abs(mmh3.hash(item)) % self.size,
+            abs(xxhash.xxh32(item).intdigest()) % self.size,
+        ]

     def close(self):
         """Explicitly close the memory-mapped file."""
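The bloom filter now derives its third index from xxhash instead of a hand-rolled FNV-1a. A minimal, self-contained sketch (not BBOT's implementation) of how three independent hashes map an item to bit positions, mirroring the indices used above; the filter size is arbitrary:

import mmh3
import xxhash

SIZE = 8 * 1024 * 1024       # number of bits; arbitrary for this sketch
bits = bytearray(SIZE // 8)  # backing bit array

def _indexes(item: str):
    data = item.encode("utf-8")
    return [
        abs(hash(data)) % SIZE,
        abs(mmh3.hash(data)) % SIZE,
        abs(xxhash.xxh32(data).intdigest()) % SIZE,
    ]

def add(item: str):
    for i in _indexes(item):
        bits[i // 8] |= 1 << (i % 8)

def check(item: str) -> bool:
    # no false negatives; occasional false positives by design
    return all(bits[i // 8] & (1 << (i % 8)) for i in _indexes(item))

add("evilcorp.com")
assert check("evilcorp.com")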
bbot/core/helpers/depsinstaller/installer.py
CHANGED

@@ -2,6 +2,8 @@ import os
 import sys
 import stat
 import json
+import mmh3
+import orjson
 import shutil
 import getpass
 import logging

@@ -14,6 +16,7 @@ from secrets import token_bytes
 from ansible_runner.interface import run
 from subprocess import CalledProcessError

+from bbot import __version__
 from ..misc import can_sudo_without_password, os_platform, rm_at_exit, get_python_constraints

 log = logging.getLogger("bbot.core.helpers.depsinstaller")

@@ -68,7 +71,7 @@ class DepsInstaller:
             },
         ],
         # to compile just about any tool, we need the openssl dev headers
-        "
+        "openssl_dev_headers": [
            {
                "name": "Install OpenSSL library and development headers (Debian/Ubuntu)",
                "package": {"name": ["libssl-dev", "openssl"], "state": "present"},

@@ -172,6 +175,7 @@ class DepsInstaller:
             + self.venv
             + str(self.parent_helper.bbot_home)
             + os.uname()[1]
+            + str(__version__)
         ).hexdigest()
         success = self.setup_status.get(module_hash, None)
         dependencies = list(chain(*preloaded["deps"].values()))

@@ -439,6 +443,13 @@ class DepsInstaller:
                 log.warning("Incorrect password")

     async def install_core_deps(self):
+        # skip if we've already successfully installed core deps for this definition
+        core_deps_hash = str(mmh3.hash(orjson.dumps(self.CORE_DEPS, option=orjson.OPT_SORT_KEYS)))
+        core_deps_cache_file = self.parent_helper.cache_dir / core_deps_hash
+        if core_deps_cache_file.exists():
+            log.debug("Skipping core dependency installation (cache hit)")
+            return
+
         to_install = set()
         to_install_friendly = set()
         playbook = []

@@ -454,6 +465,7 @@ class DepsInstaller:
         else:
             playbook.extend(package_name_or_playbook)
         # install ansible community.general collection
+        overall_success = True
         if not self.setup_status.get("ansible:community.general", False):
             log.info("Installing Ansible Community General Collection")
             try:

@@ -465,6 +477,7 @@ class DepsInstaller:
                 log.warning(
                     f"Failed to install Ansible Community.General Collection (return code {err.returncode}): {err.stderr}"
                 )
+                overall_success = False
         # construct ansible playbook
         if to_install:
             playbook.append(

@@ -478,7 +491,13 @@ class DepsInstaller:
         if playbook:
             log.info(f"Installing core BBOT dependencies: {','.join(sorted(to_install_friendly))}")
             self.ensure_root()
-            self.ansible_run(tasks=playbook)
+            success, _ = self.ansible_run(tasks=playbook)
+            overall_success &= success
+
+        # mark cache only if everything succeeded (or nothing needed doing)
+        if overall_success:
+            with suppress(Exception):
+                core_deps_cache_file.touch()

     def _setup_sudo_cache(self):
         if not self._sudo_cache_setup:
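The install_core_deps() change above caches a "done" marker keyed by a hash of the dependency definition, so re-runs are skipped until CORE_DEPS itself changes. A minimal sketch of that cache-key scheme; the CORE_DEPS stand-in and cache path are made up:

import mmh3
import orjson
from pathlib import Path

CORE_DEPS = {"curl": ["..."], "unzip": ["..."]}   # stand-in for DepsInstaller.CORE_DEPS
cache_dir = Path("/tmp/bbot_core_deps_cache_demo")
cache_dir.mkdir(parents=True, exist_ok=True)

# deterministic key: sorted-key JSON -> murmur3 hash
key = str(mmh3.hash(orjson.dumps(CORE_DEPS, option=orjson.OPT_SORT_KEYS)))
marker = cache_dir / key

if marker.exists():
    print("core deps already installed for this definition; skipping")
else:
    # ... run the install steps here ...
    marker.touch()  # only touched on success in the real code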
bbot/core/helpers/dns/dns.py
CHANGED

@@ -38,7 +38,6 @@ class DNSHelper(EngineClient):
         _wildcard_cache (dict): Cache for wildcard detection results.
         _dns_cache (LRUCache): Cache for DNS resolution results, limited in size.
         resolver_file (Path): File containing system's current resolver nameservers.
-        filter_bad_ptrs (bool): Whether to filter out DNS names that appear to be auto-generated PTR records. Defaults to True.

     Args:
         parent_helper: The parent helper object with configuration details and utilities.
bbot/core/helpers/dns/engine.py
CHANGED

@@ -86,8 +86,6 @@ class DNSEngine(EngineServer):
         self._debug = self.dns_config.get("debug", False)
         self._dns_cache = LRUCache(maxsize=10000)

-        self.filter_bad_ptrs = self.dns_config.get("filter_ptrs", True)
-
     async def resolve(self, query, **kwargs):
         """Resolve DNS names and IP addresses to their corresponding results.

bbot/core/helpers/files.py
CHANGED

@@ -9,7 +9,7 @@ from .misc import rm_at_exit
 log = logging.getLogger("bbot.core.helpers.files")


-def tempfile(self, content, pipe=True):
+def tempfile(self, content, pipe=True, extension=None):
     """
     Creates a temporary file or named pipe and populates it with content.


@@ -29,7 +29,7 @@ def tempfile(self, content, pipe=True):
        >>> tempfile(["Another", "temp", "file"], pipe=False)
        '/home/user/.bbot/temp/someotherfile'
    """
-    filename = self.temp_filename()
+    filename = self.temp_filename(extension)
     rm_at_exit(filename)
     try:
         if type(content) not in (set, list, tuple):
bbot/core/helpers/git.py
ADDED

@@ -0,0 +1,17 @@
+from pathlib import Path
+
+
+def sanitize_git_repo(repo_folder: Path):
+    # sanitizing the git config is infeasible since there are too many different ways to do evil things
+    # instead, we move it out of .git and into the repo folder, so we don't miss any secrets etc. inside
+    config_file = repo_folder / ".git" / "config"
+    if config_file.exists():
+        config_file.rename(repo_folder / "git_config_original")
+    # move the index file
+    index_file = repo_folder / ".git" / "index"
+    if index_file.exists():
+        index_file.rename(repo_folder / "git_index_original")
+    # move the hooks folder
+    hooks_folder = repo_folder / ".git" / "hooks"
+    if hooks_folder.exists():
+        hooks_folder.rename(repo_folder / "git_hooks_original")
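A hypothetical usage sketch of the new helper: after cloning a repository of unknown provenance, park .git/config, .git/index, and .git/hooks in the repo root so they can still be scanned for secrets but can no longer affect git's behavior. The repo path is illustrative.

from pathlib import Path
from bbot.core.helpers.git import sanitize_git_repo

repo = Path("/tmp/cloned_repo")  # assumed location of a freshly cloned repo
if (repo / ".git").is_dir():
    sanitize_git_repo(repo)
    # .git/config, .git/index and .git/hooks now live in the repo root as
    # git_config_original, git_index_original and git_hooks_original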
bbot/core/helpers/helper.py
CHANGED

@@ -89,6 +89,7 @@ class ConfigAwareHelper:
         self.yara = YaraHelper(self)
         self._dns = None
         self._web = None
+        self._cloudcheck = None
         self.config_aware_validators = self.validators.Validators(self)
         self.depsinstaller = DepsInstaller(self)
         self.word_cloud = WordCloud(self)

@@ -107,12 +108,12 @@ class ConfigAwareHelper:
         return self._web

     @property
-    def
-        if self.
-            from cloudcheck import
+    def cloudcheck(self):
+        if self._cloudcheck is None:
+            from cloudcheck import CloudCheck

-        self.
-        return self.
+            self._cloudcheck = CloudCheck()
+        return self._cloudcheck

     def bloom_filter(self, size):
         from .bloom import BloomFilter
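The cloudcheck helper is now a lazily-constructed CloudCheck client rather than a plain function (see also the awaited cloudcheck.lookup() call in bbot/cli.py above). A minimal sketch of the lazy-property pattern used here, with the import deferred until first access:

class LazyHelpers:
    def __init__(self):
        self._cloudcheck = None

    @property
    def cloudcheck(self):
        if self._cloudcheck is None:
            # deferred import, as in the diff; requires the `cloudcheck` package
            from cloudcheck import CloudCheck

            self._cloudcheck = CloudCheck()
        return self._cloudcheck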
bbot/core/helpers/misc.py
CHANGED

@@ -17,6 +17,7 @@ from unidecode import unidecode  # noqa F401
 from asyncio import create_task, gather, sleep, wait_for  # noqa
 from urllib.parse import urlparse, quote, unquote, urlunparse, urljoin  # noqa F401

+from .git import *  # noqa F401
 from .url import *  # noqa F401
 from ... import errors
 from . import regexes as bbot_regexes

@@ -216,26 +217,29 @@ def split_host_port(d):
     host = None
     port = None
     scheme = None
+
+    # first, try to parse as an IP address
     if is_ip(d):
         return make_ip_type(d), port

+    # if not an IP address, try to parse as a host:port
     match = bbot_regexes.split_host_port_regex.match(d)
     if match is None:
-        raise ValueError(f'
+        raise ValueError(f'split_host_port() failed to parse "{d}"')
     scheme = match.group("scheme")
     netloc = match.group("netloc")
     if netloc is None:
-        raise ValueError(f'
+        raise ValueError(f'split_host_port() failed to parse "{d}"')

     match = bbot_regexes.extract_open_port_regex.match(netloc)
     if match is None:
-        raise ValueError(f'
+        raise ValueError(f'split_host_port() failed to parse netloc "{netloc}" (original value: {d})')

     host = match.group(2)
     if host is None:
         host = match.group(1)
     if host is None:
-        raise ValueError(f'
+        raise ValueError(f'split_host_port() failed to locate host in netloc "{netloc}" (original value: {d})')

     port = match.group(3)
     if port is None and scheme is not None:

@@ -2288,25 +2292,6 @@ def is_file(f):
     return False


-def cloudcheck(ip):
-    """
-    Check whether an IP address belongs to a cloud provider and returns the provider name, type, and subnet.
-
-    Args:
-        ip (str): The IP address to check.
-
-    Returns:
-        tuple: A tuple containing provider name (str), provider type (str), and subnet (IPv4Network).
-
-    Examples:
-        >>> cloudcheck("168.62.20.37")
-        ('Azure', 'cloud', IPv4Network('168.62.0.0/19'))
-    """
-    import cloudcheck as _cloudcheck
-
-    return _cloudcheck.check(ip)
-
-
 def is_async_function(f):
     """
     Check if a given function is an asynchronous function.
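The split_host_port() error messages now name the helper and echo the offending input. An illustrative call (the inputs are made up; the expected host/port output is an assumption based on the helper's existing contract):

from bbot.core.helpers.misc import split_host_port

host, port = split_host_port("evilcorp.com:443")
print(host, port)  # expected roughly: evilcorp.com 443
host, port = split_host_port("https://evilcorp.com")
print(host, port)  # port inferred from the scheme's default when none is given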
bbot/core/helpers/ntlm.py
CHANGED

@@ -17,11 +17,9 @@ class StrStruct(object):
         self.alloc = alloc
         self.offset = offset
         self.raw = raw[offset : offset + length]
-        self.utf16 = False

         if len(self.raw) >= 2 and self.raw[1] == "\0":
             self.string = self.raw.decode("utf-16")
-            self.utf16 = True
         else:
             self.string = self.raw
