bbot-2.6.0.6840rc0-py3-none-any.whl → bbot-2.7.2.7424rc0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as published.
Files changed (122)
  1. bbot/__init__.py +1 -1
  2. bbot/cli.py +22 -8
  3. bbot/core/engine.py +1 -1
  4. bbot/core/event/__init__.py +2 -2
  5. bbot/core/event/base.py +138 -110
  6. bbot/core/flags.py +1 -0
  7. bbot/core/helpers/bloom.py +6 -7
  8. bbot/core/helpers/depsinstaller/installer.py +21 -2
  9. bbot/core/helpers/dns/dns.py +0 -1
  10. bbot/core/helpers/dns/engine.py +0 -2
  11. bbot/core/helpers/files.py +2 -2
  12. bbot/core/helpers/git.py +17 -0
  13. bbot/core/helpers/helper.py +6 -5
  14. bbot/core/helpers/misc.py +8 -23
  15. bbot/core/helpers/ntlm.py +0 -2
  16. bbot/core/helpers/regex.py +1 -1
  17. bbot/core/helpers/regexes.py +25 -8
  18. bbot/core/helpers/web/web.py +2 -1
  19. bbot/core/modules.py +22 -60
  20. bbot/defaults.yml +4 -2
  21. bbot/modules/apkpure.py +1 -1
  22. bbot/modules/baddns.py +1 -1
  23. bbot/modules/baddns_direct.py +1 -1
  24. bbot/modules/baddns_zone.py +1 -1
  25. bbot/modules/badsecrets.py +1 -1
  26. bbot/modules/base.py +123 -38
  27. bbot/modules/bucket_amazon.py +1 -1
  28. bbot/modules/bucket_digitalocean.py +1 -1
  29. bbot/modules/bucket_firebase.py +1 -1
  30. bbot/modules/bucket_google.py +1 -1
  31. bbot/modules/{bucket_azure.py → bucket_microsoft.py} +2 -2
  32. bbot/modules/builtwith.py +4 -2
  33. bbot/modules/dnsbimi.py +1 -4
  34. bbot/modules/dnsbrute.py +6 -1
  35. bbot/modules/dnsdumpster.py +35 -52
  36. bbot/modules/dnstlsrpt.py +0 -6
  37. bbot/modules/docker_pull.py +1 -1
  38. bbot/modules/emailformat.py +17 -1
  39. bbot/modules/ffuf.py +4 -1
  40. bbot/modules/ffuf_shortnames.py +6 -3
  41. bbot/modules/filedownload.py +7 -4
  42. bbot/modules/git_clone.py +47 -22
  43. bbot/modules/gitdumper.py +4 -14
  44. bbot/modules/github_workflows.py +6 -5
  45. bbot/modules/gitlab_com.py +31 -0
  46. bbot/modules/gitlab_onprem.py +84 -0
  47. bbot/modules/gowitness.py +0 -6
  48. bbot/modules/graphql_introspection.py +5 -2
  49. bbot/modules/httpx.py +2 -0
  50. bbot/modules/iis_shortnames.py +0 -7
  51. bbot/modules/internal/cloudcheck.py +65 -72
  52. bbot/modules/internal/unarchive.py +9 -3
  53. bbot/modules/lightfuzz/lightfuzz.py +6 -2
  54. bbot/modules/lightfuzz/submodules/esi.py +42 -0
  55. bbot/modules/medusa.py +4 -7
  56. bbot/modules/nuclei.py +1 -1
  57. bbot/modules/otx.py +9 -2
  58. bbot/modules/output/base.py +3 -11
  59. bbot/modules/paramminer_headers.py +10 -7
  60. bbot/modules/portfilter.py +2 -0
  61. bbot/modules/postman_download.py +1 -1
  62. bbot/modules/retirejs.py +232 -0
  63. bbot/modules/securitytxt.py +0 -3
  64. bbot/modules/sslcert.py +2 -2
  65. bbot/modules/subdomaincenter.py +1 -16
  66. bbot/modules/telerik.py +7 -2
  67. bbot/modules/templates/bucket.py +24 -4
  68. bbot/modules/templates/gitlab.py +98 -0
  69. bbot/modules/trufflehog.py +6 -3
  70. bbot/modules/wafw00f.py +2 -2
  71. bbot/presets/web/lightfuzz-heavy.yml +1 -1
  72. bbot/presets/web/lightfuzz-medium.yml +1 -1
  73. bbot/presets/web/lightfuzz-superheavy.yml +1 -1
  74. bbot/scanner/manager.py +44 -37
  75. bbot/scanner/scanner.py +12 -4
  76. bbot/scripts/benchmark_report.py +433 -0
  77. bbot/test/benchmarks/__init__.py +2 -0
  78. bbot/test/benchmarks/test_bloom_filter_benchmarks.py +105 -0
  79. bbot/test/benchmarks/test_closest_match_benchmarks.py +76 -0
  80. bbot/test/benchmarks/test_event_validation_benchmarks.py +438 -0
  81. bbot/test/benchmarks/test_excavate_benchmarks.py +291 -0
  82. bbot/test/benchmarks/test_ipaddress_benchmarks.py +143 -0
  83. bbot/test/benchmarks/test_weighted_shuffle_benchmarks.py +70 -0
  84. bbot/test/test_step_1/test_bbot_fastapi.py +2 -2
  85. bbot/test/test_step_1/test_events.py +22 -21
  86. bbot/test/test_step_1/test_helpers.py +1 -0
  87. bbot/test/test_step_1/test_manager_scope_accuracy.py +45 -0
  88. bbot/test/test_step_1/test_modules_basic.py +40 -15
  89. bbot/test/test_step_1/test_python_api.py +2 -2
  90. bbot/test/test_step_1/test_regexes.py +21 -4
  91. bbot/test/test_step_1/test_scan.py +7 -8
  92. bbot/test/test_step_1/test_web.py +46 -0
  93. bbot/test/test_step_2/module_tests/base.py +6 -1
  94. bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +52 -18
  95. bbot/test/test_step_2/module_tests/test_module_bucket_google.py +1 -1
  96. bbot/test/test_step_2/module_tests/{test_module_bucket_azure.py → test_module_bucket_microsoft.py} +7 -5
  97. bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +19 -31
  98. bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +2 -1
  99. bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +3 -5
  100. bbot/test/test_step_2/module_tests/test_module_emailformat.py +1 -1
  101. bbot/test/test_step_2/module_tests/test_module_emails.py +2 -2
  102. bbot/test/test_step_2/module_tests/test_module_excavate.py +57 -4
  103. bbot/test/test_step_2/module_tests/test_module_github_workflows.py +10 -1
  104. bbot/test/test_step_2/module_tests/test_module_gitlab_com.py +66 -0
  105. bbot/test/test_step_2/module_tests/{test_module_gitlab.py → test_module_gitlab_onprem.py} +4 -69
  106. bbot/test/test_step_2/module_tests/test_module_lightfuzz.py +71 -3
  107. bbot/test/test_step_2/module_tests/test_module_nuclei.py +1 -2
  108. bbot/test/test_step_2/module_tests/test_module_otx.py +3 -0
  109. bbot/test/test_step_2/module_tests/test_module_portfilter.py +2 -0
  110. bbot/test/test_step_2/module_tests/test_module_retirejs.py +161 -0
  111. bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
  112. bbot/test/test_step_2/module_tests/test_module_trufflehog.py +10 -1
  113. {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/METADATA +10 -7
  114. {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/RECORD +117 -106
  115. {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/WHEEL +1 -1
  116. {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info/licenses}/LICENSE +98 -58
  117. bbot/modules/censys.py +0 -98
  118. bbot/modules/gitlab.py +0 -141
  119. bbot/modules/zoomeye.py +0 -77
  120. bbot/test/test_step_2/module_tests/test_module_censys.py +0 -83
  121. bbot/test/test_step_2/module_tests/test_module_zoomeye.py +0 -35
  122. {bbot-2.6.0.6840rc0.dist-info → bbot-2.7.2.7424rc0.dist-info}/entry_points.txt +0 -0
bbot/modules/graphql_introspection.py CHANGED
@@ -27,7 +27,6 @@ class graphql_introspection(BaseModule):
             self.output_dir = Path(output_folder) / "graphql-schemas"
         else:
             self.output_dir = self.scan.home / "graphql-schemas"
-        self.helpers.mkdir(self.output_dir)
         return True
 
     async def filter_event(self, event):
@@ -120,7 +119,10 @@ fragment TypeRef on __Type {
                 }
                 response = await self.helpers.request(**request_args)
                 if not response or response.status_code != 200:
-                    self.debug(f"Failed to get GraphQL schema for {url} (status code {response.status_code})")
+                    self.debug(
+                        f"Failed to get GraphQL schema for {url} "
+                        f"{f'(status code {response.status_code})' if response else ''}"
+                    )
                     continue
                 try:
                     response_json = response.json()
@@ -128,6 +130,7 @@ fragment TypeRef on __Type {
                     self.debug(f"Failed to parse JSON for {url}")
                     continue
                 if response_json.get("data", {}).get("__schema", {}).get("types", []):
+                    self.helpers.mkdir(self.output_dir)
                     filename = f"schema-{self.helpers.tagify(url)}.json"
                     filename = self.output_dir / filename
                     with open(filename, "w") as f:
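Note on this change: besides deferring mkdir until a schema is actually found, it repairs a latent crash. The old debug line interpolated response.status_code even when the request failed and response was None. The same guard in isolation (a sketch; log_schema_failure is a made-up name and print stands in for self.debug):

    def log_schema_failure(url, response):
        # response may be None when the request itself failed
        status = f"(status code {response.status_code})" if response else ""
        print(f"Failed to get GraphQL schema for {url} {status}".rstrip())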
bbot/modules/httpx.py CHANGED
@@ -50,6 +50,8 @@ class httpx(BaseModule):
     _shuffle_incoming_queue = False
     _batch_size = 500
     _priority = 2
+    # accept Javascript URLs
+    accept_url_special = True
 
     async def setup(self):
         self.threads = self.config.get("threads", 50)
bbot/modules/iis_shortnames.py CHANGED
@@ -116,13 +116,6 @@ class iis_shortnames(BaseModule):
 
         return duplicates
 
-    async def threaded_request(self, method, url, affirmative_status_code, c):
-        r = await self.helpers.request(method=method, url=url, allow_redirects=False, retries=2, timeout=10)
-        if r is not None:
-            if r.status_code == affirmative_status_code:
-                return True, c
-        return None, c
-
     async def solve_valid_chars(self, method, target, affirmative_status_code):
         confirmed_chars = []
         confirmed_exts = []
bbot/modules/internal/cloudcheck.py CHANGED
@@ -1,3 +1,5 @@
+import asyncio
+import regex as re
 from contextlib import suppress
 
 from bbot.modules.base import BaseInterceptModule
@@ -10,107 +12,98 @@ class CloudCheck(BaseInterceptModule):
         "created_date": "2024-07-07",
         "author": "@TheTechromancer",
     }
-    scope_distance_modifier = 1
+    # tag events up to and including distance-2
+    scope_distance_modifier = 2
     _priority = 3
 
     async def setup(self):
-        from cloudcheck import update
-
-        await update()
-        self.dummy_modules = None
+        self._cloud_hostname_regexes = None
+        self._cloud_hostname_regexes_lock = asyncio.Lock()
         return True
 
-    def make_dummy_modules(self):
-        self.dummy_modules = {}
-        for provider_name in self.helpers.cloud.providers.keys():
-            module = self.scan._make_dummy_module(f"cloud_{provider_name}", _type="scan")
-            module.default_discovery_context = "{module} derived {event.type}: {event.host}"
-            self.dummy_modules[provider_name] = module
-
     async def filter_event(self, event):
         if (not event.host) or (event.type in ("IP_RANGE",)):
             return False, "event does not have host attribute"
         return True
 
     async def handle_event(self, event, **kwargs):
-        # don't hold up the event loop loading cloud IPs etc.
-        if self.dummy_modules is None:
-            self.make_dummy_modules()
         # cloud tagging by hosts
         hosts_to_check = set(event.resolved_hosts)
         with suppress(KeyError):
             hosts_to_check.remove(event.host_original)
-        hosts_to_check = [event.host_original] + list(hosts_to_check)
+        hosts_to_check = [str(event.host_original)] + list(hosts_to_check)
 
         for i, host in enumerate(hosts_to_check):
            host_is_ip = self.helpers.is_ip(host)
            try:
-                cloudcheck_results = self.helpers.cloudcheck(host)
+                cloudcheck_results = await self.helpers.cloudcheck.lookup(host)
            except Exception as e:
                self.trace(f"Error running cloudcheck against {event} (host: {host}): {e}")
                continue
-            for provider, provider_type, subnet in cloudcheck_results:
-                if provider:
-                    event.add_tag(f"{provider_type}-{provider}")
+            for provider in cloudcheck_results:
+                provider_name = provider["name"].lower()
+                tags = provider.get("tags", [])
+                for tag in tags:
+                    event.add_tag(tag)
+                    event.add_tag(f"{tag}-{provider_name}")
                 if host_is_ip:
-                    event.add_tag(f"{provider_type}-ip")
+                    event.add_tag(f"{provider_name}-ip")
                 else:
                     # if the original hostname is a cloud domain, tag it as such
                     if i == 0:
-                        event.add_tag(f"{provider_type}-domain")
+                        event.add_tag(f"{provider_name}-domain")
                     # any children are tagged as CNAMEs
                     else:
-                        event.add_tag(f"{provider_type}-cname")
+                        event.add_tag(f"{provider_name}-cname")
 
-        found = set()
-        str_hosts_to_check = [str(host) for host in hosts_to_check]
-        # look for cloud assets in hosts, http responses
-        # loop through each provider
-        for provider in self.helpers.cloud.providers.values():
-            provider_name = provider.name.lower()
-            base_kwargs = {
-                "parent": event,
-                "tags": [f"{provider.provider_type}-{provider_name}"],
-                "_provider": provider_name,
-            }
-            # loop through the provider's regex signatures, if any
-            for event_type, sigs in provider.signatures.items():
-                if event_type != "STORAGE_BUCKET":
-                    raise ValueError(f'Unknown cloudcheck event type "{event_type}"')
-                base_kwargs["event_type"] = event_type
-                for sig in sigs:
-                    matches = []
-                    # TODO: convert this to an excavate YARA hook
-                    # if event.type == "HTTP_RESPONSE":
-                    #     matches = await self.helpers.re.findall(sig, event.data.get("body", ""))
-                    if event.type.startswith("DNS_NAME"):
-                        for host in str_hosts_to_check:
-                            match = sig.match(host)
-                            if match:
-                                matches.append(match.groups())
-                    for match in matches:
-                        if match not in found:
-                            found.add(match)
+        # we only generate storage buckets off of in-scope or distance-1 events
+        if event.scope_distance >= self.max_scope_distance:
+            return
 
-                            _kwargs = dict(base_kwargs)
-                            event_type_tag = f"cloud-{event_type}"
-                            _kwargs["tags"].append(event_type_tag)
-                            if event.type.startswith("DNS_NAME"):
-                                event.add_tag(event_type_tag)
+        # see if any of our hosts are storage buckets, etc.
+        regexes = await self.cloud_hostname_regexes()
+        regexes = regexes.get("STORAGE_BUCKET_HOSTNAME", [])
+        for regex_name, regex in regexes.items():
+            for host in hosts_to_check:
+                if match := regex.match(host):
+                    try:
+                        bucket_name, bucket_domain = match.groups()
+                    except Exception as e:
+                        self.error(
+                            f"Bucket regex {regex_name} ({regex}) is not formatted correctly to extract bucket name and domain: {e}"
+                        )
+                        continue
+                    bucket_name, bucket_domain = match.groups()
+                    bucket_url = f"https://{bucket_name}.{bucket_domain}"
+                    await self.emit_event(
+                        {
+                            "name": bucket_name,
+                            "url": bucket_url,
+                            "context": f"{{module}} analyzed {event.type} and found {{event.type}}: {bucket_url}",
+                        },
+                        "STORAGE_BUCKET",
+                        parent=event,
+                    )
 
-                            if event_type == "STORAGE_BUCKET":
-                                bucket_name, bucket_domain = match
-                                bucket_url = f"https://{bucket_name}.{bucket_domain}"
-                                _kwargs["data"] = {
-                                    "name": bucket_name,
-                                    "url": bucket_url,
-                                    "context": f"{{module}} analyzed {event.type} and found {{event.type}}: {bucket_url}",
-                                }
-                            await self.emit_event(**_kwargs)
+    async def cloud_hostname_regexes(self):
+        async with self._cloud_hostname_regexes_lock:
+            if not self._cloud_hostname_regexes:
+                storage_bucket_regexes = {}
+                self._cloud_hostname_regexes = {"STORAGE_BUCKET_HOSTNAME": storage_bucket_regexes}
+                from cloudcheck import providers
 
-    async def emit_event(self, *args, **kwargs):
-        provider_name = kwargs.pop("_provider")
-        dummy_module = self.dummy_modules[provider_name]
-        event = dummy_module.make_event(*args, **kwargs)
-        if event:
-            await super().emit_event(event)
+                for attr in dir(providers):
+                    if attr.startswith("_"):
+                        continue
+                    provider = getattr(providers, attr)
+                    provider_regexes = getattr(provider, "regexes", {})
+                    for regex_name, regexes in provider_regexes.items():
+                        for i, regex in enumerate(regexes):
+                            if not regex_name in ("STORAGE_BUCKET_HOSTNAME"):
+                                continue
+                            try:
+                                storage_bucket_regexes[f"{attr}-{regex_name}-{i}"] = re.compile(regex)
+                            except Exception as e:
+                                self.error(f"Error compiling regex for {attr}-{regex_name}: {e}")
+                                continue
+        return self._cloud_hostname_regexes
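Note on this change: the rewrite drops the per-provider dummy modules and instead compiles its STORAGE_BUCKET_HOSTNAME regexes lazily, behind an asyncio.Lock, so concurrent handle_event() calls don't race to build the same cache. A minimal self-contained sketch of that lock-guarded lazy initialization (LazyRegexCache and the example pattern are illustrative, not BBOT's API):

    import asyncio
    import re

    class LazyRegexCache:
        def __init__(self, raw_patterns):
            self._raw_patterns = raw_patterns  # name -> raw pattern string
            self._compiled = None              # built on first use
            self._lock = asyncio.Lock()

        async def get(self):
            async with self._lock:
                # only the first caller pays the compilation cost;
                # bad patterns are skipped instead of failing the whole cache
                if self._compiled is None:
                    self._compiled = {}
                    for name, pattern in self._raw_patterns.items():
                        try:
                            self._compiled[name] = re.compile(pattern)
                        except re.error as e:
                            print(f"skipping bad regex {name}: {e}")
            return self._compiled

    async def main():
        cache = LazyRegexCache({"bucket": r"(?P<name>[^.]+)\.(s3\.amazonaws\.com)"})
        regexes = await cache.get()
        print(regexes["bucket"].match("mybucket.s3.amazonaws.com").groups())

    asyncio.run(main())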
bbot/modules/internal/unarchive.py CHANGED
@@ -1,4 +1,5 @@
 from pathlib import Path
+from contextlib import suppress
 from bbot.modules.internal.base import BaseInternalModule
 from bbot.core.helpers.libmagic import get_magic_info, get_compression
 
@@ -62,15 +63,20 @@ class unarchive(BaseInternalModule):
                 context=f'extracted "{path}" to: {output_dir}',
             )
         else:
-            output_dir.rmdir()
+            with suppress(OSError):
+                output_dir.rmdir()
 
     async def extract_file(self, path, output_dir):
        extension, mime_type, description, confidence = get_magic_info(path)
        compression_format = get_compression(mime_type)
        cmd_list = self.compression_methods.get(compression_format, [])
        if cmd_list:
-            if not output_dir.exists():
-                self.helpers.mkdir(output_dir)
+            # output dir must not already exist
+            try:
+                output_dir.mkdir(exist_ok=False)
+            except FileExistsError:
+                self.warning(f"Destination directory {output_dir} already exists, aborting unarchive for {path}")
+                return False
            command = [s.format(filename=path, extract_dir=output_dir) for s in cmd_list]
            try:
                await self.run_process(command, check=True)
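Note on this change: it swaps a check-then-create (output_dir.exists() followed by mkdir) for a single mkdir(exist_ok=False), which turns "the destination must not already exist" into an atomic guarantee rather than a race window between the check and the create. The same guard in isolation (a sketch; claim_output_dir is a made-up name):

    from pathlib import Path

    def claim_output_dir(output_dir: Path) -> bool:
        # atomic: either this call creates the directory, or it already existed
        try:
            output_dir.mkdir(exist_ok=False)
        except FileExistsError:
            print(f"{output_dir} already exists, refusing to extract into it")
            return False
        return True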
bbot/modules/lightfuzz/lightfuzz.py CHANGED
@@ -11,7 +11,7 @@ class lightfuzz(BaseModule):
 
     options = {
         "force_common_headers": False,
-        "enabled_submodules": ["sqli", "cmdi", "xss", "path", "ssti", "crypto", "serial"],
+        "enabled_submodules": ["sqli", "cmdi", "xss", "path", "ssti", "crypto", "serial", "esi"],
         "disable_post": False,
     }
     options_desc = {
@@ -34,6 +34,7 @@ class lightfuzz(BaseModule):
         self.event_dict = {}
         self.interactsh_subdomain_tags = {}
         self.interactsh_instance = None
+        self.interactsh_domain = None
         self.disable_post = self.config.get("disable_post", False)
         self.enabled_submodules = self.config.get("enabled_submodules")
         self.interactsh_disable = self.scan.config.get("interactsh_disable", False)
@@ -51,13 +52,16 @@ class lightfuzz(BaseModule):
             self.submodules[submodule_name] = submodule_class
 
         interactsh_needed = any(submodule.uses_interactsh for submodule in self.submodules.values())
-
         if interactsh_needed and not self.interactsh_disable:
             try:
                 self.interactsh_instance = self.helpers.interactsh()
                 self.interactsh_domain = await self.interactsh_instance.register(callback=self.interactsh_callback)
+                if not self.interactsh_domain:
+                    self.warning("Interactsh failure: No domain returned from self.interactsh_instance.register()")
+                    self.interactsh_instance = None
             except InteractshError as e:
                 self.warning(f"Interactsh failure: {e}")
+                self.interactsh_instance = None
         return True
 
     async def interactsh_callback(self, r):
bbot/modules/lightfuzz/submodules/esi.py ADDED
@@ -0,0 +1,42 @@
+from .base import BaseLightfuzz
+
+
+class esi(BaseLightfuzz):
+    """
+    Detects Edge Side Includes (ESI) processing vulnerabilities.
+
+    Tests if the server processes ESI tags by sending a payload containing ESI tags
+    and checking if the tags are processed (removed) in the response.
+    """
+
+    # Technique lifted from https://github.com/PortSwigger/active-scan-plus-plus
+
+    friendly_name = "Edge Side Includes"
+
+    async def check_probe(self, cookies, probe, match):
+        """
+        Sends the probe and checks if the expected match string is found in the response.
+        """
+        probe_result = await self.standard_probe(self.event.data["type"], cookies, probe)
+        if probe_result and match in probe_result.text:
+            self.results.append(
+                {
+                    "type": "FINDING",
+                    "description": f"Edge Side Include. Parameter: [{self.event.data['name']}] Parameter Type: [{self.event.data['type']}]",
+                }
+            )
+            return True
+        return False
+
+    async def fuzz(self):
+        """
+        Main fuzzing method that sends the ESI test payload and checks for processing.
+        """
+        cookies = self.event.data.get("assigned_cookies", {})
+
+        # ESI test payload: if ESI is processed, <!--esi--> will be removed
+        # leaving AABB<!--esx-->CC in the response
+        payload = "AA<!--esi-->BB<!--esx-->CC"
+        detection_string = "AABB<!--esx-->CC"
+
+        await self.check_probe(cookies, payload, detection_string)
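Note on this new submodule: the detection trick is that <!--esi--> is a valid ESI tag that an ESI-processing edge strips from the response, while the deliberately invalid <!--esx--> survives, so a reflection of AA<!--esi-->BB<!--esx-->CC comes back as AABB<!--esx-->CC only when ESI is actually being processed. A standalone sketch of the same check outside BBOT's lightfuzz plumbing, using the httpx client (url and param are hypothetical):

    import httpx

    PAYLOAD = "AA<!--esi-->BB<!--esx-->CC"
    DETECTION = "AABB<!--esx-->CC"

    async def probe_esi(url: str, param: str) -> bool:
        # reflect the payload through a query parameter and look for the
        # processed form: <!--esi--> stripped, <!--esx--> left intact
        async with httpx.AsyncClient() as client:
            response = await client.get(url, params={param: PAYLOAD})
        return DETECTION in response.text

    # e.g. asyncio.run(probe_esi("https://example.com/search", "q"))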
bbot/modules/medusa.py CHANGED
@@ -1,6 +1,5 @@
 import re
 from bbot.modules.base import BaseModule
-from bbot.errors import WordlistError
 
 
 class medusa(BaseModule):
@@ -102,13 +101,11 @@ class medusa(BaseModule):
         },
     ]
 
-    async def setup(self):
-        # Try to cache wordlist
-        try:
-            self.snmp_wordlist_path = await self.helpers.wordlist(self.config.get("snmp_wordlist"))
-        except WordlistError as e:
-            return False, f"Error retrieving wordlist: {e}"
+    async def setup_deps(self):
+        self.snmp_wordlist_path = await self.helpers.wordlist(self.config.get("snmp_wordlist"))
+        return True
 
+    async def setup(self):
         self.password_match_regex = re.compile(r"Password:\s*(\S+)")
         self.success_indicator_match_regex = re.compile(r"\[([^\]]+)\]\s*$")
 
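Note on this change: medusa (and paramminer_headers below) move wordlist retrieval out of setup() into a separate setup_deps() hook, in line with the base-module rework elsewhere in this release (bbot/modules/base.py +123 -38). A schematic sketch of the split, assuming setup_deps() follows the same return contract as setup() (mymodule and its options are placeholders):

    from bbot.modules.base import BaseModule

    class mymodule(BaseModule):
        async def setup_deps(self):
            # dependency retrieval (downloads, wordlists) lives here
            self.wordlist_path = await self.helpers.wordlist(self.config.get("wordlist"))
            return True

        async def setup(self):
            # per-scan state stays in setup()
            self.already_seen = set()
            return True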
bbot/modules/nuclei.py CHANGED
@@ -15,7 +15,7 @@ class nuclei(BaseModule):
     }
 
     options = {
-        "version": "3.4.2",
+        "version": "3.6.2",
         "tags": "",
         "templates": "",
         "severity": "",
bbot/modules/otx.py CHANGED
@@ -1,7 +1,7 @@
-from bbot.modules.templates.subdomain_enum import subdomain_enum
+from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey
 
 
-class otx(subdomain_enum):
+class otx(subdomain_enum_apikey):
     flags = ["subdomain-enum", "passive", "safe"]
     watched_events = ["DNS_NAME"]
     produced_events = ["DNS_NAME"]
@@ -9,10 +9,17 @@ class otx(subdomain_enum):
         "description": "Query otx.alienvault.com for subdomains",
         "created_date": "2022-08-24",
         "author": "@TheTechromancer",
+        "auth_required": True,
     }
+    options = {"api_key": ""}
+    options_desc = {"api_key": "OTX API key"}
 
     base_url = "https://otx.alienvault.com"
 
+    def prepare_api_request(self, url, kwargs):
+        kwargs["headers"]["X-OTX-API-KEY"] = self.api_key
+        return url, kwargs
+
     def request_url(self, query):
         url = f"{self.base_url}/api/v1/indicators/domain/{self.helpers.quote(query)}/passive_dns"
         return self.api_request(url)
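Note on this change: with the move to subdomain_enum_apikey, otx now authenticates by overriding prepare_api_request(), the hook the API templates use to mutate each outgoing request before sending (here, attaching X-OTX-API-KEY). The same pattern for a hypothetical header-authenticated service (myapi, X-Api-Key, and the base_url are made up):

    from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey

    class myapi(subdomain_enum_apikey):
        options = {"api_key": ""}
        options_desc = {"api_key": "My API key"}
        base_url = "https://api.example.com"

        def prepare_api_request(self, url, kwargs):
            # attach the key as a request header before the template sends it
            kwargs["headers"]["X-Api-Key"] = self.api_key
            return url, kwargs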
bbot/modules/output/base.py CHANGED
@@ -38,26 +38,18 @@ class BaseOutputModule(BaseModule):
         if self._is_graph_important(event):
             return True, "event is critical to the graph"
 
-        # exclude certain URLs (e.g. javascript):
-        # TODO: revisit this after httpx rework
-        if event.type.startswith("URL") and self.name != "httpx" and "httpx-only" in event.tags:
-            return False, (f"Omitting {event} from output because it's marked as httpx-only")
-
         # omit certain event types
         if event._omit:
-            if "target" in event.tags:
-                reason = "it's a target"
-                self.debug(f"Allowing omitted event: {event} because {reason}")
-            elif event.type in self.get_watched_events():
+            if event.type in self.get_watched_events():
                 reason = "its type is explicitly in watched_events"
                 self.debug(f"Allowing omitted event: {event} because {reason}")
             else:
-                return False, "_omit is True"
+                return False, "its type is omitted in the config"
 
         # internal events like those from speculate, ipneighbor
         # or events that are over our report distance
         if event._internal:
-            return False, "_internal is True"
+            return False, "event is internal and output modules don't accept internal events"
 
         return True, reason
 
bbot/modules/paramminer_headers.py CHANGED
@@ -82,18 +82,21 @@ class paramminer_headers(BaseModule):
 
     header_regex = re.compile(r"^[!#$%&\'*+\-.^_`|~0-9a-zA-Z]+: [^\r\n]+$")
 
-    async def setup(self):
-        self.recycle_words = self.config.get("recycle_words", True)
-        self.event_dict = {}
-        self.already_checked = set()
+    async def setup_deps(self):
         wordlist = self.config.get("wordlist", "")
         if not wordlist:
             wordlist = f"{self.helpers.wordlist_dir}/{self.default_wordlist}"
+        self.wordlist_file = await self.helpers.wordlist(wordlist)
         self.debug(f"Using wordlist: [{wordlist}]")
+        return True
+
+    async def setup(self):
+        self.recycle_words = self.config.get("recycle_words", True)
+        self.event_dict = {}
+        self.already_checked = set()
+
         self.wl = {
-            h.strip().lower()
-            for h in self.helpers.read_file(await self.helpers.wordlist(wordlist))
-            if len(h) > 0 and "%" not in h
+            h.strip().lower() for h in self.helpers.read_file(self.wordlist_file) if len(h) > 0 and "%" not in h
         }
 
         # check against the boring list (if the option is set)
bbot/modules/portfilter.py CHANGED
@@ -19,6 +19,8 @@ class portfilter(BaseInterceptModule):
     }
 
     _priority = 4
+    # we consume URLs but we don't want to automatically enable httpx
+    _disable_auto_module_deps = True
 
     async def setup(self):
         self.cdn_tags = [t.strip() for t in self.config.get("cdn_tags", "").split(",")]
bbot/modules/postman_download.py CHANGED
@@ -7,7 +7,7 @@ from bbot.modules.templates.postman import postman
 class postman_download(postman):
     watched_events = ["CODE_REPOSITORY"]
     produced_events = ["FILESYSTEM"]
-    flags = ["passive", "subdomain-enum", "safe", "code-enum"]
+    flags = ["passive", "subdomain-enum", "safe", "code-enum", "download"]
     meta = {
         "description": "Download workspaces, collections, requests from Postman",
         "created_date": "2024-09-07",