bbot 2.6.0.6879rc0__py3-none-any.whl → 2.7.2.7254rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of bbot might be problematic; see the registry's advisory page for more details.

Files changed (75)
  1. bbot/__init__.py +1 -1
  2. bbot/core/engine.py +1 -1
  3. bbot/core/flags.py +1 -0
  4. bbot/core/helpers/bloom.py +6 -7
  5. bbot/core/helpers/dns/dns.py +0 -1
  6. bbot/core/helpers/dns/engine.py +0 -2
  7. bbot/core/helpers/files.py +2 -2
  8. bbot/core/helpers/git.py +17 -0
  9. bbot/core/helpers/misc.py +1 -0
  10. bbot/core/helpers/ntlm.py +0 -2
  11. bbot/core/helpers/regex.py +1 -1
  12. bbot/core/modules.py +0 -54
  13. bbot/defaults.yml +4 -2
  14. bbot/modules/apkpure.py +1 -1
  15. bbot/modules/base.py +11 -5
  16. bbot/modules/dnsbimi.py +1 -4
  17. bbot/modules/dnsdumpster.py +35 -52
  18. bbot/modules/dnstlsrpt.py +0 -6
  19. bbot/modules/docker_pull.py +1 -1
  20. bbot/modules/emailformat.py +17 -1
  21. bbot/modules/filedownload.py +1 -1
  22. bbot/modules/git_clone.py +47 -22
  23. bbot/modules/gitdumper.py +4 -14
  24. bbot/modules/github_workflows.py +1 -1
  25. bbot/modules/gitlab_com.py +31 -0
  26. bbot/modules/gitlab_onprem.py +84 -0
  27. bbot/modules/gowitness.py +0 -6
  28. bbot/modules/graphql_introspection.py +5 -2
  29. bbot/modules/httpx.py +2 -0
  30. bbot/modules/iis_shortnames.py +0 -7
  31. bbot/modules/internal/unarchive.py +9 -3
  32. bbot/modules/lightfuzz/lightfuzz.py +5 -1
  33. bbot/modules/nuclei.py +1 -1
  34. bbot/modules/output/base.py +0 -5
  35. bbot/modules/postman_download.py +1 -1
  36. bbot/modules/retirejs.py +232 -0
  37. bbot/modules/securitytxt.py +0 -3
  38. bbot/modules/subdomaincenter.py +1 -16
  39. bbot/modules/telerik.py +6 -1
  40. bbot/modules/templates/gitlab.py +98 -0
  41. bbot/modules/trufflehog.py +1 -1
  42. bbot/scanner/manager.py +7 -4
  43. bbot/scanner/scanner.py +1 -1
  44. bbot/scripts/benchmark_report.py +433 -0
  45. bbot/test/benchmarks/__init__.py +2 -0
  46. bbot/test/benchmarks/test_bloom_filter_benchmarks.py +105 -0
  47. bbot/test/benchmarks/test_closest_match_benchmarks.py +76 -0
  48. bbot/test/benchmarks/test_event_validation_benchmarks.py +438 -0
  49. bbot/test/benchmarks/test_excavate_benchmarks.py +291 -0
  50. bbot/test/benchmarks/test_ipaddress_benchmarks.py +143 -0
  51. bbot/test/benchmarks/test_weighted_shuffle_benchmarks.py +70 -0
  52. bbot/test/test_step_1/test_bbot_fastapi.py +2 -2
  53. bbot/test/test_step_1/test_events.py +0 -1
  54. bbot/test/test_step_1/test_scan.py +1 -8
  55. bbot/test/test_step_2/module_tests/base.py +6 -1
  56. bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +2 -1
  57. bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +3 -5
  58. bbot/test/test_step_2/module_tests/test_module_emailformat.py +1 -1
  59. bbot/test/test_step_2/module_tests/test_module_emails.py +2 -2
  60. bbot/test/test_step_2/module_tests/test_module_excavate.py +35 -6
  61. bbot/test/test_step_2/module_tests/test_module_gitlab_com.py +66 -0
  62. bbot/test/test_step_2/module_tests/{test_module_gitlab.py → test_module_gitlab_onprem.py} +4 -69
  63. bbot/test/test_step_2/module_tests/test_module_lightfuzz.py +2 -2
  64. bbot/test/test_step_2/module_tests/test_module_retirejs.py +159 -0
  65. bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
  66. {bbot-2.6.0.6879rc0.dist-info → bbot-2.7.2.7254rc0.dist-info}/METADATA +7 -4
  67. {bbot-2.6.0.6879rc0.dist-info → bbot-2.7.2.7254rc0.dist-info}/RECORD +70 -60
  68. {bbot-2.6.0.6879rc0.dist-info → bbot-2.7.2.7254rc0.dist-info}/WHEEL +1 -1
  69. bbot/modules/censys.py +0 -98
  70. bbot/modules/gitlab.py +0 -141
  71. bbot/modules/zoomeye.py +0 -77
  72. bbot/test/test_step_2/module_tests/test_module_censys.py +0 -83
  73. bbot/test/test_step_2/module_tests/test_module_zoomeye.py +0 -35
  74. {bbot-2.6.0.6879rc0.dist-info → bbot-2.7.2.7254rc0.dist-info}/entry_points.txt +0 -0
  75. {bbot-2.6.0.6879rc0.dist-info → bbot-2.7.2.7254rc0.dist-info/licenses}/LICENSE +0 -0
@@ -8,7 +8,7 @@ from bbot.modules.templates.github import github
8
8
  class github_workflows(github):
9
9
  watched_events = ["CODE_REPOSITORY"]
10
10
  produced_events = ["FILESYSTEM"]
11
- flags = ["passive", "safe", "code-enum"]
11
+ flags = ["passive", "safe", "code-enum", "download"]
12
12
  meta = {
13
13
  "description": "Download a github repositories workflow logs and workflow artifacts",
14
14
  "created_date": "2024-04-29",
@@ -0,0 +1,31 @@
1
+ from bbot.modules.templates.gitlab import GitLabBaseModule
2
+
3
+
4
+ class gitlab_com(GitLabBaseModule):
5
+ watched_events = ["SOCIAL"]
6
+ produced_events = [
7
+ "CODE_REPOSITORY",
8
+ ]
9
+ flags = ["active", "safe", "code-enum"]
10
+ meta = {
11
+ "description": "Enumerate GitLab SaaS (gitlab.com/org) for projects and groups",
12
+ "created_date": "2024-03-11",
13
+ "author": "@TheTechromancer",
14
+ }
15
+
16
+ options = {"api_key": ""}
17
+ options_desc = {"api_key": "GitLab access token (for gitlab.com/org only)"}
18
+
19
+ # This is needed because we are consuming SOCIAL events, which aren't in scope
20
+ scope_distance_modifier = 2
21
+
22
+ async def handle_event(self, event):
23
+ await self.handle_social(event)
24
+
25
+ async def filter_event(self, event):
26
+ if event.data["platform"] != "gitlab":
27
+ return False, "platform is not gitlab"
28
+ _, domain = self.helpers.split_domain(event.host)
29
+ if domain not in self.saas_domains:
30
+ return False, "gitlab instance is not gitlab.com/org"
31
+ return True
@@ -0,0 +1,84 @@
1
+ from bbot.modules.templates.gitlab import GitLabBaseModule
2
+
3
+
4
+ class gitlab_onprem(GitLabBaseModule):
5
+ watched_events = ["HTTP_RESPONSE", "TECHNOLOGY", "SOCIAL"]
6
+ produced_events = [
7
+ "TECHNOLOGY",
8
+ "SOCIAL",
9
+ "CODE_REPOSITORY",
10
+ "FINDING",
11
+ ]
12
+ flags = ["active", "safe", "code-enum"]
13
+ meta = {
14
+ "description": "Detect self-hosted GitLab instances and query them for repositories",
15
+ "created_date": "2024-03-11",
16
+ "author": "@TheTechromancer",
17
+ }
18
+
19
+ # Optional GitLab access token (only required for gitlab.com, but still
20
+ # supported for on-prem installations that expose private projects).
21
+ options = {"api_key": ""}
22
+ options_desc = {"api_key": "GitLab access token (for self-hosted instances only)"}
23
+
24
+ # Allow accepting events slightly beyond configured max distance so we can
25
+ # discover repos on neighbouring infrastructure.
26
+ scope_distance_modifier = 2
27
+
28
+ async def handle_event(self, event):
29
+ if event.type == "HTTP_RESPONSE":
30
+ await self.handle_http_response(event)
31
+ elif event.type == "TECHNOLOGY":
32
+ await self.handle_technology(event)
33
+ elif event.type == "SOCIAL":
34
+ await self.handle_social(event)
35
+
36
+ async def filter_event(self, event):
37
+ # only accept out-of-scope SOCIAL events
38
+ if event.type == "HTTP_RESPONSE":
39
+ if event.scope_distance > self.scan.scope_search_distance:
40
+ return False, "event is out of scope distance"
41
+ elif event.type == "TECHNOLOGY":
42
+ if not event.data["technology"].lower().startswith("gitlab"):
43
+ return False, "technology is not gitlab"
44
+ if not self.helpers.is_ip(event.host) and self.helpers.tldextract(event.host).domain == "gitlab":
45
+ return False, "gitlab instance is not self-hosted"
46
+ elif event.type == "SOCIAL":
47
+ if event.data["platform"] != "gitlab":
48
+ return False, "platform is not gitlab"
49
+ _, domain = self.helpers.split_domain(event.host)
50
+ if domain in self.saas_domains:
51
+ return False, "gitlab instance is not self-hosted"
52
+ return True
53
+
54
+ async def handle_http_response(self, event):
55
+ """Identify GitLab servers from HTTP responses."""
56
+ headers = event.data.get("header", {})
57
+ if "x_gitlab_meta" in headers:
58
+ url = event.parsed_url._replace(path="/").geturl()
59
+ await self.emit_event(
60
+ {"host": str(event.host), "technology": "GitLab", "url": url},
61
+ "TECHNOLOGY",
62
+ parent=event,
63
+ context=f"{{module}} detected {{event.type}}: GitLab at {url}",
64
+ )
65
+ description = f"GitLab server at {event.host}"
66
+ await self.emit_event(
67
+ {"host": str(event.host), "description": description},
68
+ "FINDING",
69
+ parent=event,
70
+ context=f"{{module}} detected {{event.type}}: {description}",
71
+ )
72
+
73
+ async def handle_technology(self, event):
74
+ """Enumerate projects & groups once we know a host is GitLab."""
75
+ base_url = self.get_base_url(event)
76
+
77
+ # Projects owned by the authenticated user (or public projects if no
78
+ # authentication).
79
+ projects_url = self.helpers.urljoin(base_url, "api/v4/projects?simple=true")
80
+ await self.handle_projects_url(projects_url, event)
81
+
82
+ # Group enumeration.
83
+ groups_url = self.helpers.urljoin(base_url, "api/v4/groups?simple=true")
84
+ await self.handle_groups_url(groups_url, event)
bbot/modules/gowitness.py CHANGED
@@ -161,7 +161,6 @@ class gowitness(BaseModule):
161
161
  key = e.data["url"]
162
162
  event_dict[key] = e
163
163
  stdin = "\n".join(list(event_dict))
164
- self.hugeinfo(f"Gowitness input: {stdin}")
165
164
 
166
165
  try:
167
166
  async for line in self.run_process_live(self.command, input=stdin, idle_timeout=self.idle_timeout):
@@ -182,7 +181,6 @@ class gowitness(BaseModule):
182
181
  # NOTE: this prevents long filenames from causing problems in BBOT, but gowitness will still fail to save it.
183
182
  filename = self.helpers.truncate_filename(filename)
184
183
  webscreenshot_data = {"path": str(filename), "url": final_url}
185
- self.hugewarning(event_dict)
186
184
  parent_event = event_dict[url]
187
185
  await self.emit_event(
188
186
  webscreenshot_data,
@@ -259,9 +257,7 @@ class gowitness(BaseModule):
259
257
  con.row_factory = aiosqlite.Row
260
258
  con.text_factory = self.helpers.smart_decode
261
259
  async with con.execute("SELECT * FROM results") as cur:
262
- self.critical(f"CUR: {cur}")
263
260
  async for row in cur:
264
- self.critical(f"SCREENSHOT: {row}")
265
261
  row = dict(row)
266
262
  _id = row["id"]
267
263
  if _id not in self.screenshots_taken:
@@ -276,7 +272,6 @@ class gowitness(BaseModule):
276
272
  con.row_factory = aiosqlite.Row
277
273
  async with con.execute("SELECT * FROM network_logs") as cur:
278
274
  async for row in cur:
279
- self.critical(f"NETWORK LOG: {row}")
280
275
  row = dict(row)
281
276
  url = row["url"]
282
277
  if url not in self.connections_logged:
@@ -291,7 +286,6 @@ class gowitness(BaseModule):
291
286
  con.row_factory = aiosqlite.Row
292
287
  async with con.execute("SELECT * FROM technologies") as cur:
293
288
  async for row in cur:
294
- self.critical(f"TECHNOLOGY: {row}")
295
289
  _id = row["id"]
296
290
  if _id not in self.technologies_found:
297
291
  self.technologies_found.add(_id)
@@ -27,7 +27,6 @@ class graphql_introspection(BaseModule):
27
27
  self.output_dir = Path(output_folder) / "graphql-schemas"
28
28
  else:
29
29
  self.output_dir = self.scan.home / "graphql-schemas"
30
- self.helpers.mkdir(self.output_dir)
31
30
  return True
32
31
 
33
32
  async def filter_event(self, event):
@@ -120,7 +119,10 @@ fragment TypeRef on __Type {
120
119
  }
121
120
  response = await self.helpers.request(**request_args)
122
121
  if not response or response.status_code != 200:
123
- self.debug(f"Failed to get GraphQL schema for {url} (status code {response.status_code})")
122
+ self.debug(
123
+ f"Failed to get GraphQL schema for {url} "
124
+ f"{f'(status code {response.status_code})' if response else ''}"
125
+ )
124
126
  continue
125
127
  try:
126
128
  response_json = response.json()
@@ -128,6 +130,7 @@ fragment TypeRef on __Type {
128
130
  self.debug(f"Failed to parse JSON for {url}")
129
131
  continue
130
132
  if response_json.get("data", {}).get("__schema", {}).get("types", []):
133
+ self.helpers.mkdir(self.output_dir)
131
134
  filename = f"schema-{self.helpers.tagify(url)}.json"
132
135
  filename = self.output_dir / filename
133
136
  with open(filename, "w") as f:
bbot/modules/httpx.py CHANGED
@@ -50,6 +50,8 @@ class httpx(BaseModule):
50
50
  _shuffle_incoming_queue = False
51
51
  _batch_size = 500
52
52
  _priority = 2
53
+ # accept Javascript URLs
54
+ accept_url_special = True
53
55
 
54
56
  async def setup(self):
55
57
  self.threads = self.config.get("threads", 50)
@@ -116,13 +116,6 @@ class iis_shortnames(BaseModule):
116
116
 
117
117
  return duplicates
118
118
 
119
- async def threaded_request(self, method, url, affirmative_status_code, c):
120
- r = await self.helpers.request(method=method, url=url, allow_redirects=False, retries=2, timeout=10)
121
- if r is not None:
122
- if r.status_code == affirmative_status_code:
123
- return True, c
124
- return None, c
125
-
126
119
  async def solve_valid_chars(self, method, target, affirmative_status_code):
127
120
  confirmed_chars = []
128
121
  confirmed_exts = []
@@ -1,4 +1,5 @@
1
1
  from pathlib import Path
2
+ from contextlib import suppress
2
3
  from bbot.modules.internal.base import BaseInternalModule
3
4
  from bbot.core.helpers.libmagic import get_magic_info, get_compression
4
5
 
@@ -62,15 +63,20 @@ class unarchive(BaseInternalModule):
62
63
  context=f'extracted "{path}" to: {output_dir}',
63
64
  )
64
65
  else:
65
- output_dir.rmdir()
66
+ with suppress(OSError):
67
+ output_dir.rmdir()
66
68
 
67
69
  async def extract_file(self, path, output_dir):
68
70
  extension, mime_type, description, confidence = get_magic_info(path)
69
71
  compression_format = get_compression(mime_type)
70
72
  cmd_list = self.compression_methods.get(compression_format, [])
71
73
  if cmd_list:
72
- if not output_dir.exists():
73
- self.helpers.mkdir(output_dir)
74
+ # output dir must not already exist
75
+ try:
76
+ output_dir.mkdir(exist_ok=False)
77
+ except FileExistsError:
78
+ self.warning(f"Destination directory {output_dir} already exists, aborting unarchive for {path}")
79
+ return False
74
80
  command = [s.format(filename=path, extract_dir=output_dir) for s in cmd_list]
75
81
  try:
76
82
  await self.run_process(command, check=True)
@@ -34,6 +34,7 @@ class lightfuzz(BaseModule):
34
34
  self.event_dict = {}
35
35
  self.interactsh_subdomain_tags = {}
36
36
  self.interactsh_instance = None
37
+ self.interactsh_domain = None
37
38
  self.disable_post = self.config.get("disable_post", False)
38
39
  self.enabled_submodules = self.config.get("enabled_submodules")
39
40
  self.interactsh_disable = self.scan.config.get("interactsh_disable", False)
@@ -51,13 +52,16 @@ class lightfuzz(BaseModule):
51
52
  self.submodules[submodule_name] = submodule_class
52
53
 
53
54
  interactsh_needed = any(submodule.uses_interactsh for submodule in self.submodules.values())
54
-
55
55
  if interactsh_needed and not self.interactsh_disable:
56
56
  try:
57
57
  self.interactsh_instance = self.helpers.interactsh()
58
58
  self.interactsh_domain = await self.interactsh_instance.register(callback=self.interactsh_callback)
59
+ if not self.interactsh_domain:
60
+ self.warning("Interactsh failure: No domain returned from self.interactsh_instance.register()")
61
+ self.interactsh_instance = None
59
62
  except InteractshError as e:
60
63
  self.warning(f"Interactsh failure: {e}")
64
+ self.interactsh_instance = None
61
65
  return True
62
66
 
63
67
  async def interactsh_callback(self, r):
bbot/modules/nuclei.py CHANGED
@@ -15,7 +15,7 @@ class nuclei(BaseModule):
15
15
  }
16
16
 
17
17
  options = {
18
- "version": "3.4.7",
18
+ "version": "3.4.10",
19
19
  "tags": "",
20
20
  "templates": "",
21
21
  "severity": "",
@@ -38,11 +38,6 @@ class BaseOutputModule(BaseModule):
38
38
  if self._is_graph_important(event):
39
39
  return True, "event is critical to the graph"
40
40
 
41
- # exclude certain URLs (e.g. javascript):
42
- # TODO: revisit this after httpx rework
43
- if event.type.startswith("URL") and self.name != "httpx" and "httpx-only" in event.tags:
44
- return False, (f"Omitting {event} from output because it's marked as httpx-only")
45
-
46
41
  # omit certain event types
47
42
  if event._omit:
48
43
  if "target" in event.tags:
@@ -7,7 +7,7 @@ from bbot.modules.templates.postman import postman
7
7
  class postman_download(postman):
8
8
  watched_events = ["CODE_REPOSITORY"]
9
9
  produced_events = ["FILESYSTEM"]
10
- flags = ["passive", "subdomain-enum", "safe", "code-enum"]
10
+ flags = ["passive", "subdomain-enum", "safe", "code-enum", "download"]
11
11
  meta = {
12
12
  "description": "Download workspaces, collections, requests from Postman",
13
13
  "created_date": "2024-09-07",
@@ -0,0 +1,232 @@
1
+ import json
2
+ from enum import IntEnum
3
+ from bbot.modules.base import BaseModule
4
+
5
+
6
+ class RetireJSSeverity(IntEnum):
7
+ NONE = 0
8
+ LOW = 1
9
+ MEDIUM = 2
10
+ HIGH = 3
11
+ CRITICAL = 4
12
+
13
+ @classmethod
14
+ def from_string(cls, severity_str):
15
+ try:
16
+ return cls[severity_str.upper()]
17
+ except (KeyError, AttributeError):
18
+ return cls.NONE
19
+
20
+
21
+ class retirejs(BaseModule):
22
+ watched_events = ["URL_UNVERIFIED"]
23
+ produced_events = ["FINDING"]
24
+ flags = ["active", "safe", "web-thorough"]
25
+ meta = {
26
+ "description": "Detect vulnerable/out-of-date JavaScript libraries",
27
+ "created_date": "2025-08-19",
28
+ "author": "@liquidsec",
29
+ }
30
+ options = {
31
+ "version": "5.3.0",
32
+ "node_version": "18.19.1",
33
+ "severity": "medium",
34
+ }
35
+ options_desc = {
36
+ "version": "retire.js version",
37
+ "node_version": "Node.js version to install locally",
38
+ "severity": "Minimum severity level to report (none, low, medium, high, critical)",
39
+ }
40
+
41
+ deps_ansible = [
42
+ # Download Node.js binary (Linux x64)
43
+ {
44
+ "name": "Download Node.js binary (Linux x64)",
45
+ "get_url": {
46
+ "url": "https://nodejs.org/dist/v#{BBOT_MODULES_RETIREJS_NODE_VERSION}/node-v#{BBOT_MODULES_RETIREJS_NODE_VERSION}-linux-x64.tar.xz",
47
+ "dest": "#{BBOT_TEMP}/node-v#{BBOT_MODULES_RETIREJS_NODE_VERSION}-linux-x64.tar.xz",
48
+ "mode": "0644",
49
+ },
50
+ },
51
+ # Extract Node.js binary (x64)
52
+ {
53
+ "name": "Extract Node.js binary (x64)",
54
+ "unarchive": {
55
+ "src": "#{BBOT_TEMP}/node-v#{BBOT_MODULES_RETIREJS_NODE_VERSION}-linux-x64.tar.xz",
56
+ "dest": "#{BBOT_TOOLS}",
57
+ "remote_src": True,
58
+ },
59
+ },
60
+ # Remove existing node directory if it exists
61
+ {
62
+ "name": "Remove existing node directory",
63
+ "file": {"path": "#{BBOT_TOOLS}/node", "state": "absent"},
64
+ },
65
+ # Rename extracted directory to 'node' (x64)
66
+ {
67
+ "name": "Rename Node.js directory (x64)",
68
+ "command": "mv #{BBOT_TOOLS}/node-v#{BBOT_MODULES_RETIREJS_NODE_VERSION}-linux-x64 #{BBOT_TOOLS}/node",
69
+ },
70
+ # Set permissions on entire Node.js bin directory
71
+ {
72
+ "name": "Set permissions on Node.js bin directory",
73
+ "file": {"path": "#{BBOT_TOOLS}/node/bin", "mode": "0755", "recurse": "yes"},
74
+ },
75
+ # Make Node.js binary executable
76
+ {
77
+ "name": "Make Node.js binary executable",
78
+ "file": {"path": "#{BBOT_TOOLS}/node/bin/node", "mode": "0755"},
79
+ },
80
+ # Remove existing retirejs directory if it exists
81
+ {
82
+ "name": "Remove existing retirejs directory",
83
+ "file": {"path": "#{BBOT_TOOLS}/retirejs", "state": "absent"},
84
+ },
85
+ # Create retire.js local directory
86
+ {
87
+ "name": "Create retire.js directory in BBOT_TOOLS",
88
+ "file": {"path": "#{BBOT_TOOLS}/retirejs", "state": "directory", "mode": "0755"},
89
+ },
90
+ # Install retire.js locally using local Node.js
91
+ {
92
+ "name": "Install retire.js locally",
93
+ "shell": "cd #{BBOT_TOOLS}/retirejs && #{BBOT_TOOLS}/node/bin/node #{BBOT_TOOLS}/node/lib/node_modules/npm/bin/npm-cli.js install --prefix . retire@#{BBOT_MODULES_RETIREJS_VERSION} --no-fund --no-audit --silent --no-optional",
94
+ "args": {"creates": "#{BBOT_TOOLS}/retirejs/node_modules/.bin/retire"},
95
+ "timeout": 600,
96
+ "ignore_errors": False,
97
+ },
98
+ # Make retire script executable
99
+ {
100
+ "name": "Make retire script executable",
101
+ "file": {"path": "#{BBOT_TOOLS}/retirejs/node_modules/.bin/retire", "mode": "0755"},
102
+ },
103
+ # Create retire cache directory
104
+ {
105
+ "name": "Create retire cache directory",
106
+ "file": {"path": "#{BBOT_CACHE}/retire_cache", "state": "directory", "mode": "0755"},
107
+ },
108
+ ]
109
+
110
+ accept_url_special = True
111
+ scope_distance_modifier = 1
112
+ _module_threads = 4
113
+
114
+ async def setup(self):
115
+ excavate_enabled = self.scan.config.get("excavate")
116
+ if not excavate_enabled:
117
+ return None, "retirejs will not function without excavate enabled"
118
+
119
+ # Validate severity level
120
+ valid_severities = ["none", "low", "medium", "high", "critical"]
121
+ configured_severity = self.config.get("severity", "medium").lower()
122
+ if configured_severity not in valid_severities:
123
+ return (
124
+ False,
125
+ f"Invalid severity level '{configured_severity}'. Valid options are: {', '.join(valid_severities)}",
126
+ )
127
+
128
+ self.repofile = await self.helpers.download(
129
+ "https://raw.githubusercontent.com/RetireJS/retire.js/master/repository/jsrepository-v4.json", cache_hrs=24
130
+ )
131
+ if not self.repofile:
132
+ return False, "failed to download retire.js repository file"
133
+ return True
134
+
135
+ async def handle_event(self, event):
136
+ js_file = await self.helpers.request(event.data)
137
+ if js_file:
138
+ js_file_body = js_file.text
139
+ if js_file_body:
140
+ js_file_body_saved = self.helpers.tempfile(js_file_body, pipe=False, extension="js")
141
+ results = await self.execute_retirejs(js_file_body_saved)
142
+ if not results:
143
+ self.warning("no output from retire.js")
144
+ return
145
+ results_json = json.loads(results)
146
+ if results_json.get("data"):
147
+ for file_result in results_json["data"]:
148
+ for component_result in file_result.get("results", []):
149
+ component = component_result.get("component", "unknown")
150
+ version = component_result.get("version", "unknown")
151
+ vulnerabilities = component_result.get("vulnerabilities", [])
152
+ for vuln in vulnerabilities:
153
+ severity = vuln.get("severity", "unknown")
154
+
155
+ # Filter by minimum severity level
156
+ min_severity = RetireJSSeverity.from_string(self.config.get("severity", "medium"))
157
+ vuln_severity = RetireJSSeverity.from_string(severity)
158
+ if vuln_severity < min_severity:
159
+ self.debug(
160
+ f"Skipping vulnerability with severity '{severity}' (below minimum '{min_severity.name.lower()}')"
161
+ )
162
+ continue
163
+
164
+ identifiers = vuln.get("identifiers", {})
165
+ summary = identifiers.get("summary", "Unknown vulnerability")
166
+ cves = identifiers.get("CVE", [])
167
+ description_parts = [
168
+ f"Vulnerable JavaScript library detected: {component} v{version}",
169
+ f"Severity: {severity.upper()}",
170
+ f"Summary: {summary}",
171
+ f"JavaScript URL: {event.data}",
172
+ ]
173
+ if cves:
174
+ description_parts.append(f"CVE(s): {', '.join(cves)}")
175
+
176
+ below_version = vuln.get("below", "")
177
+ at_or_above = vuln.get("atOrAbove", "")
178
+ if at_or_above and below_version:
179
+ description_parts.append(f"Affected versions: [{at_or_above} to {below_version})")
180
+ elif below_version:
181
+ description_parts.append(f"Affected versions: [< {below_version}]")
182
+ elif at_or_above:
183
+ description_parts.append(f"Affected versions: [>= {at_or_above}]")
184
+ description = " ".join(description_parts)
185
+ data = {
186
+ "description": description,
187
+ "severity": severity,
188
+ "component": component,
189
+ "url": event.parent.data["url"],
190
+ }
191
+ await self.emit_event(
192
+ data,
193
+ "FINDING",
194
+ parent=event,
195
+ context=f"{{module}} identified vulnerable JavaScript library {component} v{version} ({severity} severity)",
196
+ )
197
+
198
+ async def filter_event(self, event):
199
+ url_extension = getattr(event, "url_extension", "")
200
+ if url_extension != "js":
201
+ return False, f"it is a {url_extension} URL but retirejs only accepts js URLs"
202
+ return True
203
+
204
+ async def execute_retirejs(self, js_file):
205
+ cache_dir = self.helpers.cache_dir / "retire_cache"
206
+ retire_dir = self.scan.helpers.tools_dir / "retirejs"
207
+ local_node_dir = self.scan.helpers.tools_dir / "node"
208
+
209
+ # Use the retire binary directly with our local Node.js
210
+ retire_binary_path = retire_dir / "node_modules" / ".bin" / "retire"
211
+ command = [
212
+ str(local_node_dir / "bin" / "node"),
213
+ str(retire_binary_path),
214
+ "--outputformat",
215
+ "json",
216
+ "--cachedir",
217
+ str(cache_dir),
218
+ "--path",
219
+ js_file,
220
+ "--jsrepo",
221
+ str(self.repofile),
222
+ ]
223
+
224
+ proxy = self.scan.web_config.get("http_proxy")
225
+ if proxy:
226
+ command.extend(["--proxy", proxy])
227
+
228
+ self.verbose(f"Running retire.js on {js_file}")
229
+ self.verbose(f"retire.js command: {command}")
230
+
231
+ result = await self.run_process(command)
232
+ return result.stdout
@@ -123,6 +123,3 @@ class securitytxt(BaseModule):
123
123
 
124
124
  if found_url != url and self._urls is True:
125
125
  await self.emit_event(found_url, "URL_UNVERIFIED", parent=event, tags=tags)
126
-
127
-
128
- # EOF
@@ -12,25 +12,10 @@ class subdomaincenter(subdomain_enum):
12
12
  }
13
13
 
14
14
  base_url = "https://api.subdomain.center"
15
- retries = 2
16
-
17
- async def sleep(self, time_to_wait):
18
- self.info(f"Sleeping for {time_to_wait} seconds to avoid rate limit")
19
- await self.helpers.sleep(time_to_wait)
20
15
 
21
16
  async def request_url(self, query):
22
17
  url = f"{self.base_url}/?domain={self.helpers.quote(query)}"
23
- response = None
24
- status_code = 0
25
- for i, _ in enumerate(range(self.retries + 1)):
26
- if i > 0:
27
- self.verbose(f"Retry #{i} for {query} after response code {status_code}")
28
- response = await self.helpers.request(url, timeout=self.http_timeout + 30)
29
- status_code = getattr(response, "status_code", 0)
30
- if status_code == 429:
31
- await self.sleep(20)
32
- else:
33
- break
18
+ response = await self.api_request(url)
34
19
  return response
35
20
 
36
21
  async def parse_results(self, r, query):
bbot/modules/telerik.py CHANGED
@@ -204,7 +204,7 @@ class telerik(BaseModule):
204
204
  webresource = "Telerik.Web.UI.WebResource.axd?type=rau"
205
205
  result, _ = await self.test_detector(base_url, webresource)
206
206
  if result:
207
- if "RadAsyncUpload handler is registered successfully" in result.text:
207
+ if "RadAsyncUpload handler is registered succesfully" in result.text:
208
208
  self.verbose("Detected Telerik instance (Telerik.Web.UI.WebResource.axd?type=rau)")
209
209
 
210
210
  probe_data = {
@@ -263,6 +263,11 @@ class telerik(BaseModule):
263
263
  str(root_tool_path / "testfile.txt"),
264
264
  result.url,
265
265
  ]
266
+
267
+ # Add proxy if set in the scan config
268
+ if self.scan.http_proxy:
269
+ command.append(self.scan.http_proxy)
270
+
266
271
  output = await self.run_process(command)
267
272
  description = f"[CVE-2017-11317] [{str(version)}] {webresource}"
268
273
  if "fileInfo" in output.stdout: