bbot-2.3.0.5538rc0-py3-none-any.whl → bbot-2.3.0.5809rc0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.
Files changed (116)
  1. bbot/__init__.py +1 -1
  2. bbot/cli.py +1 -1
  3. bbot/core/engine.py +1 -1
  4. bbot/core/event/base.py +7 -5
  5. bbot/core/helpers/async_helpers.py +7 -1
  6. bbot/core/helpers/depsinstaller/installer.py +7 -2
  7. bbot/core/helpers/diff.py +13 -4
  8. bbot/core/helpers/dns/brute.py +8 -2
  9. bbot/core/helpers/dns/engine.py +3 -2
  10. bbot/core/helpers/ratelimiter.py +8 -2
  11. bbot/core/helpers/regexes.py +5 -2
  12. bbot/core/helpers/web/engine.py +1 -1
  13. bbot/core/helpers/web/web.py +1 -1
  14. bbot/core/shared_deps.py +14 -0
  15. bbot/defaults.yml +44 -0
  16. bbot/modules/ajaxpro.py +64 -37
  17. bbot/modules/baddns.py +23 -15
  18. bbot/modules/baddns_direct.py +2 -2
  19. bbot/modules/badsecrets.py +2 -2
  20. bbot/modules/base.py +49 -15
  21. bbot/modules/censys.py +1 -1
  22. bbot/modules/deadly/dastardly.py +3 -3
  23. bbot/modules/deadly/nuclei.py +1 -1
  24. bbot/modules/dehashed.py +2 -2
  25. bbot/modules/dnsbrute_mutations.py +3 -1
  26. bbot/modules/docker_pull.py +1 -1
  27. bbot/modules/dockerhub.py +2 -2
  28. bbot/modules/dotnetnuke.py +12 -12
  29. bbot/modules/extractous.py +1 -1
  30. bbot/modules/ffuf_shortnames.py +107 -48
  31. bbot/modules/filedownload.py +6 -0
  32. bbot/modules/generic_ssrf.py +54 -40
  33. bbot/modules/github_codesearch.py +2 -2
  34. bbot/modules/github_org.py +16 -20
  35. bbot/modules/github_workflows.py +6 -2
  36. bbot/modules/gowitness.py +6 -0
  37. bbot/modules/hunt.py +1 -1
  38. bbot/modules/hunterio.py +1 -1
  39. bbot/modules/iis_shortnames.py +23 -7
  40. bbot/modules/internal/excavate.py +5 -3
  41. bbot/modules/internal/unarchive.py +82 -0
  42. bbot/modules/jadx.py +2 -2
  43. bbot/modules/output/asset_inventory.py +1 -1
  44. bbot/modules/output/base.py +1 -1
  45. bbot/modules/output/discord.py +2 -1
  46. bbot/modules/output/slack.py +2 -1
  47. bbot/modules/output/teams.py +10 -25
  48. bbot/modules/output/web_parameters.py +55 -0
  49. bbot/modules/paramminer_headers.py +15 -10
  50. bbot/modules/portfilter.py +41 -0
  51. bbot/modules/portscan.py +1 -22
  52. bbot/modules/postman.py +61 -43
  53. bbot/modules/postman_download.py +10 -147
  54. bbot/modules/sitedossier.py +1 -1
  55. bbot/modules/skymem.py +1 -1
  56. bbot/modules/templates/postman.py +163 -1
  57. bbot/modules/templates/subdomain_enum.py +1 -1
  58. bbot/modules/templates/webhook.py +17 -26
  59. bbot/modules/trufflehog.py +3 -3
  60. bbot/modules/wappalyzer.py +1 -1
  61. bbot/modules/zoomeye.py +1 -1
  62. bbot/presets/kitchen-sink.yml +1 -1
  63. bbot/presets/nuclei/nuclei-budget.yml +19 -0
  64. bbot/presets/nuclei/nuclei-intense.yml +28 -0
  65. bbot/presets/nuclei/nuclei-technology.yml +23 -0
  66. bbot/presets/nuclei/nuclei.yml +34 -0
  67. bbot/presets/spider-intense.yml +13 -0
  68. bbot/scanner/preset/args.py +29 -3
  69. bbot/scanner/preset/preset.py +43 -24
  70. bbot/scanner/scanner.py +17 -7
  71. bbot/test/bbot_fixtures.py +7 -7
  72. bbot/test/test_step_1/test_bloom_filter.py +2 -2
  73. bbot/test/test_step_1/test_cli.py +5 -5
  74. bbot/test/test_step_1/test_dns.py +33 -0
  75. bbot/test/test_step_1/test_events.py +15 -5
  76. bbot/test/test_step_1/test_modules_basic.py +21 -21
  77. bbot/test/test_step_1/test_presets.py +94 -4
  78. bbot/test/test_step_1/test_regexes.py +13 -13
  79. bbot/test/test_step_1/test_scan.py +78 -0
  80. bbot/test/test_step_1/test_web.py +4 -4
  81. bbot/test/test_step_2/module_tests/test_module_ajaxpro.py +43 -23
  82. bbot/test/test_step_2/module_tests/test_module_azure_realm.py +3 -3
  83. bbot/test/test_step_2/module_tests/test_module_baddns.py +3 -3
  84. bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +6 -6
  85. bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +3 -3
  86. bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +3 -3
  87. bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +3 -3
  88. bbot/test/test_step_2/module_tests/test_module_dnscaa.py +6 -6
  89. bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +9 -9
  90. bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +12 -12
  91. bbot/test/test_step_2/module_tests/test_module_excavate.py +15 -15
  92. bbot/test/test_step_2/module_tests/test_module_extractous.py +3 -3
  93. bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +8 -8
  94. bbot/test/test_step_2/module_tests/test_module_generic_ssrf.py +3 -1
  95. bbot/test/test_step_2/module_tests/test_module_github_codesearch.py +3 -3
  96. bbot/test/test_step_2/module_tests/test_module_gowitness.py +9 -9
  97. bbot/test/test_step_2/module_tests/test_module_iis_shortnames.py +1 -1
  98. bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +35 -1
  99. bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +3 -3
  100. bbot/test/test_step_2/module_tests/test_module_portfilter.py +48 -0
  101. bbot/test/test_step_2/module_tests/test_module_postman.py +338 -3
  102. bbot/test/test_step_2/module_tests/test_module_postman_download.py +4 -161
  103. bbot/test/test_step_2/module_tests/test_module_securitytxt.py +12 -12
  104. bbot/test/test_step_2/module_tests/test_module_teams.py +10 -1
  105. bbot/test/test_step_2/module_tests/test_module_trufflehog.py +1 -1
  106. bbot/test/test_step_2/module_tests/test_module_unarchive.py +229 -0
  107. bbot/test/test_step_2/module_tests/test_module_viewdns.py +3 -3
  108. bbot/test/test_step_2/module_tests/test_module_web_parameters.py +59 -0
  109. bbot/test/test_step_2/module_tests/test_module_websocket.py +5 -4
  110. {bbot-2.3.0.5538rc0.dist-info → bbot-2.3.0.5809rc0.dist-info}/METADATA +7 -7
  111. {bbot-2.3.0.5538rc0.dist-info → bbot-2.3.0.5809rc0.dist-info}/RECORD +115 -105
  112. {bbot-2.3.0.5538rc0.dist-info → bbot-2.3.0.5809rc0.dist-info}/WHEEL +1 -1
  113. bbot/wordlists/ffuf_shortname_candidates.txt +0 -107982
  114. /bbot/presets/{baddns-thorough.yml → baddns-intense.yml} +0 -0
  115. {bbot-2.3.0.5538rc0.dist-info → bbot-2.3.0.5809rc0.dist-info}/LICENSE +0 -0
  116. {bbot-2.3.0.5538rc0.dist-info → bbot-2.3.0.5809rc0.dist-info}/entry_points.txt +0 -0
bbot/modules/templates/postman.py CHANGED
@@ -14,8 +14,170 @@ class postman(BaseModule):
 
     headers = {
         "Content-Type": "application/json",
-        "X-App-Version": "10.18.8-230926-0808",
+        "X-App-Version": "11.27.4-250109-2338",
         "X-Entity-Team-Id": "0",
         "Origin": "https://www.postman.com",
         "Referer": "https://www.postman.com/search?q=&scope=public&type=all",
     }
+    auth_required = True
+
+    async def setup(self):
+        await super().setup()
+        self.headers = {}
+        api_keys = set()
+        modules_config = self.scan.config.get("modules", {})
+        postman_modules = [m for m in modules_config if str(m).startswith("postman")]
+        for module_name in postman_modules:
+            module_config = modules_config.get(module_name, {})
+            api_key = module_config.get("api_key", "")
+            if isinstance(api_key, str):
+                api_key = [api_key]
+            for key in api_key:
+                key = key.strip()
+                if key:
+                    api_keys.add(key)
+        if not api_keys:
+            if self.auth_required:
+                return None, "No API key set"
+        self.api_key = api_keys
+        if self.api_key:
+            try:
+                await self.ping()
+                self.hugesuccess("API is ready")
+                return True
+            except Exception as e:
+                self.trace()
+                return None, f"Error with API ({str(e).strip()})"
+        return True
+
+    def prepare_api_request(self, url, kwargs):
+        if self.api_key:
+            kwargs["headers"]["X-Api-Key"] = self.api_key
+        return url, kwargs
+
+    async def get_workspace_id(self, repo_url):
+        workspace_id = ""
+        profile = repo_url.split("/")[-2]
+        name = repo_url.split("/")[-1]
+        url = f"{self.base_url}/ws/proxy"
+        json = {
+            "service": "workspaces",
+            "method": "GET",
+            "path": f"/workspaces?handle={profile}&slug={name}",
+        }
+        r = await self.helpers.request(url, method="POST", json=json, headers=self.headers)
+        if r is None:
+            return workspace_id
+        status_code = getattr(r, "status_code", 0)
+        try:
+            json = r.json()
+        except Exception as e:
+            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
+            return workspace_id
+        data = json.get("data", [])
+        if len(data) == 1:
+            workspace_id = data[0]["id"]
+        return workspace_id
+
+    async def request_workspace(self, id):
+        data = {"workspace": {}, "environments": [], "collections": []}
+        workspace = await self.get_workspace(id)
+        if workspace:
+            # Main Workspace
+            name = workspace["name"]
+            data["workspace"] = workspace
+
+            # Workspace global variables
+            self.verbose(f"Searching globals for workspace {name}")
+            globals = await self.get_globals(id)
+            data["environments"].append(globals)
+
+            # Workspace Environments
+            workspace_environments = workspace.get("environments", [])
+            if workspace_environments:
+                self.verbose(f"Searching environments for workspace {name}")
+                for _ in workspace_environments:
+                    environment_id = _["uid"]
+                    environment = await self.get_environment(environment_id)
+                    data["environments"].append(environment)
+
+            # Workspace Collections
+            workspace_collections = workspace.get("collections", [])
+            if workspace_collections:
+                self.verbose(f"Searching collections for workspace {name}")
+                for _ in workspace_collections:
+                    collection_id = _["uid"]
+                    collection = await self.get_collection(collection_id)
+                    data["collections"].append(collection)
+        return data
+
+    async def get_workspace(self, workspace_id):
+        workspace = {}
+        workspace_url = f"{self.api_url}/workspaces/{workspace_id}"
+        r = await self.api_request(workspace_url)
+        if r is None:
+            return workspace
+        status_code = getattr(r, "status_code", 0)
+        try:
+            json = r.json()
+        except Exception as e:
+            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
+            return workspace
+        workspace = json.get("workspace", {})
+        return workspace
+
+    async def get_globals(self, workspace_id):
+        globals = {}
+        globals_url = f"{self.base_url}/workspace/{workspace_id}/globals"
+        r = await self.helpers.request(globals_url, headers=self.headers)
+        if r is None:
+            return globals
+        status_code = getattr(r, "status_code", 0)
+        try:
+            json = r.json()
+        except Exception as e:
+            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
+            return globals
+        globals = json.get("data", {})
+        return globals
+
+    async def get_environment(self, environment_id):
+        environment = {}
+        environment_url = f"{self.api_url}/environments/{environment_id}"
+        r = await self.api_request(environment_url)
+        if r is None:
+            return environment
+        status_code = getattr(r, "status_code", 0)
+        try:
+            json = r.json()
+        except Exception as e:
+            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
+            return environment
+        environment = json.get("environment", {})
+        return environment
+
+    async def get_collection(self, collection_id):
+        collection = {}
+        collection_url = f"{self.api_url}/collections/{collection_id}"
+        r = await self.api_request(collection_url)
+        if r is None:
+            return collection
+        status_code = getattr(r, "status_code", 0)
+        try:
+            json = r.json()
+        except Exception as e:
+            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
+            return collection
+        collection = json.get("collection", {})
+        return collection
+
+    async def validate_workspace(self, workspace, environments, collections):
+        name = workspace.get("name", "")
+        full_wks = str([workspace, environments, collections])
+        in_scope_hosts = await self.scan.extract_in_scope_hostnames(full_wks)
+        if in_scope_hosts:
+            self.verbose(
+                f'Found in-scope hostname(s): "{in_scope_hosts}" in workspace {name}, it appears to be in-scope'
+            )
+            return True
+        return False
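
Note on the setup() added above: it pools API keys from the config of every module whose name starts with "postman", so a key configured for either postman or postman_download is reused by both. A standalone sketch of just that collection step, using a made-up config dict (module names and keys are examples only):

    # Illustrative config shaped like scan.config["modules"]
    modules_config = {
        "postman": {"api_key": "PMAK-aaa "},
        "postman_download": {"api_key": ["PMAK-bbb", ""]},
        "github_org": {"api_key": "ghp-ccc"},
    }

    api_keys = set()
    for module_name in [m for m in modules_config if str(m).startswith("postman")]:
        api_key = modules_config.get(module_name, {}).get("api_key", "")
        if isinstance(api_key, str):
            api_key = [api_key]
        for key in api_key:
            key = key.strip()
            if key:
                api_keys.add(key)

    print(sorted(api_keys))  # ['PMAK-aaa', 'PMAK-bbb']
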
bbot/modules/templates/subdomain_enum.py CHANGED
@@ -150,7 +150,7 @@ class subdomain_enum(BaseModule):
                     break
                 yield subdomains
         finally:
-            agen.aclose()
+            await agen.aclose()
 
     async def _is_wildcard(self, query):
         rdtypes = ("A", "AAAA", "CNAME")
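
The one-line fix above matters because an async generator's aclose() is itself a coroutine: calling it without await only creates a coroutine object, the generator is never actually closed, and Python emits a "coroutine ... was never awaited" warning. A minimal self-contained illustration of the corrected pattern (the paginator here is a stand-in, not BBOT code):

    import asyncio

    async def pages():
        # stand-in for an API paginator such as api_page_iter()
        for i in range(10):
            yield [f"sub{i}.example.com"]

    async def query():
        agen = pages()
        results = []
        try:
            async for subdomains in agen:
                results.extend(subdomains)
                if len(results) >= 3:
                    break
        finally:
            await agen.aclose()  # must be awaited to actually close the generator
        return results

    print(asyncio.run(query()))  # ['sub0.example.com', 'sub1.example.com', 'sub2.example.com']
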
bbot/modules/templates/webhook.py CHANGED
@@ -13,41 +13,32 @@ class WebhookOutputModule(BaseOutputModule):
     content_key = "content"
     vuln_severities = ["UNKNOWN", "LOW", "MEDIUM", "HIGH", "CRITICAL"]
 
+    # abort module after 10 failed requests (not including retries)
+    _api_failure_abort_threshold = 10
+    # retry each request up to 10 times, respecting the Retry-After header
+    _api_retries = 10
+
     async def setup(self):
+        self._api_retries = self.config.get("retries", 10)
         self.webhook_url = self.config.get("webhook_url", "")
         self.min_severity = self.config.get("min_severity", "LOW").strip().upper()
-        assert (
-            self.min_severity in self.vuln_severities
-        ), f"min_severity must be one of the following: {','.join(self.vuln_severities)}"
+        assert self.min_severity in self.vuln_severities, (
+            f"min_severity must be one of the following: {','.join(self.vuln_severities)}"
+        )
         self.allowed_severities = self.vuln_severities[self.vuln_severities.index(self.min_severity) :]
         if not self.webhook_url:
            self.warning("Must set Webhook URL")
            return False
-        return True
+        return await super().setup()
 
     async def handle_event(self, event):
-        while 1:
-            message = self.format_message(event)
-            data = {self.content_key: message}
-
-            response = await self.helpers.request(
-                url=self.webhook_url,
-                method="POST",
-                json=data,
-            )
-            status_code = getattr(response, "status_code", 0)
-            if self.evaluate_response(response):
-                break
-            else:
-                response_data = getattr(response, "text", "")
-                try:
-                    retry_after = response.json().get("retry_after", 1)
-                except Exception:
-                    retry_after = 1
-                self.verbose(
-                    f"Error sending {event}: status code {status_code}, response: {response_data}, retrying in {retry_after} seconds"
-                )
-                await self.helpers.sleep(retry_after)
+        message = self.format_message(event)
+        data = {self.content_key: message}
+        await self.api_request(
+            url=self.webhook_url,
+            method="POST",
+            json=data,
+        )
 
     def get_watched_events(self):
         if self._watched_events is None:
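
The rewritten handle_event() above no longer retries by hand; delivery goes through api_request(), and the new class attributes (_api_retries, overridable via the module's retries option, and _api_failure_abort_threshold) govern retry and abort behavior centrally. A generic sketch of the kind of Retry-After-aware loop this replaces (the send() callable is illustrative, not BBOT's API):

    import asyncio

    async def post_with_retries(send, payload, max_retries=10):
        # send() is any coroutine returning (ok, retry_after_seconds)
        for _ in range(max_retries + 1):
            ok, retry_after = await send(payload)
            if ok:
                return True
            await asyncio.sleep(retry_after)  # honor the server's requested backoff
        return False

    async def flaky_send(payload, _state={"calls": 0}):
        _state["calls"] += 1
        return _state["calls"] >= 3, 0.01  # succeed on the third attempt

    print(asyncio.run(post_with_retries(flaky_send, {"content": "test"})))  # True
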
bbot/modules/trufflehog.py CHANGED
@@ -13,7 +13,7 @@ class trufflehog(BaseModule):
     }
 
     options = {
-        "version": "3.88.0",
+        "version": "3.88.2",
         "config": "",
         "only_verified": True,
        "concurrency": 8,
@@ -76,8 +76,8 @@ class trufflehog(BaseModule):
            else:
                return False, "Deleted forks is not enabled"
        else:
-            if "parsed-folder" in event.tags:
-                return False, "Not accepting parsed-folder events"
+            if "unarchived-folder" in event.tags:
+                return False, "Not accepting unarchived-folder events"
        return True
 
     async def handle_event(self, event):
bbot/modules/wappalyzer.py CHANGED
@@ -19,7 +19,7 @@ class wappalyzer(BaseModule):
         "created_date": "2022-04-15",
         "author": "@liquidsec",
     }
-    deps_pip = ["python-Wappalyzer~=0.3.1", "aiohttp~=3.9.0b0"]
+    deps_pip = ["python-Wappalyzer~=0.3.1", "aiohttp~=3.9.0b0", "setuptools"]
     # accept all events regardless of scope distance
     scope_distance_modifier = None
     _module_threads = 5
bbot/modules/zoomeye.py CHANGED
@@ -67,7 +67,7 @@ class zoomeye(subdomain_enum_apikey):
                     break
                 i += 1
         finally:
-            agen.aclose()
+            await agen.aclose()
         return results
 
     async def parse_results(self, r):
bbot/presets/kitchen-sink.yml CHANGED
@@ -10,7 +10,7 @@ include:
   - paramminer
   - dirbust-light
   - web-screenshots
-  - baddns-thorough
+  - baddns-intense
 
 config:
   modules:
bbot/presets/nuclei/nuclei-budget.yml ADDED
@@ -0,0 +1,19 @@
+description: Run nuclei scans against all discovered targets, using budget mode to look for low hanging fruit with greatly reduced number of requests
+
+modules:
+  - httpx
+  - nuclei
+  - portfilter
+
+config:
+  modules:
+    nuclei:
+      mode: budget
+      budget: 10
+      directory_only: true # Do not run nuclei on individual non-directory URLs
+
+conditions:
+  - |
+    {% if config.web.spider_distance != 0 %}
+    {{ warn("Running nuclei with spider enabled is generally not recommended. Consider removing 'spider' preset.") }}
+    {% endif %}
bbot/presets/nuclei/nuclei-intense.yml ADDED
@@ -0,0 +1,28 @@
+description: Run nuclei scans against all discovered targets, allowing for spidering, against ALL URLs, and with additional discovery modules.
+
+modules:
+  - httpx
+  - nuclei
+  - robots
+  - urlscan
+  - portfilter
+  - wayback
+
+config:
+  modules:
+    nuclei:
+      directory_only: False # Will run nuclei on ALL discovered URLs - Be careful!
+    wayback:
+      urls: true
+
+conditions:
+  - |
+    {% if config.web.spider_distance == 0 and config.modules.nuclei.directory_only == False %}
+    {{ warn("The 'nuclei-intense' preset turns the 'directory_only' limitation off on the nuclei module. To make the best use of this, you may want to enable spidering with 'spider' or 'spider-intense' preset.") }}
+    {% endif %}
+
+
+# Example for also running a dirbust
+
+#include:
+#  - dirbust-light
bbot/presets/nuclei/nuclei-technology.yml ADDED
@@ -0,0 +1,23 @@
+description: Run nuclei scans against all discovered targets, running templates which match discovered technologies
+
+modules:
+  - httpx
+  - nuclei
+  - portfilter
+
+config:
+  modules:
+    nuclei:
+      mode: technology
+      directory_only: True # Do not run nuclei on individual non-directory URLs. This is less unsafe to disable with technology mode.
+
+conditions:
+  - |
+    {% if config.web.spider_distance != 0 %}
+    {{ warn("Running nuclei with spider enabled is generally not recommended. Consider removing 'spider' preset.") }}
+    {% endif %}
+
+# Example for also running a dirbust
+
+#include:
+#  - dirbust-light
bbot/presets/nuclei/nuclei.yml ADDED
@@ -0,0 +1,34 @@
+description: Run nuclei scans against all discovered targets
+
+modules:
+  - httpx
+  - nuclei
+  - portfilter
+
+config:
+  modules:
+    nuclei:
+      directory_only: True # Do not run nuclei on individual non-directory URLs
+
+
+conditions:
+  - |
+    {% if config.web.spider_distance != 0 %}
+    {{ warn("Running nuclei with spider enabled is generally not recommended. Consider removing 'spider' preset.") }}
+    {% endif %}
+
+
+
+# Additional Examples:
+
+# Slowing Down Scan
+
+#config:
+#  modules:
+#    nuclei:
+#      ratelimit: 10
+#      concurrency: 5
+
+
+
+
bbot/presets/spider-intense.yml ADDED
@@ -0,0 +1,13 @@
+description: Recursive web spider with more aggressive settings
+
+include:
+  - spider
+
+config:
+  web:
+    # how many links to follow in a row
+    spider_distance: 4
+    # don't follow links whose directory depth is higher than 6
+    spider_depth: 6
+    # maximum number of links to follow per page
+    spider_links_per_page: 50
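
All of the preset files added above are enabled by name, just like the built-in presets. A hedged usage sketch based on BBOT's documented Python API (the target is a placeholder, and nothing in this diff changes the Scanner call itself):

    import asyncio
    from bbot.scanner import Scanner

    async def main():
        # "nuclei-budget" comes from bbot/presets/nuclei/nuclei-budget.yml above;
        # the other new presets ("nuclei-intense", "spider-intense", ...) work the same way.
        scan = Scanner("evilcorp.com", presets=["nuclei-budget"])
        async for event in scan.async_start():
            print(event)

    asyncio.run(main())
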
bbot/scanner/preset/args.py CHANGED
@@ -133,6 +133,8 @@ class BBOTArgs:
             )
         if self.parsed.event_types:
             args_preset.core.merge_custom({"modules": {"stdout": {"event_types": self.parsed.event_types}}})
+        if self.parsed.exclude_cdn:
+            args_preset.explicit_scan_modules.add("portfilter")
 
         # dependencies
         deps_config = args_preset.core.custom_config.get("deps", {})
@@ -166,6 +168,21 @@ class BBOTArgs:
                 {"modules": {"excavate": {"custom_yara_rules": self.parsed.custom_yara_rules}}}
             )
 
+        # Check if both user_agent and user_agent_suffix are set. If so combine them and merge into the config
+        if self.parsed.user_agent and self.parsed.user_agent_suffix:
+            modified_user_agent = f"{self.parsed.user_agent} {self.parsed.user_agent_suffix}"
+            args_preset.core.merge_custom({"web": {"user_agent": modified_user_agent}})
+
+        # If only user_agent_suffix is set, retrieve the existing user_agent from the merged config and append the suffix
+        elif self.parsed.user_agent_suffix:
+            existing_user_agent = args_preset.core.config.get("web", {}).get("user_agent", "")
+            modified_user_agent = f"{existing_user_agent} {self.parsed.user_agent_suffix}"
+            args_preset.core.merge_custom({"web": {"user_agent": modified_user_agent}})
+
+        # If only user_agent is set, merge it directly
+        elif self.parsed.user_agent:
+            args_preset.core.merge_custom({"web": {"user_agent": self.parsed.user_agent}})
+
         # CLI config options (dot-syntax)
         for config_arg in self.parsed.config:
             try:
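
The three branches added above boil down to one precedence rule: an explicit --user-agent replaces the configured one, and --user-agent-suffix is appended to whichever user agent ends up in effect. A standalone sketch of that rule (the default string is illustrative):

    def resolve_user_agent(configured, user_agent=None, user_agent_suffix=None):
        # Mirrors the CLI merge logic above: explicit user_agent wins over the configured one,
        # and a suffix, if given, is appended to whatever base is in effect.
        base = user_agent if user_agent else configured
        if user_agent_suffix:
            return f"{base} {user_agent_suffix}"
        return base

    print(resolve_user_agent("BBOT/2.3", user_agent_suffix="(authorized-scan)"))
    # BBOT/2.3 (authorized-scan)
    print(resolve_user_agent("BBOT/2.3", user_agent="Mozilla/5.0", user_agent_suffix="(authorized-scan)"))
    # Mozilla/5.0 (authorized-scan)
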
@@ -232,7 +249,7 @@ class BBOTArgs:
             "--modules",
             nargs="+",
             default=[],
-            help=f'Modules to enable. Choices: {",".join(sorted(self.preset.module_loader.scan_module_choices))}',
+            help=f"Modules to enable. Choices: {','.join(sorted(self.preset.module_loader.scan_module_choices))}",
             metavar="MODULE",
         )
         modules.add_argument("-l", "--list-modules", action="store_true", help="List available modules.")
@@ -247,7 +264,7 @@ class BBOTArgs:
             "--flags",
             nargs="+",
             default=[],
-            help=f'Enable modules by flag. Choices: {",".join(sorted(self.preset.module_loader.flag_choices))}',
+            help=f"Enable modules by flag. Choices: {','.join(sorted(self.preset.module_loader.flag_choices))}",
             metavar="FLAG",
         )
         modules.add_argument("-lf", "--list-flags", action="store_true", help="List available flags.")
@@ -309,13 +326,19 @@ class BBOTArgs:
             "--output-modules",
             nargs="+",
             default=[],
-            help=f'Output module(s). Choices: {",".join(sorted(self.preset.module_loader.output_module_choices))}',
+            help=f"Output module(s). Choices: {','.join(sorted(self.preset.module_loader.output_module_choices))}",
             metavar="MODULE",
         )
         output.add_argument("-lo", "--list-output-modules", action="store_true", help="List available output modules")
         output.add_argument("--json", "-j", action="store_true", help="Output scan data in JSON format")
         output.add_argument("--brief", "-br", action="store_true", help="Output only the data itself")
         output.add_argument("--event-types", nargs="+", default=[], help="Choose which event types to display")
+        output.add_argument(
+            "--exclude-cdn",
+            "-ec",
+            action="store_true",
+            help="Filter out unwanted open ports on CDNs/WAFs (80,443 only)",
+        )
 
         deps = p.add_argument_group(
             title="Module dependencies", description="Control how modules install their dependencies"
@@ -340,6 +363,9 @@ class BBOTArgs:
             help="List of custom headers as key value pairs (header=value).",
         )
         misc.add_argument("--custom-yara-rules", "-cy", help="Add custom yara rules to excavate")
+
+        misc.add_argument("--user-agent", "-ua", help="Set the user-agent for all HTTP requests")
+        misc.add_argument("--user-agent-suffix", "-uas", help=argparse.SUPPRESS, metavar="SUFFIX", default=None)
         return p
 
     def sanitize_args(self):
bbot/scanner/preset/preset.py CHANGED
@@ -24,7 +24,39 @@ _preset_cache = {}
 DEFAULT_PRESETS = None
 
 
-class Preset:
+class BasePreset(type):
+    def __call__(cls, *args, include=None, presets=None, name=None, description=None, _exclude=None, **kwargs):
+        """
+        Handles loading of "included" presets, while preserving the proper load order
+
+        Overriding __call__() allows us to reuse the logic from .merge() without duplicating functionality in __init__().
+        """
+        include_preset = None
+
+        # "presets" is alias to "include"
+        if presets and include:
+            raise ValueError(
+                'Cannot use both "presets" and "include" args at the same time (presets is an alias to include). Please pick one or the other :)'
+            )
+        if presets and not include:
+            include = presets
+        # include other presets
+        if include and not isinstance(include, (list, tuple, set)):
+            include = [include]
+
+        main_preset = type.__call__(cls, *args, name=name, description=description, _exclude=_exclude, **kwargs)
+
+        if include:
+            include_preset = type.__call__(cls, name=name, description=description, _exclude=_exclude)
+            for included_preset in include:
+                include_preset.include_preset(included_preset)
+            include_preset.merge(main_preset)
+            return include_preset
+
+        return main_preset
+
+
+class Preset(metaclass=BasePreset):
     """
     A preset is the central config for a BBOT scan. It contains everything a scan needs to run --
     targets, modules, flags, config options like API keys, etc.
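
The practical effect of the new metaclass is that include handling now happens around construction instead of inside __init__: included presets are loaded first into a fresh instance, and the explicitly constructed preset is merged in last so its own settings take precedence. A toy model of that ordering (deliberately simplified, not BBOT code):

    class MergeOnInclude(type):
        def __call__(cls, *args, include=None, **kwargs):
            main = type.__call__(cls, *args, **kwargs)
            if include:
                base = type.__call__(cls)      # empty container
                for other in include:
                    base.merge(other)          # included presets first...
                base.merge(main)               # ...then the explicit one wins
                return base
            return main

    class Cfg(metaclass=MergeOnInclude):
        def __init__(self, **settings):
            self.settings = dict(settings)

        def merge(self, other):
            self.settings.update(other.settings)

    low = Cfg(threads=5, verbose=False)
    final = Cfg(verbose=True, include=[low])
    print(final.settings)  # {'threads': 5, 'verbose': True}
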
@@ -94,12 +126,10 @@ class Preset:
         exclude_flags=None,
         config=None,
         module_dirs=None,
-        include=None,
-        presets=None,
         output_dir=None,
-        scan_name=None,
         name=None,
         description=None,
+        scan_name=None,
         conditions=None,
         force_start=False,
         verbose=False,
@@ -238,20 +268,6 @@ class Preset:
 
         self._target = None
 
-        # "presets" is alias to "include"
-        if presets and include:
-            raise ValueError(
-                'Cannot use both "presets" and "include" args at the same time (presets is an alias to include). Please pick one or the other :)'
-            )
-        if presets and not include:
-            include = presets
-        # include other presets
-        if include and not isinstance(include, (list, tuple, set)):
-            include = [include]
-        if include:
-            for included_preset in include:
-                self.include_preset(included_preset)
-
         # we don't fill self.modules yet (that happens in .bake())
         self.explicit_scan_modules.update(set(modules))
         self.explicit_output_modules.update(set(output_modules))
@@ -260,6 +276,8 @@ class Preset:
         self.exclude_flags.update(set(exclude_flags))
         self.require_flags.update(set(require_flags))
 
+        # log.critical(f"{self.name}: verbose: {self.verbose}, debug: {self.debug}, silent: {self.silent}")
+
     @property
     def bbot_home(self):
         return Path(self.config.get("home", "~/.bbot")).expanduser().resolve()
@@ -332,6 +350,7 @@ class Preset:
            ['portscan', 'sslcert']
        """
        self.log_debug(f'Merging preset "{other.name}" into "{self.name}"')
+
        # config
        self.core.merge_custom(other.core.custom_config)
        self.module_loader.core = self.core
@@ -671,11 +690,10 @@ class Preset:
            >>> preset.include_preset("/home/user/my_preset.yml")
        """
        self.log_debug(f'Including preset "{filename}"')
-        preset_filename = PRESET_PATH.find(filename)
-        preset_from_yaml = self.from_yaml_file(preset_filename, _exclude=self._preset_files_loaded)
+        preset_from_yaml = self.from_yaml_file(filename, _exclude=self._preset_files_loaded)
        if preset_from_yaml is not False:
            self.merge(preset_from_yaml)
-            self._preset_files_loaded.add(preset_filename)
+            self._preset_files_loaded.add(preset_from_yaml.filename)
 
     @classmethod
     def from_yaml_file(cls, filename, _exclude=None, _log=False):
@@ -687,7 +705,7 @@ class Preset:
         Examples:
             >>> preset = Preset.from_yaml_file("/home/user/my_preset.yml")
         """
-        filename = Path(filename).resolve()
+        filename = PRESET_PATH.find(filename)
         try:
             return _preset_cache[filename]
         except KeyError:
@@ -707,13 +725,14 @@ class Preset:
                 omegaconf.OmegaConf.create(yaml_str), name=filename.stem, _exclude=_exclude, _log=_log
             )
             preset._yaml_str = yaml_str
+            preset.filename = filename
             _preset_cache[filename] = preset
             return preset
 
     @classmethod
     def from_yaml_string(cls, yaml_preset):
         """
-        Create a preset from a YAML file. If the full path is not specified, BBOT will look in all the usual places for it.
+        Create a preset from a YAML string.
 
         The file extension is optional.
 
@@ -870,7 +889,7 @@ class Preset:
             if f in self.exclude_flags:
                 return False, f'it has excluded flag, "{f}"', preloaded
         if self.require_flags and not all(f in module_flags for f in self.require_flags):
-            return False, f'it doesn\'t have the required flags ({",".join(self.require_flags)})', preloaded
+            return False, f"it doesn't have the required flags ({','.join(self.require_flags)})", preloaded
 
         return True, "", preloaded
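
The from_yaml_file() changes above consolidate path resolution (PRESET_PATH.find) and caching in one place and key the cache on the resolved filename, so a preset referenced via different spellings of the same path is only loaded once. A stripped-down model of that behavior (loader and file layout are illustrative):

    import tempfile
    from pathlib import Path

    _preset_cache = {}

    def load_preset(filename):
        # resolve once, then cache by the resolved path (stand-in for PRESET_PATH.find + _preset_cache)
        path = Path(filename).resolve()
        try:
            return _preset_cache[path]
        except KeyError:
            preset = {"filename": path, "yaml": path.read_text()}
            _preset_cache[path] = preset
            return preset

    with tempfile.TemporaryDirectory() as d:
        p = Path(d) / "my_preset.yml"
        p.write_text("description: demo\n")
        a = load_preset(p)
        b = load_preset(f"{d}/./my_preset.yml")  # different spelling, same file
        print(a is b)  # True (loaded only once)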