bbot 2.3.0.5376rc0__py3-none-any.whl → 2.3.0.5382rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of bbot might be problematic. Click here for more details.

Files changed (143)
  1. bbot/__init__.py +1 -1
  2. bbot/cli.py +2 -2
  3. bbot/core/config/logger.py +1 -1
  4. bbot/core/core.py +1 -1
  5. bbot/core/event/base.py +13 -13
  6. bbot/core/helpers/command.py +4 -4
  7. bbot/core/helpers/depsinstaller/installer.py +5 -5
  8. bbot/core/helpers/diff.py +7 -7
  9. bbot/core/helpers/dns/brute.py +1 -1
  10. bbot/core/helpers/dns/dns.py +1 -1
  11. bbot/core/helpers/dns/engine.py +4 -4
  12. bbot/core/helpers/files.py +1 -1
  13. bbot/core/helpers/helper.py +3 -1
  14. bbot/core/helpers/interactsh.py +3 -3
  15. bbot/core/helpers/misc.py +11 -11
  16. bbot/core/helpers/regex.py +1 -1
  17. bbot/core/helpers/regexes.py +3 -3
  18. bbot/core/helpers/validators.py +1 -1
  19. bbot/core/helpers/web/client.py +1 -1
  20. bbot/core/helpers/web/engine.py +1 -1
  21. bbot/core/helpers/web/web.py +2 -2
  22. bbot/core/helpers/wordcloud.py +5 -5
  23. bbot/core/modules.py +21 -21
  24. bbot/modules/azure_tenant.py +2 -2
  25. bbot/modules/base.py +16 -16
  26. bbot/modules/bypass403.py +5 -5
  27. bbot/modules/c99.py +1 -1
  28. bbot/modules/columbus.py +1 -1
  29. bbot/modules/deadly/ffuf.py +8 -8
  30. bbot/modules/deadly/nuclei.py +1 -1
  31. bbot/modules/deadly/vhost.py +3 -3
  32. bbot/modules/dnsbimi.py +1 -1
  33. bbot/modules/dnsdumpster.py +2 -2
  34. bbot/modules/dockerhub.py +1 -1
  35. bbot/modules/extractous.py +1 -1
  36. bbot/modules/filedownload.py +1 -1
  37. bbot/modules/generic_ssrf.py +3 -3
  38. bbot/modules/github_workflows.py +1 -1
  39. bbot/modules/gowitness.py +7 -7
  40. bbot/modules/host_header.py +5 -5
  41. bbot/modules/httpx.py +1 -1
  42. bbot/modules/iis_shortnames.py +6 -6
  43. bbot/modules/internal/cloudcheck.py +5 -5
  44. bbot/modules/internal/dnsresolve.py +7 -7
  45. bbot/modules/internal/excavate.py +5 -5
  46. bbot/modules/internal/speculate.py +4 -4
  47. bbot/modules/ipneighbor.py +1 -1
  48. bbot/modules/jadx.py +1 -1
  49. bbot/modules/newsletters.py +2 -2
  50. bbot/modules/output/asset_inventory.py +6 -6
  51. bbot/modules/output/base.py +1 -1
  52. bbot/modules/output/csv.py +1 -1
  53. bbot/modules/output/stdout.py +2 -2
  54. bbot/modules/paramminer_headers.py +3 -3
  55. bbot/modules/portscan.py +3 -3
  56. bbot/modules/report/asn.py +11 -11
  57. bbot/modules/robots.py +3 -3
  58. bbot/modules/securitytxt.py +1 -1
  59. bbot/modules/sitedossier.py +1 -1
  60. bbot/modules/social.py +1 -1
  61. bbot/modules/subdomainradar.py +1 -1
  62. bbot/modules/telerik.py +7 -7
  63. bbot/modules/templates/bucket.py +1 -1
  64. bbot/modules/templates/github.py +1 -1
  65. bbot/modules/templates/shodan.py +1 -1
  66. bbot/modules/templates/subdomain_enum.py +1 -1
  67. bbot/modules/templates/webhook.py +1 -1
  68. bbot/modules/trufflehog.py +1 -1
  69. bbot/modules/url_manipulation.py +3 -3
  70. bbot/modules/urlscan.py +1 -1
  71. bbot/modules/viewdns.py +1 -1
  72. bbot/modules/wafw00f.py +1 -1
  73. bbot/scanner/preset/args.py +10 -10
  74. bbot/scanner/preset/preset.py +9 -9
  75. bbot/scanner/scanner.py +17 -17
  76. bbot/scanner/target.py +1 -1
  77. bbot/scripts/docs.py +1 -1
  78. bbot/test/bbot_fixtures.py +1 -1
  79. bbot/test/conftest.py +1 -1
  80. bbot/test/run_tests.sh +4 -4
  81. bbot/test/test_step_1/test_bbot_fastapi.py +2 -2
  82. bbot/test/test_step_1/test_cli.py +56 -56
  83. bbot/test/test_step_1/test_dns.py +15 -15
  84. bbot/test/test_step_1/test_engine.py +17 -17
  85. bbot/test/test_step_1/test_events.py +22 -22
  86. bbot/test/test_step_1/test_helpers.py +26 -26
  87. bbot/test/test_step_1/test_manager_scope_accuracy.py +306 -306
  88. bbot/test/test_step_1/test_modules_basic.py +52 -53
  89. bbot/test/test_step_1/test_presets.py +81 -81
  90. bbot/test/test_step_1/test_regexes.py +5 -5
  91. bbot/test/test_step_1/test_scan.py +4 -4
  92. bbot/test/test_step_1/test_target.py +25 -25
  93. bbot/test/test_step_1/test_web.py +5 -5
  94. bbot/test/test_step_2/module_tests/base.py +6 -6
  95. bbot/test/test_step_2/module_tests/test_module_anubisdb.py +1 -1
  96. bbot/test/test_step_2/module_tests/test_module_azure_realm.py +1 -1
  97. bbot/test/test_step_2/module_tests/test_module_baddns.py +6 -6
  98. bbot/test/test_step_2/module_tests/test_module_baddns_direct.py +2 -4
  99. bbot/test/test_step_2/module_tests/test_module_bevigil.py +4 -4
  100. bbot/test/test_step_2/module_tests/test_module_binaryedge.py +2 -2
  101. bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +2 -2
  102. bbot/test/test_step_2/module_tests/test_module_bucket_azure.py +1 -1
  103. bbot/test/test_step_2/module_tests/test_module_builtwith.py +2 -2
  104. bbot/test/test_step_2/module_tests/test_module_c99.py +9 -9
  105. bbot/test/test_step_2/module_tests/test_module_columbus.py +1 -1
  106. bbot/test/test_step_2/module_tests/test_module_credshed.py +2 -2
  107. bbot/test/test_step_2/module_tests/test_module_dehashed.py +1 -1
  108. bbot/test/test_step_2/module_tests/test_module_digitorus.py +1 -1
  109. bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +8 -8
  110. bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +2 -2
  111. bbot/test/test_step_2/module_tests/test_module_excavate.py +10 -10
  112. bbot/test/test_step_2/module_tests/test_module_extractous.py +9 -9
  113. bbot/test/test_step_2/module_tests/test_module_filedownload.py +14 -14
  114. bbot/test/test_step_2/module_tests/test_module_git_clone.py +2 -2
  115. bbot/test/test_step_2/module_tests/test_module_gowitness.py +4 -4
  116. bbot/test/test_step_2/module_tests/test_module_host_header.py +1 -1
  117. bbot/test/test_step_2/module_tests/test_module_http.py +4 -4
  118. bbot/test/test_step_2/module_tests/test_module_httpx.py +7 -7
  119. bbot/test/test_step_2/module_tests/test_module_leakix.py +2 -2
  120. bbot/test/test_step_2/module_tests/test_module_myssl.py +1 -1
  121. bbot/test/test_step_2/module_tests/test_module_neo4j.py +1 -1
  122. bbot/test/test_step_2/module_tests/test_module_newsletters.py +6 -6
  123. bbot/test/test_step_2/module_tests/test_module_ntlm.py +7 -7
  124. bbot/test/test_step_2/module_tests/test_module_oauth.py +1 -1
  125. bbot/test/test_step_2/module_tests/test_module_otx.py +1 -1
  126. bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py +1 -1
  127. bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +2 -2
  128. bbot/test/test_step_2/module_tests/test_module_portscan.py +3 -3
  129. bbot/test/test_step_2/module_tests/test_module_postgres.py +1 -1
  130. bbot/test/test_step_2/module_tests/test_module_rapiddns.py +9 -9
  131. bbot/test/test_step_2/module_tests/test_module_sitedossier.py +2 -2
  132. bbot/test/test_step_2/module_tests/test_module_smuggler.py +1 -1
  133. bbot/test/test_step_2/module_tests/test_module_speculate.py +2 -6
  134. bbot/test/test_step_2/module_tests/test_module_splunk.py +4 -4
  135. bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py +1 -1
  136. bbot/test/test_step_2/module_tests/test_module_subdomains.py +1 -1
  137. bbot/test/test_step_2/module_tests/test_module_trufflehog.py +2 -2
  138. bbot/test/test_step_2/module_tests/test_module_wayback.py +1 -1
  139. {bbot-2.3.0.5376rc0.dist-info → bbot-2.3.0.5382rc0.dist-info}/METADATA +2 -2
  140. {bbot-2.3.0.5376rc0.dist-info → bbot-2.3.0.5382rc0.dist-info}/RECORD +143 -143
  141. {bbot-2.3.0.5376rc0.dist-info → bbot-2.3.0.5382rc0.dist-info}/LICENSE +0 -0
  142. {bbot-2.3.0.5376rc0.dist-info → bbot-2.3.0.5382rc0.dist-info}/WHEEL +0 -0
  143. {bbot-2.3.0.5376rc0.dist-info → bbot-2.3.0.5382rc0.dist-info}/entry_points.txt +0 -0
@@ -39,7 +39,7 @@ class iis_shortnames(BaseModule):
39
39
  test_url = f"{target}*~1*/a.aspx"
40
40
 
41
41
  for method in ["GET", "POST", "OPTIONS", "DEBUG", "HEAD", "TRACE"]:
42
- kwargs = dict(method=method, allow_redirects=False, timeout=10)
42
+ kwargs = {"method": method, "allow_redirects": False, "timeout": 10}
43
43
  confirmations = 0
44
44
  iterations = 5 # one failed detection is tolerated, as long as its not the first run
45
45
  while iterations > 0:
@@ -128,7 +128,7 @@ class iis_shortnames(BaseModule):
128
128
  suffix = "/a.aspx"
129
129
 
130
130
  urls_and_kwargs = []
131
- kwargs = dict(method=method, allow_redirects=False, retries=2, timeout=10)
131
+ kwargs = {"method": method, "allow_redirects": False, "retries": 2, "timeout": 10}
132
132
  for c in valid_chars:
133
133
  for file_part in ("stem", "ext"):
134
134
  payload = encode_all(f"*{c}*~1*")
@@ -160,7 +160,7 @@ class iis_shortnames(BaseModule):
160
160
  url_hint_list = []
161
161
  found_results = False
162
162
 
163
- cl = ext_char_list if extension_mode == True else char_list
163
+ cl = ext_char_list if extension_mode is True else char_list
164
164
 
165
165
  urls_and_kwargs = []
166
166
 
@@ -169,7 +169,7 @@ class iis_shortnames(BaseModule):
169
169
  wildcard = "*" if extension_mode else "*~1*"
170
170
  payload = encode_all(f"{prefix}{c}{wildcard}")
171
171
  url = f"{target}{payload}{suffix}"
172
- kwargs = dict(method=method)
172
+ kwargs = {"method": method}
173
173
  urls_and_kwargs.append((url, kwargs, c))
174
174
 
175
175
  async for url, kwargs, c, response in self.helpers.request_custom_batch(urls_and_kwargs):
@@ -209,7 +209,7 @@ class iis_shortnames(BaseModule):
209
209
  extension_mode,
210
210
  node_count=node_count,
211
211
  )
212
- if len(prefix) > 0 and found_results == False:
212
+ if len(prefix) > 0 and found_results is False:
213
213
  url_hint_list.append(f"{prefix}")
214
214
  self.verbose(f"Found new (possibly partial) URL_HINT: {prefix} from node {target}")
215
215
  return url_hint_list
@@ -234,7 +234,7 @@ class iis_shortnames(BaseModule):
234
234
  {"severity": "LOW", "host": str(event.host), "url": normalized_url, "description": description},
235
235
  "VULNERABILITY",
236
236
  event,
237
- context=f"{{module}} detected low {{event.type}}: IIS shortname enumeration",
237
+ context="{module} detected low {event.type}: IIS shortname enumeration",
238
238
  )
239
239
  if not self.config.get("detect_only"):
240
240
  for detection in detections:
@@ -15,7 +15,7 @@ class CloudCheck(BaseInterceptModule):
15
15
 
16
16
  def make_dummy_modules(self):
17
17
  self.dummy_modules = {}
18
- for provider_name, provider in self.helpers.cloud.providers.items():
18
+ for provider_name in self.helpers.cloud.providers.keys():
19
19
  module = self.scan._make_dummy_module(f"cloud_{provider_name}", _type="scan")
20
20
  module.default_discovery_context = "{module} derived {event.type}: {event.host}"
21
21
  self.dummy_modules[provider_name] = module
@@ -56,9 +56,9 @@ class CloudCheck(BaseInterceptModule):
56
56
  # loop through each provider
57
57
  for provider in self.helpers.cloud.providers.values():
58
58
  provider_name = provider.name.lower()
59
- base_kwargs = dict(
60
- parent=event, tags=[f"{provider.provider_type}-{provider_name}"], _provider=provider_name
61
- )
59
+ base_kwargs = {
60
+ "parent": event, "tags": [f"{provider.provider_type}-{provider_name}"], "_provider": provider_name
61
+ }
62
62
  # loop through the provider's regex signatures, if any
63
63
  for event_type, sigs in provider.signatures.items():
64
64
  if event_type != "STORAGE_BUCKET":
@@ -74,7 +74,7 @@ class CloudCheck(BaseInterceptModule):
74
74
  if match:
75
75
  matches.append(match.groups())
76
76
  for match in matches:
77
- if not match in found:
77
+ if match not in found:
78
78
  found.add(match)
79
79
 
80
80
  _kwargs = dict(base_kwargs)
@@ -131,9 +131,9 @@ class DNSResolve(BaseInterceptModule):
131
131
  event.host, rdtypes=rdtypes, raw_dns_records=event.raw_dns_records
132
132
  )
133
133
  for rdtype, (is_wildcard, wildcard_host) in wildcard_rdtypes.items():
134
- if is_wildcard == False:
134
+ if is_wildcard is False:
135
135
  continue
136
- elif is_wildcard == True:
136
+ elif is_wildcard is True:
137
137
  event.add_tag("wildcard")
138
138
  wildcard_tag = "wildcard"
139
139
  else:
@@ -142,16 +142,16 @@ class DNSResolve(BaseInterceptModule):
142
142
  event.add_tag(f"{rdtype}-{wildcard_tag}")
143
143
 
144
144
  # wildcard event modification (www.evilcorp.com --> _wildcard.evilcorp.com)
145
- if wildcard_rdtypes and not "target" in event.tags:
145
+ if wildcard_rdtypes and "target" not in event.tags:
146
146
  # these are the rdtypes that have wildcards
147
147
  wildcard_rdtypes_set = set(wildcard_rdtypes)
148
148
  # consider the event a full wildcard if all its records are wildcards
149
149
  event_is_wildcard = False
150
150
  if wildcard_rdtypes_set:
151
- event_is_wildcard = all(r[0] == True for r in wildcard_rdtypes.values())
151
+ event_is_wildcard = all(r[0] is True for r in wildcard_rdtypes.values())
152
152
 
153
153
  if event_is_wildcard:
154
- if event.type in ("DNS_NAME",) and not "_wildcard" in event.data.split("."):
154
+ if event.type in ("DNS_NAME",) and "_wildcard" not in event.data.split("."):
155
155
  wildcard_parent = self.helpers.parent_domain(event.host)
156
156
  for rdtype, (_is_wildcard, _parent_domain) in wildcard_rdtypes.items():
157
157
  if _is_wildcard:
@@ -273,7 +273,7 @@ class DNSResolve(BaseInterceptModule):
273
273
  # tag event with errors
274
274
  for rdtype, errors in dns_errors.items():
275
275
  # only consider it an error if there weren't any results for that rdtype
276
- if errors and not rdtype in event.dns_children:
276
+ if errors and rdtype not in event.dns_children:
277
277
  event.add_tag(f"{rdtype}-error")
278
278
 
279
279
  def get_dns_parent(self, event):
@@ -307,7 +307,7 @@ class DNSResolve(BaseInterceptModule):
307
307
  def emit_raw_records(self):
308
308
  if self._emit_raw_records is None:
309
309
  watching_raw_records = any(
310
- ["RAW_DNS_RECORD" in m.get_watched_events() for m in self.scan.modules.values()]
310
+ "RAW_DNS_RECORD" in m.get_watched_events() for m in self.scan.modules.values()
311
311
  )
312
312
  omitted_event_types = self.scan.config.get("omit_event_types", [])
313
313
  omit_raw_records = "RAW_DNS_RECORD" in omitted_event_types
@@ -153,7 +153,7 @@ class ExcavateRule:
153
153
  yara_results = {}
154
154
  for h in r.strings:
155
155
  yara_results[h.identifier.lstrip("$")] = sorted(
156
- set([i.matched_data.decode("utf-8", errors="ignore") for i in h.instances])
156
+ {i.matched_data.decode("utf-8", errors="ignore") for i in h.instances}
157
157
  )
158
158
  await self.process(yara_results, event, yara_rule_settings, discovery_context)
159
159
 
@@ -180,7 +180,7 @@ class ExcavateRule:
180
180
  Returns:
181
181
  None
182
182
  """
183
- for identifier, results in yara_results.items():
183
+ for results in yara_results.values():
184
184
  for result in results:
185
185
  event_data = {"description": f"{discovery_context} {yara_rule_settings.description}"}
186
186
  if yara_rule_settings.emit_match:
@@ -314,7 +314,7 @@ class excavate(BaseInternalModule, BaseInterceptModule):
314
314
 
315
315
  _module_threads = 8
316
316
 
317
- parameter_blacklist = set(
317
+ parameter_blacklist = {
318
318
  p.lower()
319
319
  for p in [
320
320
  "__VIEWSTATE",
@@ -329,7 +329,7 @@ class excavate(BaseInternalModule, BaseInterceptModule):
329
329
  "JSESSIONID",
330
330
  "PHPSESSID",
331
331
  ]
332
- )
332
+ }
333
333
 
334
334
  yara_rule_name_regex = re.compile(r"rule\s(\w+)\s{")
335
335
  yara_rule_regex = re.compile(r"(?s)((?:rule\s+\w+\s*{[^{}]*(?:{[^{}]*}[^{}]*)*[^{}]*(?:/\S*?}[^/]*?/)*)*})")
@@ -634,7 +634,7 @@ class excavate(BaseInternalModule, BaseInterceptModule):
634
634
  scheme_blacklist = ["javascript", "mailto", "tel", "data", "vbscript", "about", "file"]
635
635
 
636
636
  async def process(self, yara_results, event, yara_rule_settings, discovery_context):
637
- for identifier, results in yara_results.items():
637
+ for results in yara_results.values():
638
638
  for url_str in results:
639
639
  scheme = url_str.split("://")[0]
640
640
  if scheme in self.scheme_blacklist:
@@ -45,10 +45,10 @@ class speculate(BaseInternalModule):
45
45
 
46
46
  async def setup(self):
47
47
  scan_modules = [m for m in self.scan.modules.values() if m._type == "scan"]
48
- self.open_port_consumers = any(["OPEN_TCP_PORT" in m.watched_events for m in scan_modules])
48
+ self.open_port_consumers = any("OPEN_TCP_PORT" in m.watched_events for m in scan_modules)
49
49
  # only consider active portscanners (still speculate if only passive ones are enabled)
50
50
  self.portscanner_enabled = any(
51
- ["portscan" in m.flags and "active" in m.flags for m in self.scan.modules.values()]
51
+ "portscan" in m.flags and "active" in m.flags for m in self.scan.modules.values()
52
52
  )
53
53
  self.emit_open_ports = self.open_port_consumers and not self.portscanner_enabled
54
54
  self.range_to_ip = True
@@ -71,7 +71,7 @@ class speculate(BaseInternalModule):
71
71
  self.hugewarning(
72
72
  f"Selected target ({target_len:,} hosts) is too large, skipping IP_RANGE --> IP_ADDRESS speculation"
73
73
  )
74
- self.hugewarning(f'Enabling the "portscan" module is highly recommended')
74
+ self.hugewarning('Enabling the "portscan" module is highly recommended')
75
75
  self.range_to_ip = False
76
76
 
77
77
  return True
@@ -126,7 +126,7 @@ class speculate(BaseInternalModule):
126
126
  parent = self.helpers.parent_domain(event.host_original)
127
127
  if parent != event.data:
128
128
  await self.emit_event(
129
- parent, "DNS_NAME", parent=event, context=f"speculated parent {{event.type}}: {{event.data}}"
129
+ parent, "DNS_NAME", parent=event, context="speculated parent {event.type}: {event.data}"
130
130
  )
131
131
 
132
132
  # URL --> OPEN_TCP_PORT
@@ -31,7 +31,7 @@ class ipneighbor(BaseModule):
31
31
  netmask = main_ip.max_prefixlen - min(main_ip.max_prefixlen, self.num_bits)
32
32
  network = ipaddress.ip_network(f"{main_ip}/{netmask}", strict=False)
33
33
  subnet_hash = hash(network)
34
- if not subnet_hash in self.processed:
34
+ if subnet_hash not in self.processed:
35
35
  self.processed.add(subnet_hash)
36
36
  for ip in network:
37
37
  if ip != main_ip:
bbot/modules/jadx.py CHANGED
@@ -43,7 +43,7 @@ class jadx(BaseModule):
43
43
 
44
44
  async def filter_event(self, event):
45
45
  if "file" in event.tags:
46
- if not event.data["magic_description"].lower() in self.allowed_file_types:
46
+ if event.data["magic_description"].lower() not in self.allowed_file_types:
47
47
  return False, f"Jadx is not able to decompile this file type: {event.data['magic_description']}"
48
48
  else:
49
49
  return False, "Event is not a file"
@@ -46,11 +46,11 @@ class newsletters(BaseModule):
46
46
  body = _event.data["body"]
47
47
  soup = self.helpers.beautifulsoup(body, "html.parser")
48
48
  if soup is False:
49
- self.debug(f"BeautifulSoup returned False")
49
+ self.debug("BeautifulSoup returned False")
50
50
  return
51
51
  result = self.find_type(soup)
52
52
  if result:
53
- description = f"Found a Newsletter Submission Form that could be used for email bombing attacks"
53
+ description = "Found a Newsletter Submission Form that could be used for email bombing attacks"
54
54
  data = {"host": str(_event.host), "description": description, "url": _event.data["url"]}
55
55
  await self.emit_event(
56
56
  data,
@@ -91,15 +91,15 @@ class asset_inventory(CSV):
91
91
  self.assets[hostkey].absorb_event(event)
92
92
 
93
93
  async def report(self):
94
- stats = dict()
95
- totals = dict()
94
+ stats = {}
95
+ totals = {}
96
96
 
97
97
  def increment_stat(stat, value):
98
98
  try:
99
99
  totals[stat] += 1
100
100
  except KeyError:
101
101
  totals[stat] = 1
102
- if not stat in stats:
102
+ if stat not in stats:
103
103
  stats[stat] = {}
104
104
  try:
105
105
  stats[stat][value] += 1
@@ -263,13 +263,13 @@ class Asset:
263
263
  if not self.recheck:
264
264
  # ports
265
265
  ports = [i.strip() for i in row.get("Open Ports", "").split(",")]
266
- self.ports.update(set(i for i in ports if i and is_port(i)))
266
+ self.ports.update({i for i in ports if i and is_port(i)})
267
267
  # findings
268
268
  findings = [i.strip() for i in row.get("Findings", "").splitlines()]
269
- self.findings.update(set(i for i in findings if i))
269
+ self.findings.update({i for i in findings if i})
270
270
  # technologies
271
271
  technologies = [i.strip() for i in row.get("Technologies", "").splitlines()]
272
- self.technologies.update(set(i for i in technologies if i))
272
+ self.technologies.update({i for i in technologies if i})
273
273
  # risk rating
274
274
  risk_rating = row.get("Risk Rating", "").strip()
275
275
  if risk_rating and risk_rating.isdigit() and int(risk_rating) > self.risk_rating:
@@ -24,7 +24,7 @@ class BaseOutputModule(BaseModule):
24
24
  if event.type in ("FINISHED",):
25
25
  return True, "its type is FINISHED"
26
26
  if self.errored:
27
- return False, f"module is in error state"
27
+ return False, "module is in error state"
28
28
  # exclude non-watched types
29
29
  if not any(t in self.get_watched_events() for t in ("*", event.type)):
30
30
  return False, "its type is not in watched_events"
@@ -64,7 +64,7 @@ class CSV(BaseOutputModule):
64
64
  ),
65
65
  "Source Module": str(getattr(event, "module_sequence", "")),
66
66
  "Scope Distance": str(getattr(event, "scope_distance", "")),
67
- "Event Tags": ",".join(sorted(list(getattr(event, "tags", [])))),
67
+ "Event Tags": ",".join(sorted(getattr(event, "tags", []))),
68
68
  "Discovery Path": " --> ".join(discovery_path),
69
69
  }
70
70
  )
@@ -20,7 +20,7 @@ class Stdout(BaseOutputModule):
20
20
 
21
21
  async def setup(self):
22
22
  self.text_format = self.config.get("format", "text").strip().lower()
23
- if not self.text_format in self.format_choices:
23
+ if self.text_format not in self.format_choices:
24
24
  return (
25
25
  False,
26
26
  f'Invalid text format choice, "{self.text_format}" (choices: {",".join(self.format_choices)})',
@@ -33,7 +33,7 @@ class Stdout(BaseOutputModule):
33
33
 
34
34
  async def filter_event(self, event):
35
35
  if self.accept_event_types:
36
- if not event.type in self.accept_event_types:
36
+ if event.type not in self.accept_event_types:
37
37
  return False, f'Event type "{event.type}" is not in the allowed event_types'
38
38
  return True
39
39
 
@@ -89,11 +89,11 @@ class paramminer_headers(BaseModule):
89
89
  if not wordlist:
90
90
  wordlist = f"{self.helpers.wordlist_dir}/{self.default_wordlist}"
91
91
  self.debug(f"Using wordlist: [{wordlist}]")
92
- self.wl = set(
92
+ self.wl = {
93
93
  h.strip().lower()
94
94
  for h in self.helpers.read_file(await self.helpers.wordlist(wordlist))
95
95
  if len(h) > 0 and "%" not in h
96
- )
96
+ }
97
97
 
98
98
  # check against the boring list (if the option is set)
99
99
  if self.config.get("skip_boring_words", True):
@@ -238,7 +238,7 @@ class paramminer_headers(BaseModule):
238
238
  return await compare_helper.compare(url, headers=test_headers, check_reflection=(len(header_list) == 1))
239
239
 
240
240
  async def finish(self):
241
- untested_matches = sorted(list(self.extracted_words_master.copy()))
241
+ untested_matches = sorted(self.extracted_words_master.copy())
242
242
  for url, (event, batch_size) in list(self.event_dict.items()):
243
243
  try:
244
244
  compare_helper = self.helpers.http_compare(url)
bbot/modules/portscan.py CHANGED
@@ -99,7 +99,7 @@ class portscan(BaseModule):
99
99
  return False, "Masscan failed to run"
100
100
  returncode = getattr(ipv6_result, "returncode", 0)
101
101
  if returncode and "failed to detect IPv6 address" in ipv6_result.stderr:
102
- self.warning(f"It looks like you are not set up for IPv6. IPv6 targets will not be scanned.")
102
+ self.warning("It looks like you are not set up for IPv6. IPv6 targets will not be scanned.")
103
103
  self.ipv6_support = False
104
104
  return True
105
105
 
@@ -109,7 +109,7 @@ class portscan(BaseModule):
109
109
  self.scanned_initial_targets = True
110
110
  events = set(events)
111
111
  events.update(
112
- set([e for e in self.scan.target.seeds.events if e.type in ("DNS_NAME", "IP_ADDRESS", "IP_RANGE")])
112
+ {e for e in self.scan.target.seeds.events if e.type in ("DNS_NAME", "IP_ADDRESS", "IP_RANGE")}
113
113
  )
114
114
 
115
115
  # ping scan
@@ -334,7 +334,7 @@ class portscan(BaseModule):
334
334
  if "FAIL" in s:
335
335
  self.warning(s)
336
336
  self.warning(
337
- f'Masscan failed to detect interface. Recommend passing "adapter_ip", "adapter_mac", and "router_mac" config options to portscan module.'
337
+ 'Masscan failed to detect interface. Recommend passing "adapter_ip", "adapter_mac", and "router_mac" config options to portscan module.'
338
338
  )
339
339
  else:
340
340
  self.verbose(s)
@@ -38,7 +38,7 @@ class asn(BaseReportModule):
38
38
 
39
39
  async def handle_event(self, event):
40
40
  host = event.host
41
- if self.cache_get(host) == False:
41
+ if self.cache_get(host) is False:
42
42
  asns, source = await self.get_asn(host)
43
43
  if not asns:
44
44
  self.cache_put(self.unknown_asn)
@@ -96,7 +96,7 @@ class asn(BaseReportModule):
96
96
  for p in self.helpers.ip_network_parents(ip):
97
97
  try:
98
98
  self.asn_counts[p] += 1
99
- if ret == False:
99
+ if ret is False:
100
100
  ret = p
101
101
  except KeyError:
102
102
  continue
@@ -112,7 +112,7 @@ class asn(BaseReportModule):
112
112
  for i, source in enumerate(list(self.sources)):
113
113
  get_asn_fn = getattr(self, f"get_asn_{source}")
114
114
  res = await get_asn_fn(ip)
115
- if res == False:
115
+ if res is False:
116
116
  # demote the current source to lowest priority since it just failed
117
117
  self.sources.append(self.sources.pop(i))
118
118
  self.verbose(f"Failed to contact {source}, retrying")
@@ -125,7 +125,7 @@ class asn(BaseReportModule):
125
125
  url = f"https://stat.ripe.net/data/network-info/data.json?resource={ip}"
126
126
  response = await self.get_url(url, "ASN")
127
127
  asns = []
128
- if response == False:
128
+ if response is False:
129
129
  return False
130
130
  data = response.get("data", {})
131
131
  if not data:
@@ -138,7 +138,7 @@ class asn(BaseReportModule):
138
138
  asn_numbers = []
139
139
  for number in asn_numbers:
140
140
  asn = await self.get_asn_metadata_ripe(number)
141
- if asn == False:
141
+ if asn is False:
142
142
  return False
143
143
  asn["subnet"] = prefix
144
144
  asns.append(asn)
@@ -155,7 +155,7 @@ class asn(BaseReportModule):
155
155
  }
156
156
  url = f"https://stat.ripe.net/data/whois/data.json?resource={asn_number}"
157
157
  response = await self.get_url(url, "ASN Metadata", cache=True)
158
- if response == False:
158
+ if response is False:
159
159
  return False
160
160
  data = response.get("data", {})
161
161
  if not data:
@@ -187,7 +187,7 @@ class asn(BaseReportModule):
187
187
  data = await self.get_url(url, "ASN")
188
188
  asns = []
189
189
  asns_tried = set()
190
- if data == False:
190
+ if data is False:
191
191
  return False
192
192
  data = data.get("data", {})
193
193
  prefixes = data.get("prefixes", [])
@@ -201,13 +201,13 @@ class asn(BaseReportModule):
201
201
  description = details.get("description") or prefix.get("description") or ""
202
202
  country = details.get("country_code") or prefix.get("country_code") or ""
203
203
  emails = []
204
- if not asn in asns_tried:
204
+ if asn not in asns_tried:
205
205
  emails = await self.get_emails_bgpview(asn)
206
- if emails == False:
206
+ if emails is False:
207
207
  return False
208
208
  asns_tried.add(asn)
209
209
  asns.append(
210
- dict(asn=asn, subnet=subnet, name=name, description=description, country=country, emails=emails)
210
+ {"asn": asn, "subnet": subnet, "name": name, "description": description, "country": country, "emails": emails}
211
211
  )
212
212
  if not asns:
213
213
  self.debug(f'No results for "{ip}"')
@@ -217,7 +217,7 @@ class asn(BaseReportModule):
217
217
  contacts = []
218
218
  url = f"https://api.bgpview.io/asn/{asn}"
219
219
  data = await self.get_url(url, "ASN metadata", cache=True)
220
- if data == False:
220
+ if data is False:
221
221
  return False
222
222
  data = data.get("data", {})
223
223
  if not data:
bbot/modules/robots.py CHANGED
@@ -33,14 +33,14 @@ class robots(BaseModule):
33
33
  for l in lines:
34
34
  if len(l) > 0:
35
35
  split_l = l.split(": ")
36
- if (split_l[0].lower() == "allow" and self.config.get("include_allow") == True) or (
37
- split_l[0].lower() == "disallow" and self.config.get("include_disallow") == True
36
+ if (split_l[0].lower() == "allow" and self.config.get("include_allow") is True) or (
37
+ split_l[0].lower() == "disallow" and self.config.get("include_disallow") is True
38
38
  ):
39
39
  unverified_url = f"{host}{split_l[1].lstrip('/')}".replace(
40
40
  "*", self.helpers.rand_string(4)
41
41
  )
42
42
 
43
- elif split_l[0].lower() == "sitemap" and self.config.get("include_sitemap") == True:
43
+ elif split_l[0].lower() == "sitemap" and self.config.get("include_sitemap") is True:
44
44
  unverified_url = split_l[1]
45
45
  else:
46
46
  continue
@@ -121,7 +121,7 @@ class securitytxt(BaseModule):
121
121
  start, end = match.span()
122
122
  found_url = v[start:end]
123
123
 
124
- if found_url != url and self._urls == True:
124
+ if found_url != url and self._urls is True:
125
125
  await self.emit_event(found_url, "URL_UNVERIFIED", parent=event, tags=tags)
126
126
 
127
127
 
@@ -52,5 +52,5 @@ class sitedossier(subdomain_enum):
52
52
  results.add(hostname)
53
53
  yield hostname
54
54
  if '<a href="/parentdomain/' not in response.text:
55
- self.debug(f"Next page not found")
55
+ self.debug("Next page not found")
56
56
  break
bbot/modules/social.py CHANGED
@@ -45,7 +45,7 @@ class social(BaseModule):
45
45
  url = f"https://{url}"
46
46
  event_data = {"platform": platform, "url": url, "profile_name": profile_name}
47
47
  # only emit if the same event isn't already in the parent chain
48
- if not any([e.type == "SOCIAL" and e.data == event_data for e in event.get_parents()]):
48
+ if not any(e.type == "SOCIAL" and e.data == event_data for e in event.get_parents()):
49
49
  social_event = self.make_event(
50
50
  event_data,
51
51
  "SOCIAL",
@@ -46,7 +46,7 @@ class SubdomainRadar(subdomain_enum_apikey):
46
46
  try:
47
47
  j = response.json()
48
48
  except Exception:
49
- return False, f"Failed to get enumerators: failed to parse response as JSON"
49
+ return False, "Failed to get enumerators: failed to parse response as JSON"
50
50
  for group in j:
51
51
  group_name = group.get("name", "").strip().lower()
52
52
  if group_name:
bbot/modules/telerik.py CHANGED
@@ -174,7 +174,7 @@ class telerik(BaseModule):
174
174
  result, _ = await self.test_detector(event.data, webresource)
175
175
  if result:
176
176
  if "RadAsyncUpload handler is registered succesfully" in result.text:
177
- self.debug(f"Detected Telerik instance (Telerik.Web.UI.WebResource.axd?type=rau)")
177
+ self.debug("Detected Telerik instance (Telerik.Web.UI.WebResource.axd?type=rau)")
178
178
 
179
179
  probe_data = {
180
180
  "rauPostData": (
@@ -216,7 +216,7 @@ class telerik(BaseModule):
216
216
  event,
217
217
  context=f"{{module}} scanned {event.data} and identified {{event.type}}: Telerik RAU AXD Handler",
218
218
  )
219
- if self.config.get("exploit_RAU_crypto") == True:
219
+ if self.config.get("exploit_RAU_crypto") is True:
220
220
  hostname = urlparse(event.data).netloc
221
221
  if hostname not in self.RAUConfirmed:
222
222
  self.RAUConfirmed.append(hostname)
@@ -270,7 +270,7 @@ class telerik(BaseModule):
270
270
  else:
271
271
  if "Cannot deserialize dialog parameters" in response.text:
272
272
  self.debug(f"Detected Telerik UI instance ({dh})")
273
- description = f"Telerik DialogHandler detected"
273
+ description = "Telerik DialogHandler detected"
274
274
  await self.emit_event(
275
275
  {"host": str(event.host), "url": f"{event.data}{dh}", "description": description},
276
276
  "FINDING",
@@ -289,8 +289,8 @@ class telerik(BaseModule):
289
289
  self.debug(validate_result)
290
290
  validate_status_code = getattr(validate_result, "status_code", 0)
291
291
  if validate_status_code not in (0, 500):
292
- self.debug(f"Detected Telerik UI instance (Telerik.Web.UI.SpellCheckHandler.axd)")
293
- description = f"Telerik SpellCheckHandler detected"
292
+ self.debug("Detected Telerik UI instance (Telerik.Web.UI.SpellCheckHandler.axd)")
293
+ description = "Telerik SpellCheckHandler detected"
294
294
  await self.emit_event(
295
295
  {
296
296
  "host": str(event.host),
@@ -334,7 +334,7 @@ class telerik(BaseModule):
334
334
  },
335
335
  "FINDING",
336
336
  event,
337
- context=f"{{module}} searched HTTP_RESPONSE and identified {{event.type}}: Telerik ChartImage AXD Handler",
337
+ context="{module} searched HTTP_RESPONSE and identified {event.type}: Telerik ChartImage AXD Handler",
338
338
  )
339
339
  elif '"_serializedConfiguration":"' in resp_body:
340
340
  await self.emit_event(
@@ -345,7 +345,7 @@ class telerik(BaseModule):
345
345
  },
346
346
  "FINDING",
347
347
  event,
348
- context=f"{{module}} searched HTTP_RESPONSE and identified {{event.type}}: Telerik AsyncUpload",
348
+ context="{module} searched HTTP_RESPONSE and identified {event.type}: Telerik AsyncUpload",
349
349
  )
350
350
 
351
351
  # Check for RAD Controls in URL
@@ -159,7 +159,7 @@ class bucket_template(BaseModule):
159
159
  valid = self.cloud_helper.is_valid_bucket_name(bucket_name)
160
160
  if valid and not self.helpers.is_ip(bucket_name):
161
161
  bucket_hash = hash(bucket_name)
162
- if not bucket_hash in self.buckets_tried:
162
+ if bucket_hash not in self.buckets_tried:
163
163
  self.buckets_tried.add(bucket_hash)
164
164
  return True
165
165
  return False
@@ -38,7 +38,7 @@ class github(BaseModule):
38
38
  self.api_key = api_keys
39
39
  try:
40
40
  await self.ping()
41
- self.hugesuccess(f"API is ready")
41
+ self.hugesuccess("API is ready")
42
42
  return True
43
43
  except Exception as e:
44
44
  self.trace(traceback.format_exc())
@@ -28,7 +28,7 @@ class shodan(subdomain_enum):
28
28
  self.api_key = api_keys
29
29
  try:
30
30
  await self.ping()
31
- self.hugesuccess(f"API is ready")
31
+ self.hugesuccess("API is ready")
32
32
  return True
33
33
  except Exception as e:
34
34
  self.trace(traceback.format_exc())
@@ -155,7 +155,7 @@ class subdomain_enum(BaseModule):
155
155
  async def _is_wildcard(self, query):
156
156
  rdtypes = ("A", "AAAA", "CNAME")
157
157
  if self.helpers.is_dns_name(query):
158
- for domain, wildcard_rdtypes in (await self.helpers.is_wildcard_domain(query, rdtypes=rdtypes)).items():
158
+ for wildcard_rdtypes in (await self.helpers.is_wildcard_domain(query, rdtypes=rdtypes)).values():
159
159
  if any(t in wildcard_rdtypes for t in rdtypes):
160
160
  return True
161
161
  return False
@@ -60,7 +60,7 @@ class WebhookOutputModule(BaseOutputModule):
60
60
  async def filter_event(self, event):
61
61
  if event.type == "VULNERABILITY":
62
62
  severity = event.data.get("severity", "UNKNOWN")
63
- if not severity in self.allowed_severities:
63
+ if severity not in self.allowed_severities:
64
64
  return False, f"{severity} is below min_severity threshold"
65
65
  return True
66
66
 
@@ -51,7 +51,7 @@ class trufflehog(BaseModule):
51
51
  self.github_token = ""
52
52
  if self.deleted_forks:
53
53
  self.warning(
54
- f"Deleted forks is enabled. Scanning for deleted forks is slooooooowwwww. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours."
54
+ "Deleted forks is enabled. Scanning for deleted forks is slooooooowwwww. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours."
55
55
  )
56
56
  for module_name in ("github", "github_codesearch", "github_org", "git_clone"):
57
57
  module_config = self.scan.config.get("modules", {}).get(module_name, {})