bbot 2.3.0.5370rc0__py3-none-any.whl → 2.3.0.5382rc0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of bbot might be problematic.
- bbot/__init__.py +1 -1
- bbot/cli.py +2 -6
- bbot/core/config/files.py +0 -1
- bbot/core/config/logger.py +1 -1
- bbot/core/core.py +1 -1
- bbot/core/event/base.py +13 -16
- bbot/core/helpers/command.py +4 -4
- bbot/core/helpers/depsinstaller/installer.py +5 -5
- bbot/core/helpers/diff.py +7 -7
- bbot/core/helpers/dns/brute.py +1 -1
- bbot/core/helpers/dns/dns.py +1 -2
- bbot/core/helpers/dns/engine.py +4 -6
- bbot/core/helpers/dns/mock.py +0 -1
- bbot/core/helpers/files.py +1 -1
- bbot/core/helpers/helper.py +3 -1
- bbot/core/helpers/interactsh.py +3 -3
- bbot/core/helpers/libmagic.py +0 -1
- bbot/core/helpers/misc.py +11 -11
- bbot/core/helpers/process.py +0 -2
- bbot/core/helpers/regex.py +1 -1
- bbot/core/helpers/regexes.py +3 -3
- bbot/core/helpers/validators.py +1 -2
- bbot/core/helpers/web/client.py +1 -1
- bbot/core/helpers/web/engine.py +1 -2
- bbot/core/helpers/web/web.py +2 -3
- bbot/core/helpers/wordcloud.py +5 -5
- bbot/core/modules.py +21 -22
- bbot/db/sql/models.py +0 -1
- bbot/modules/azure_tenant.py +2 -2
- bbot/modules/baddns.py +0 -2
- bbot/modules/baddns_direct.py +0 -1
- bbot/modules/base.py +16 -16
- bbot/modules/bypass403.py +5 -5
- bbot/modules/c99.py +1 -1
- bbot/modules/columbus.py +1 -1
- bbot/modules/deadly/ffuf.py +8 -8
- bbot/modules/deadly/nuclei.py +1 -1
- bbot/modules/deadly/vhost.py +3 -3
- bbot/modules/dnsbimi.py +1 -1
- bbot/modules/dnsdumpster.py +2 -2
- bbot/modules/dockerhub.py +1 -1
- bbot/modules/dotnetnuke.py +0 -2
- bbot/modules/extractous.py +1 -1
- bbot/modules/filedownload.py +1 -1
- bbot/modules/generic_ssrf.py +3 -3
- bbot/modules/github_workflows.py +1 -1
- bbot/modules/gowitness.py +7 -7
- bbot/modules/host_header.py +5 -5
- bbot/modules/httpx.py +1 -1
- bbot/modules/iis_shortnames.py +6 -6
- bbot/modules/internal/cloudcheck.py +5 -5
- bbot/modules/internal/dnsresolve.py +7 -7
- bbot/modules/internal/excavate.py +23 -26
- bbot/modules/internal/speculate.py +4 -4
- bbot/modules/ipneighbor.py +1 -1
- bbot/modules/jadx.py +1 -1
- bbot/modules/newsletters.py +2 -2
- bbot/modules/output/asset_inventory.py +6 -6
- bbot/modules/output/base.py +1 -1
- bbot/modules/output/csv.py +1 -1
- bbot/modules/output/stdout.py +2 -2
- bbot/modules/paramminer_headers.py +4 -7
- bbot/modules/portscan.py +3 -3
- bbot/modules/report/asn.py +11 -11
- bbot/modules/robots.py +3 -3
- bbot/modules/securitytxt.py +1 -1
- bbot/modules/sitedossier.py +1 -1
- bbot/modules/social.py +1 -1
- bbot/modules/subdomainradar.py +1 -1
- bbot/modules/telerik.py +7 -7
- bbot/modules/templates/bucket.py +1 -1
- bbot/modules/templates/github.py +1 -1
- bbot/modules/templates/shodan.py +1 -1
- bbot/modules/templates/subdomain_enum.py +1 -1
- bbot/modules/templates/webhook.py +1 -1
- bbot/modules/trufflehog.py +2 -2
- bbot/modules/url_manipulation.py +3 -3
- bbot/modules/urlscan.py +1 -1
- bbot/modules/viewdns.py +1 -1
- bbot/modules/wafw00f.py +1 -1
- bbot/scanner/preset/args.py +10 -11
- bbot/scanner/preset/environ.py +0 -1
- bbot/scanner/preset/preset.py +9 -9
- bbot/scanner/scanner.py +17 -17
- bbot/scanner/target.py +1 -1
- bbot/scripts/docs.py +1 -1
- bbot/test/bbot_fixtures.py +1 -1
- bbot/test/conftest.py +1 -1
- bbot/test/run_tests.sh +4 -4
- bbot/test/test_step_1/test_bbot_fastapi.py +2 -2
- bbot/test/test_step_1/test_cli.py +56 -56
- bbot/test/test_step_1/test_dns.py +15 -15
- bbot/test/test_step_1/test_engine.py +17 -17
- bbot/test/test_step_1/test_events.py +22 -22
- bbot/test/test_step_1/test_helpers.py +26 -26
- bbot/test/test_step_1/test_manager_scope_accuracy.py +306 -306
- bbot/test/test_step_1/test_modules_basic.py +52 -53
- bbot/test/test_step_1/test_presets.py +81 -81
- bbot/test/test_step_1/test_regexes.py +5 -5
- bbot/test/test_step_1/test_scan.py +4 -4
- bbot/test/test_step_1/test_target.py +25 -25
- bbot/test/test_step_1/test_web.py +5 -5
- bbot/test/test_step_2/module_tests/base.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_anubisdb.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +0 -1
- bbot/test/test_step_2/module_tests/test_module_azure_realm.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_baddns.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_baddns_direct.py +2 -4
- bbot/test/test_step_2/module_tests/test_module_bevigil.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_binaryedge.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_bucket_azure.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_builtwith.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_c99.py +9 -9
- bbot/test/test_step_2/module_tests/test_module_columbus.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_credshed.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_dehashed.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_digitorus.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +8 -9
- bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py +0 -1
- bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +0 -1
- bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py +0 -2
- bbot/test/test_step_2/module_tests/test_module_excavate.py +10 -30
- bbot/test/test_step_2/module_tests/test_module_extractous.py +9 -9
- bbot/test/test_step_2/module_tests/test_module_filedownload.py +14 -14
- bbot/test/test_step_2/module_tests/test_module_git_clone.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_gowitness.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_host_header.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_http.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_httpx.py +7 -7
- bbot/test/test_step_2/module_tests/test_module_leakix.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_myssl.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_neo4j.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_newsletters.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_ntlm.py +7 -7
- bbot/test/test_step_2/module_tests/test_module_oauth.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_otx.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +0 -6
- bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +2 -9
- bbot/test/test_step_2/module_tests/test_module_portscan.py +3 -4
- bbot/test/test_step_2/module_tests/test_module_postgres.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_rapiddns.py +9 -9
- bbot/test/test_step_2/module_tests/test_module_sitedossier.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_smuggler.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_speculate.py +2 -6
- bbot/test/test_step_2/module_tests/test_module_splunk.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_subdomains.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_trufflehog.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_wayback.py +1 -1
- {bbot-2.3.0.5370rc0.dist-info → bbot-2.3.0.5382rc0.dist-info}/METADATA +2 -2
- {bbot-2.3.0.5370rc0.dist-info → bbot-2.3.0.5382rc0.dist-info}/RECORD +157 -157
- {bbot-2.3.0.5370rc0.dist-info → bbot-2.3.0.5382rc0.dist-info}/LICENSE +0 -0
- {bbot-2.3.0.5370rc0.dist-info → bbot-2.3.0.5382rc0.dist-info}/WHEEL +0 -0
- {bbot-2.3.0.5370rc0.dist-info → bbot-2.3.0.5382rc0.dist-info}/entry_points.txt +0 -0
bbot/core/helpers/wordcloud.py
CHANGED
```diff
@@ -111,7 +111,7 @@ class WordCloud(dict):
 results = set()
 for word in words:
 h = hash(word)
-if not
+if h not in results:
 results.add(h)
 yield (word,)
 if numbers > 0:
@@ -119,7 +119,7 @@ class WordCloud(dict):
 for word in words:
 for number_mutation in self.get_number_mutations(word, n=numbers, padding=number_padding):
 h = hash(number_mutation)
-if not
+if h not in results:
 results.add(h)
 yield (number_mutation,)
 for word in words:
@@ -322,7 +322,7 @@ class WordCloud(dict):
 
 @property
 def default_filename(self):
-return self.parent_helper.preset.scan.home /
+return self.parent_helper.preset.scan.home / "wordcloud.tsv"
 
 def save(self, filename=None, limit=None):
 """
@@ -357,7 +357,7 @@ class WordCloud(dict):
 log.debug(f"Saved word cloud ({len(self):,} words) to {filename}")
 return True, filename
 else:
-log.debug(
+log.debug("No words to save")
 except Exception as e:
 import traceback
 
@@ -421,7 +421,7 @@ class Mutator(dict):
 def mutate(self, word, max_mutations=None, mutations=None):
 if mutations is None:
 mutations = self.top_mutations(max_mutations)
-for mutation
+for mutation in mutations.keys():
 ret = []
 for s in mutation:
 if s is not None:
```
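The two mutation hunks above are membership-test cleanups inside a hash-based dedup generator: each candidate word or number mutation is hashed, and a mutation is yielded only the first time its hash is seen. A freestanding sketch of that pattern (simplified, not bbot's actual WordCloud code):

```python
# Minimal sketch of the dedup-by-hash generator pattern seen in the hunks above.
def unique_mutations(words):
    results = set()
    for word in words:
        h = hash(word)
        if h not in results:   # membership test, as in the "+" lines above
            results.add(h)
            yield (word,)

print(list(unique_mutations(["admin", "dev", "admin"])))  # [('admin',), ('dev',)]
```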
bbot/core/modules.py
CHANGED
```diff
@@ -153,7 +153,7 @@ class ModuleLoader:
 else:
 log.debug(f"Preloading {module_name} from disk")
 if module_dir.name == "modules":
-namespace =
+namespace = "bbot.modules"
 else:
 namespace = f"bbot.modules.{module_dir.name}"
 try:
@@ -235,7 +235,7 @@ class ModuleLoader:
 return self.__preloaded
 
 def get_recursive_dirs(self, *dirs):
-dirs =
+dirs = {Path(d).resolve() for d in dirs}
 for d in list(dirs):
 if not d.is_dir():
 continue
@@ -337,74 +337,73 @@ class ModuleLoader:
 # look for classes
 if type(root_element) == ast.ClassDef:
 for class_attr in root_element.body:
-
 # class attributes that are dictionaries
 if type(class_attr) == ast.Assign and type(class_attr.value) == ast.Dict:
 # module options
-if any(
+if any(target.id == "options" for target in class_attr.targets):
 config.update(ast.literal_eval(class_attr.value))
 # module options
-elif any(
+elif any(target.id == "options_desc" for target in class_attr.targets):
 options_desc.update(ast.literal_eval(class_attr.value))
 # module metadata
-elif any(
+elif any(target.id == "meta" for target in class_attr.targets):
 meta = ast.literal_eval(class_attr.value)
 
 # class attributes that are lists
 if type(class_attr) == ast.Assign and type(class_attr.value) == ast.List:
 # flags
-if any(
+if any(target.id == "flags" for target in class_attr.targets):
 for flag in class_attr.value.elts:
 if type(flag.value) == str:
 flags.add(flag.value)
 # watched events
-elif any(
+elif any(target.id == "watched_events" for target in class_attr.targets):
 for event_type in class_attr.value.elts:
 if type(event_type.value) == str:
 watched_events.add(event_type.value)
 # produced events
-elif any(
+elif any(target.id == "produced_events" for target in class_attr.targets):
 for event_type in class_attr.value.elts:
 if type(event_type.value) == str:
 produced_events.add(event_type.value)
 
 # bbot module dependencies
-elif any(
+elif any(target.id == "deps_modules" for target in class_attr.targets):
 for dep_module in class_attr.value.elts:
 if type(dep_module.value) == str:
 deps_modules.add(dep_module.value)
 # python dependencies
-elif any(
+elif any(target.id == "deps_pip" for target in class_attr.targets):
 for dep_pip in class_attr.value.elts:
 if type(dep_pip.value) == str:
 deps_pip.append(dep_pip.value)
-elif any(
+elif any(target.id == "deps_pip_constraints" for target in class_attr.targets):
 for dep_pip in class_attr.value.elts:
 if type(dep_pip.value) == str:
 deps_pip_constraints.append(dep_pip.value)
 # apt dependencies
-elif any(
+elif any(target.id == "deps_apt" for target in class_attr.targets):
 for dep_apt in class_attr.value.elts:
 if type(dep_apt.value) == str:
 deps_apt.append(dep_apt.value)
 # bash dependencies
-elif any(
+elif any(target.id == "deps_shell" for target in class_attr.targets):
 for dep_shell in class_attr.value.elts:
 deps_shell.append(ast.literal_eval(dep_shell))
 # ansible playbook
-elif any(
+elif any(target.id == "deps_ansible" for target in class_attr.targets):
 ansible_tasks = ast.literal_eval(class_attr.value)
 # shared/common module dependencies
-elif any(
+elif any(target.id == "deps_common" for target in class_attr.targets):
 for dep_common in class_attr.value.elts:
 if type(dep_common.value) == str:
 deps_common.append(dep_common.value)
 
 for task in ansible_tasks:
-if
+if "become" not in task:
 task["become"] = False
 # don't sudo brew
-elif os_platform() == "darwin" and ("package" in task and task.get("become", False)
+elif os_platform() == "darwin" and ("package" in task and task.get("become", False) is True):
 task["become"] = False
 
 preloaded_data = {
@@ -437,8 +436,8 @@ class ModuleLoader:
 f'Error while preloading module "{module_file}": No shared dependency named "{dep_common}" (choices: {common_choices})'
 )
 for ansible_task in ansible_task_list:
-if any(x
-x
+if any(x is True for x in search_dict_by_key("become", ansible_task)) or any(
+x is True for x in search_dict_by_key("ansible_become", ansible_tasks)
 ):
 preloaded_data["sudo"] = True
 return preloaded_data
@@ -541,7 +540,7 @@ class ModuleLoader:
 with suppress(KeyError):
 choices.remove(modname)
 if event_type not in resolve_choices:
-resolve_choices[event_type] =
+resolve_choices[event_type] = {}
 deps = resolve_choices[event_type]
 self.add_or_create(deps, "required_by", modname)
 for c in choices:
@@ -640,7 +639,7 @@ class ModuleLoader:
 def modules_options_table(self, modules=None, mod_type=None):
 table = []
 header = ["Config Option", "Type", "Description", "Default"]
-for
+for module_options in self.modules_options(modules, mod_type).values():
 table += module_options
 return make_table(table, header)
```
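The large hunk above rewrites the `any(...)` checks in bbot's AST-based module preloader, which reads class-level assignments such as `options`, `flags`, and `watched_events` with `ast.literal_eval` so module metadata can be collected without importing or executing the module. A minimal, self-contained sketch of that technique (assumed structure and field names, not the real ModuleLoader):

```python
import ast

SOURCE = '''
class mymodule:
    watched_events = ["DNS_NAME"]
    produced_events = ["FINDING"]
    flags = ["passive", "safe"]
    options = {"api_key": ""}
'''

def preload(source):
    # collect metadata from class-level assignments without executing any module code
    preloaded = {"watched_events": set(), "produced_events": set(), "flags": set(), "config": {}}
    for root_element in ast.parse(source).body:
        if not isinstance(root_element, ast.ClassDef):
            continue
        for class_attr in root_element.body:
            if not isinstance(class_attr, ast.Assign):
                continue
            names = {t.id for t in class_attr.targets if isinstance(t, ast.Name)}
            if isinstance(class_attr.value, ast.Dict) and "options" in names:
                preloaded["config"].update(ast.literal_eval(class_attr.value))
            elif isinstance(class_attr.value, ast.List):
                for key in ("watched_events", "produced_events", "flags"):
                    if key in names:
                        preloaded[key].update(ast.literal_eval(class_attr.value))
    return preloaded

print(preload(SOURCE))
```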
bbot/db/sql/models.py
CHANGED
bbot/modules/azure_tenant.py
CHANGED
```diff
@@ -102,7 +102,7 @@ class azure_tenant(BaseModule):
 status_code = getattr(r, "status_code", 0)
 if status_code not in (200, 421):
 self.verbose(f'Error retrieving azure_tenant domains for "{domain}" (status code: {status_code})')
-return set(),
+return set(), {}
 found_domains = list(set(await self.helpers.re.findall(self.d_xml_regex, r.text)))
 domains = set()
 
@@ -116,7 +116,7 @@
 self.scan.word_cloud.absorb_word(d)
 
 r = await openid_task
-openid_config =
+openid_config = {}
 with suppress(Exception):
 openid_config = r.json()
```
bbot/modules/baddns.py
CHANGED
```diff
@@ -55,7 +55,6 @@ class baddns(BaseModule):
 return True
 
 async def handle_event(self, event):
-
 tasks = []
 for ModuleClass in self.select_modules():
 kwargs = {
@@ -75,7 +74,6 @@
 tasks.append((module_instance, task))
 
 async for completed_task in self.helpers.as_completed([task for _, task in tasks]):
-
 module_instance = next((m for m, t in tasks if t == completed_task), None)
 try:
 task_result = await completed_task
```
bbot/modules/baddns_direct.py
CHANGED
bbot/modules/base.py
CHANGED
```diff
@@ -311,7 +311,7 @@ class BaseModule:
 if self.auth_secret:
 try:
 await self.ping()
-self.hugesuccess(
+self.hugesuccess("API is ready")
 return True, ""
 except Exception as e:
 self.trace(traceback.format_exc())
@@ -332,10 +332,10 @@
 
 def cycle_api_key(self):
 if len(self._api_keys) > 1:
-self.verbose(
+self.verbose("Cycling API key")
 self._api_keys.insert(0, self._api_keys.pop())
 else:
-self.debug(
+self.debug("No extra API keys to cycle")
 
 @property
 def api_retries(self):
@@ -669,7 +669,7 @@
 if self.incoming_event_queue is not False:
 event = await self.incoming_event_queue.get()
 else:
-self.debug(
+self.debug("Event queue is in bad state")
 break
 except asyncio.queues.QueueEmpty:
 continue
@@ -700,7 +700,7 @@
 else:
 self.error(f"Critical failure in module {self.name}: {e}")
 self.error(traceback.format_exc())
-self.log.trace(
+self.log.trace("Worker stopped")
 
 @property
 def max_scope_distance(self):
@@ -743,7 +743,7 @@
 if event.type in ("FINISHED",):
 return True, "its type is FINISHED"
 if self.errored:
-return False,
+return False, "module is in error state"
 # exclude non-watched types
 if not any(t in self.get_watched_events() for t in ("*", event.type)):
 return False, "its type is not in watched_events"
@@ -770,7 +770,7 @@
 # check duplicates
 is_incoming_duplicate, reason = self.is_incoming_duplicate(event, add=True)
 if is_incoming_duplicate and not self.accept_dupes:
-return False,
+return False, "module has already seen it" + (f" ({reason})" if reason else "")
 
 return acceptable, reason
 
@@ -863,7 +863,7 @@
 """
 async with self._task_counter.count("queue_event()", _log=False):
 if self.incoming_event_queue is False:
-self.debug(
+self.debug("Not in an acceptable state to queue incoming event")
 return
 acceptable, reason = self._event_precheck(event)
 if not acceptable:
@@ -879,7 +879,7 @@
 if event.type != "FINISHED":
 self.scan._new_activity = True
 except AttributeError:
-self.debug(
+self.debug("Not in an acceptable state to queue incoming event")
 
 async def queue_outgoing_event(self, event, **kwargs):
 """
@@ -904,7 +904,7 @@
 try:
 await self.outgoing_event_queue.put((event, kwargs))
 except AttributeError:
-self.debug(
+self.debug("Not in an acceptable state to queue outgoing event")
 
 def set_error_state(self, message=None, clear_outgoing_queue=False, critical=False):
 """
@@ -939,7 +939,7 @@
 self.errored = True
 # clear incoming queue
 if self.incoming_event_queue is not False:
-self.debug(
+self.debug("Emptying event_queue")
 with suppress(asyncio.queues.QueueEmpty):
 while 1:
 self.incoming_event_queue.get_nowait()
@@ -1126,7 +1126,7 @@
 """
 if self.api_key:
 url = url.format(api_key=self.api_key)
-if
+if "headers" not in kwargs:
 kwargs["headers"] = {}
 kwargs["headers"]["Authorization"] = f"Bearer {self.api_key}"
 return url, kwargs
@@ -1142,7 +1142,7 @@
 
 # loop until we have a successful request
 for _ in range(self.api_retries):
-if
+if "headers" not in kwargs:
 kwargs["headers"] = {}
 new_url, kwargs = self.prepare_api_request(url, kwargs)
 kwargs["url"] = new_url
@@ -1589,7 +1589,7 @@ class BaseInterceptModule(BaseModule):
 event = incoming
 kwargs = {}
 else:
-self.debug(
+self.debug("Event queue is in bad state")
 break
 except asyncio.queues.QueueEmpty:
 await asyncio.sleep(0.1)
@@ -1644,7 +1644,7 @@
 else:
 self.critical(f"Critical failure in intercept module {self.name}: {e}")
 self.critical(traceback.format_exc())
-self.log.trace(
+self.log.trace("Worker stopped")
 
 async def get_incoming_event(self):
 """
@@ -1675,7 +1675,7 @@
 try:
 self.incoming_event_queue.put_nowait((event, kwargs))
 except AttributeError:
-self.debug(
+self.debug("Not in an acceptable state to queue incoming event")
 
 async def _event_postcheck(self, event):
 return await self._event_postcheck_inner(event)
```
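Several of the BaseModule hunks above touch API-key handling: cycling through multiple keys, formatting the active key into the request URL, and injecting a bearer `Authorization` header. A hedged, standalone sketch of that pattern; the method names mirror the diff, but the class and surrounding machinery are invented for illustration:

```python
class APIClientSketch:
    """Illustrative only -- not bbot's BaseModule."""

    def __init__(self, api_keys):
        self._api_keys = list(api_keys)

    @property
    def api_key(self):
        return self._api_keys[0] if self._api_keys else None

    def cycle_api_key(self):
        # rotate so the next request uses a different key
        if len(self._api_keys) > 1:
            self._api_keys.insert(0, self._api_keys.pop())

    def prepare_api_request(self, url, kwargs):
        # substitute the key into the URL template and add a bearer header
        if self.api_key:
            url = url.format(api_key=self.api_key)
            if "headers" not in kwargs:
                kwargs["headers"] = {}
            kwargs["headers"]["Authorization"] = f"Bearer {self.api_key}"
        return url, kwargs


client = APIClientSketch(["key1", "key2"])
print(client.prepare_api_request("https://api.example.com/?key={api_key}", {}))
client.cycle_api_key()
print(client.api_key)  # -> "key2"
```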
bbot/modules/bypass403.py
CHANGED
```diff
@@ -92,7 +92,7 @@ class bypass403(BaseModule):
 return None
 
 sig = self.format_signature(sig, event)
-if sig[2]
+if sig[2] is not None:
 headers = dict(sig[2])
 else:
 headers = None
@@ -106,13 +106,13 @@
 continue
 
 # In some cases WAFs will respond with a 200 code which causes a false positive
-if subject_response
+if subject_response is not None:
 for ws in waf_strings:
 if ws in subject_response.text:
 self.debug("Rejecting result based on presence of WAF string")
 return
 
-if match
+if match is False:
 if str(subject_response.status_code)[0] != "4":
 if sig[2]:
 added_header_tuple = next(iter(sig[2].items()))
@@ -165,13 +165,13 @@
 return False
 
 def format_signature(self, sig, event):
-if sig[3]
+if sig[3] is True:
 cleaned_path = event.parsed_url.path.strip("/")
 else:
 cleaned_path = event.parsed_url.path.lstrip("/")
 kwargs = {"scheme": event.parsed_url.scheme, "netloc": event.parsed_url.netloc, "path": cleaned_path}
 formatted_url = sig[1].format(**kwargs)
-if sig[2]
+if sig[2] is not None:
 formatted_headers = {k: v.format(**kwargs) for k, v in sig[2].items()}
 else:
 formatted_headers = None
```
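The bypass403 hunks (and several modules below) standardize on explicit identity comparisons such as `is not None`, `is True`, and `is False`. A standalone illustration, with assumed values, of how `is not None` differs from a bare truthiness check:

```python
sig_headers = {}                  # headers slot present, but deliberately empty
print(bool(sig_headers))          # False -> a bare "if sig_headers:" would skip it
print(sig_headers is not None)    # True  -> the identity check still keeps it

no_headers = None
print(no_headers is not None)     # False -> only an actual None counts as "no headers"
```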
bbot/modules/c99.py
CHANGED
```diff
@@ -20,7 +20,7 @@ class c99(subdomain_enum_apikey):
 async def ping(self):
 url = f"{self.base_url}/randomnumber?key={{api_key}}&between=1,100&json"
 response = await self.api_request(url)
-assert response.json()["success"]
+assert response.json()["success"] is True, getattr(response, "text", "no response from server")
 
 async def request_url(self, query):
 url = f"{self.base_url}/subdomainfinder?key={{api_key}}&domain={self.helpers.quote(query)}&json"
```
bbot/modules/columbus.py
CHANGED
bbot/modules/deadly/ffuf.py
CHANGED
```diff
@@ -28,7 +28,7 @@ class ffuf(BaseModule):
 
 deps_common = ["ffuf"]
 
-banned_characters =
+banned_characters = {" "}
 blacklist = ["images", "css", "image"]
 
 in_scope_only = True
@@ -52,7 +52,7 @@
 
 async def handle_event(self, event):
 if self.helpers.url_depth(event.data) > self.config.get("max_depth"):
-self.debug(
+self.debug("Exceeded max depth, aborting event")
 return
 
 # only FFUF against a directory
@@ -122,7 +122,7 @@
 continue
 
 # if the codes are different, we should abort, this should also be a warning, as it is highly unusual behavior
-if len(
+if len({d["status"] for d in canary_results}) != 1:
 self.warning("Got different codes for each baseline. This could indicate load balancing")
 filters[ext] = ["ABORT", "BASELINE_CHANGED_CODES"]
 continue
@@ -148,7 +148,7 @@
 continue
 
 # we start by seeing if all of the baselines have the same character count
-if len(
+if len({d["length"] for d in canary_results}) == 1:
 self.debug("All baseline results had the same char count, we can make a filter on that")
 filters[ext] = [
 "-fc",
@@ -161,7 +161,7 @@
 continue
 
 # if that doesn't work we can try words
-if len(
+if len({d["words"] for d in canary_results}) == 1:
 self.debug("All baseline results had the same word count, we can make a filter on that")
 filters[ext] = [
 "-fc",
@@ -174,7 +174,7 @@
 continue
 
 # as a last resort we will try lines
-if len(
+if len({d["lines"] for d in canary_results}) == 1:
 self.debug("All baseline results had the same word count, we can make a filter on that")
 filters[ext] = [
 "-fc",
@@ -252,7 +252,7 @@
 self.warning(f"Exiting from FFUF run early, received an ABORT filter: [{filters[ext][1]}]")
 continue
 
-elif filters[ext]
+elif filters[ext] is None:
 pass
 
 else:
@@ -282,7 +282,7 @@
 else:
 if mode == "normal":
 # before emitting, we are going to send another baseline. This will immediately catch things like a WAF flipping blocking on us mid-scan
-if baseline
+if baseline is False:
 pre_emit_temp_canary = [
 f
 async for f in self.execute_ffuf(
```
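The baseline hunks above compare canary responses with set comprehensions: a response metric can only back a stable ffuf filter if every baseline request agrees on it, which is exactly when the set of observed values collapses to a single element. A standalone sketch of that decision flow with assumed sample data (the real module builds ffuf filter arguments rather than strings):

```python
canary_results = [
    {"status": 200, "length": 1256, "words": 210, "lines": 48},
    {"status": 200, "length": 1256, "words": 209, "lines": 48},
]

if len({d["status"] for d in canary_results}) != 1:
    decision = "ABORT: baseline changed status codes"
elif len({d["length"] for d in canary_results}) == 1:
    decision = "filter on response length"
elif len({d["words"] for d in canary_results}) == 1:
    decision = "filter on word count"
elif len({d["lines"] for d in canary_results}) == 1:
    decision = "filter on line count"
else:
    decision = "no stable filter"

print(decision)  # -> "filter on response length"
```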
bbot/modules/deadly/nuclei.py
CHANGED
```diff
@@ -226,7 +226,7 @@ class nuclei(BaseModule):
 command.append(f"-{cli_option}")
 command.append(option)
 
-if self.scan.config.get("interactsh_disable")
+if self.scan.config.get("interactsh_disable") is True:
 self.info("Disbling interactsh in accordance with global settings")
 command.append("-no-interactsh")
```
bbot/modules/deadly/vhost.py
CHANGED
```diff
@@ -23,7 +23,7 @@ class vhost(ffuf):
 }
 
 deps_common = ["ffuf"]
-banned_characters =
+banned_characters = {" ", "."}
 
 in_scope_only = True
 
@@ -73,7 +73,7 @@
 
 async def ffuf_vhost(self, host, basehost, event, wordlist=None, skip_dns_host=False):
 filters = await self.baseline_ffuf(f"{host}/", exts=[""], suffix=basehost, mode="hostheader")
-self.debug(
+self.debug("Baseline completed and returned these filters:")
 self.debug(filters)
 if not wordlist:
 wordlist = self.tempfile
@@ -90,7 +90,7 @@
 parent=event,
 context=f"{{module}} brute-forced virtual hosts for {event.data} and found {{event.type}}: {vhost_str}",
 )
-if skip_dns_host
+if skip_dns_host is False:
 await self.emit_event(
 f"{vhost_dict['vhost']}{basehost}",
 "DNS_NAME",
```
bbot/modules/dnsbimi.py
CHANGED
```diff
@@ -80,7 +80,7 @@ class dnsbimi(BaseModule):
 return False, "event is wildcard"
 
 # there's no value in inspecting service records
-if service_record(event.host)
+if service_record(event.host) is True:
 return False, "service record detected"
 
 return True
```
bbot/modules/dnsdumpster.py
CHANGED
```diff
@@ -31,7 +31,7 @@ class dnsdumpster(subdomain_enum):
 
 html = self.helpers.beautifulsoup(res1.content, "html.parser")
 if html is False:
-self.verbose(
+self.verbose("BeautifulSoup returned False")
 return ret
 
 csrftoken = None
@@ -82,7 +82,7 @@
 return ret
 html = self.helpers.beautifulsoup(res2.content, "html.parser")
 if html is False:
-self.verbose(
+self.verbose("BeautifulSoup returned False")
 return ret
 escaped_domain = re.escape(domain)
 match_pattern = re.compile(r"^[\w\.-]+\." + escaped_domain + r"$")
```
bbot/modules/dockerhub.py
CHANGED
```diff
@@ -31,7 +31,7 @@ class dockerhub(BaseModule):
 async def handle_org_stub(self, event):
 profile_name = event.data
 # docker usernames are case sensitive, so if there are capitalizations we also try a lowercase variation
-profiles_to_check =
+profiles_to_check = {profile_name, profile_name.lower()}
 for p in profiles_to_check:
 api_url = f"{self.api_url}/users/{p}"
 api_result = await self.helpers.request(api_url, follow_redirects=True)
```
bbot/modules/dotnetnuke.py
CHANGED
```diff
@@ -32,7 +32,6 @@ class dotnetnuke(BaseModule):
 self.interactsh_instance = None
 
 if self.scan.config.get("interactsh_disable", False) == False:
-
 try:
 self.interactsh_instance = self.helpers.interactsh()
 self.interactsh_domain = await self.interactsh_instance.register(callback=self.interactsh_callback)
@@ -114,7 +113,6 @@
 )
 
 if "endpoint" not in event.tags:
-
 # NewsArticlesSlider ImageHandler.ashx File Read
 result = await self.helpers.request(
 f'{event.data["url"]}/DesktopModules/dnnUI_NewsArticlesSlider/ImageHandler.ashx?img=~/web.config'
```
bbot/modules/extractous.py
CHANGED
```diff
@@ -67,7 +67,7 @@ class extractous(BaseModule):
 scope_distance_modifier = 1
 
 async def setup(self):
-self.extensions = list(
+self.extensions = list({e.lower().strip(".") for e in self.config.get("extensions", [])})
 return True
 
 async def filter_event(self, event):
```
bbot/modules/filedownload.py
CHANGED
```diff
@@ -87,7 +87,7 @@ class filedownload(BaseModule):
 scope_distance_modifier = 3
 
 async def setup(self):
-self.extensions = list(
+self.extensions = list({e.lower().strip(".") for e in self.config.get("extensions", [])})
 self.max_filesize = self.config.get("max_filesize", "10MB")
 self.download_dir = self.scan.home / "filedownload"
 self.helpers.mkdir(self.download_dir)
```
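The extractous and filedownload `setup()` changes above both build `self.extensions` from a set comprehension, which lowercases, strips leading dots, and deduplicates the configured values in one pass. The same normalization in isolation, with assumed sample config values:

```python
configured = ["PDF", ".pdf", "Docx", "docx", "xlsx"]
extensions = list({e.lower().strip(".") for e in configured})
print(sorted(extensions))  # ['docx', 'pdf', 'xlsx']
```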
bbot/modules/generic_ssrf.py
CHANGED
```diff
@@ -163,7 +163,7 @@ class generic_ssrf(BaseModule):
 self.severity = None
 self.generic_only = self.config.get("generic_only", False)
 
-if self.scan.config.get("interactsh_disable", False)
+if self.scan.config.get("interactsh_disable", False) is False:
 try:
 self.interactsh_instance = self.helpers.interactsh()
 self.interactsh_domain = await self.interactsh_instance.register(callback=self.interactsh_callback)
@@ -216,7 +216,7 @@
 self.debug("skipping result because subdomain tag was missing")
 
 async def cleanup(self):
-if self.scan.config.get("interactsh_disable", False)
+if self.scan.config.get("interactsh_disable", False) is False:
 try:
 await self.interactsh_instance.deregister()
 self.debug(
@@ -226,7 +226,7 @@
 self.warning(f"Interactsh failure: {e}")
 
 async def finish(self):
-if self.scan.config.get("interactsh_disable", False)
+if self.scan.config.get("interactsh_disable", False) is False:
 await self.helpers.sleep(5)
 try:
 for r in await self.interactsh_instance.poll():
```
bbot/modules/github_workflows.py
CHANGED
```diff
@@ -166,7 +166,7 @@ class github_workflows(github):
 main_logs = []
 with zipfile.ZipFile(file_destination, "r") as logzip:
 for name in logzip.namelist():
-if fnmatch.fnmatch(name, "*.txt") and
+if fnmatch.fnmatch(name, "*.txt") and "/" not in name:
 logzip.extract(name, folder)
 main_logs.append(folder / name)
 return main_logs
```