bbot 2.0.1.4720rc0__py3-none-any.whl → 2.3.0.5397rc0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of bbot might be problematic.
- bbot/__init__.py +1 -1
- bbot/cli.py +3 -7
- bbot/core/config/files.py +0 -1
- bbot/core/config/logger.py +34 -4
- bbot/core/core.py +21 -4
- bbot/core/engine.py +9 -8
- bbot/core/event/base.py +131 -52
- bbot/core/helpers/bloom.py +10 -3
- bbot/core/helpers/command.py +8 -7
- bbot/core/helpers/depsinstaller/installer.py +31 -13
- bbot/core/helpers/diff.py +10 -10
- bbot/core/helpers/dns/brute.py +7 -4
- bbot/core/helpers/dns/dns.py +1 -2
- bbot/core/helpers/dns/engine.py +4 -6
- bbot/core/helpers/dns/helpers.py +2 -2
- bbot/core/helpers/dns/mock.py +0 -1
- bbot/core/helpers/files.py +1 -1
- bbot/core/helpers/helper.py +7 -4
- bbot/core/helpers/interactsh.py +3 -3
- bbot/core/helpers/libmagic.py +65 -0
- bbot/core/helpers/misc.py +65 -22
- bbot/core/helpers/names_generator.py +17 -3
- bbot/core/helpers/process.py +0 -20
- bbot/core/helpers/regex.py +1 -1
- bbot/core/helpers/regexes.py +12 -6
- bbot/core/helpers/validators.py +1 -2
- bbot/core/helpers/web/client.py +1 -1
- bbot/core/helpers/web/engine.py +1 -2
- bbot/core/helpers/web/web.py +4 -114
- bbot/core/helpers/wordcloud.py +5 -5
- bbot/core/modules.py +36 -27
- bbot/core/multiprocess.py +58 -0
- bbot/core/shared_deps.py +46 -3
- bbot/db/sql/models.py +147 -0
- bbot/defaults.yml +11 -9
- bbot/modules/anubisdb.py +2 -2
- bbot/modules/apkpure.py +63 -0
- bbot/modules/azure_tenant.py +2 -2
- bbot/modules/baddns.py +35 -19
- bbot/modules/baddns_direct.py +92 -0
- bbot/modules/baddns_zone.py +3 -8
- bbot/modules/badsecrets.py +4 -3
- bbot/modules/base.py +195 -51
- bbot/modules/bevigil.py +7 -7
- bbot/modules/binaryedge.py +7 -4
- bbot/modules/bufferoverrun.py +47 -0
- bbot/modules/builtwith.py +6 -10
- bbot/modules/bypass403.py +5 -5
- bbot/modules/c99.py +10 -7
- bbot/modules/censys.py +9 -13
- bbot/modules/certspotter.py +5 -3
- bbot/modules/chaos.py +9 -7
- bbot/modules/code_repository.py +1 -0
- bbot/modules/columbus.py +3 -3
- bbot/modules/crt.py +5 -3
- bbot/modules/deadly/dastardly.py +1 -1
- bbot/modules/deadly/ffuf.py +9 -9
- bbot/modules/deadly/nuclei.py +3 -3
- bbot/modules/deadly/vhost.py +4 -3
- bbot/modules/dehashed.py +1 -1
- bbot/modules/digitorus.py +1 -1
- bbot/modules/dnsbimi.py +145 -0
- bbot/modules/dnscaa.py +3 -3
- bbot/modules/dnsdumpster.py +4 -4
- bbot/modules/dnstlsrpt.py +144 -0
- bbot/modules/docker_pull.py +7 -5
- bbot/modules/dockerhub.py +2 -2
- bbot/modules/dotnetnuke.py +18 -19
- bbot/modules/emailformat.py +1 -1
- bbot/modules/extractous.py +122 -0
- bbot/modules/filedownload.py +9 -7
- bbot/modules/fullhunt.py +7 -4
- bbot/modules/generic_ssrf.py +5 -5
- bbot/modules/github_codesearch.py +3 -2
- bbot/modules/github_org.py +4 -4
- bbot/modules/github_workflows.py +4 -4
- bbot/modules/gitlab.py +2 -5
- bbot/modules/google_playstore.py +93 -0
- bbot/modules/gowitness.py +48 -50
- bbot/modules/hackertarget.py +5 -3
- bbot/modules/host_header.py +5 -5
- bbot/modules/httpx.py +1 -4
- bbot/modules/hunterio.py +3 -9
- bbot/modules/iis_shortnames.py +19 -30
- bbot/modules/internal/cloudcheck.py +27 -12
- bbot/modules/internal/dnsresolve.py +22 -20
- bbot/modules/internal/excavate.py +85 -48
- bbot/modules/internal/speculate.py +41 -32
- bbot/modules/internetdb.py +4 -2
- bbot/modules/ip2location.py +3 -5
- bbot/modules/ipneighbor.py +1 -1
- bbot/modules/ipstack.py +3 -8
- bbot/modules/jadx.py +87 -0
- bbot/modules/leakix.py +11 -10
- bbot/modules/myssl.py +2 -2
- bbot/modules/newsletters.py +2 -2
- bbot/modules/otx.py +5 -3
- bbot/modules/output/asset_inventory.py +7 -7
- bbot/modules/output/base.py +1 -1
- bbot/modules/output/csv.py +1 -1
- bbot/modules/output/http.py +20 -14
- bbot/modules/output/mysql.py +51 -0
- bbot/modules/output/neo4j.py +7 -2
- bbot/modules/output/postgres.py +49 -0
- bbot/modules/output/slack.py +0 -1
- bbot/modules/output/sqlite.py +29 -0
- bbot/modules/output/stdout.py +2 -2
- bbot/modules/output/teams.py +107 -6
- bbot/modules/paramminer_headers.py +5 -8
- bbot/modules/passivetotal.py +13 -13
- bbot/modules/portscan.py +32 -6
- bbot/modules/postman.py +50 -126
- bbot/modules/postman_download.py +220 -0
- bbot/modules/rapiddns.py +3 -8
- bbot/modules/report/asn.py +11 -11
- bbot/modules/robots.py +3 -3
- bbot/modules/securitytrails.py +7 -10
- bbot/modules/securitytxt.py +1 -1
- bbot/modules/shodan_dns.py +7 -9
- bbot/modules/sitedossier.py +1 -1
- bbot/modules/skymem.py +2 -2
- bbot/modules/social.py +2 -1
- bbot/modules/subdomaincenter.py +1 -1
- bbot/modules/subdomainradar.py +160 -0
- bbot/modules/telerik.py +8 -8
- bbot/modules/templates/bucket.py +1 -1
- bbot/modules/templates/github.py +22 -14
- bbot/modules/templates/postman.py +21 -0
- bbot/modules/templates/shodan.py +14 -13
- bbot/modules/templates/sql.py +95 -0
- bbot/modules/templates/subdomain_enum.py +51 -16
- bbot/modules/templates/webhook.py +2 -4
- bbot/modules/trickest.py +8 -37
- bbot/modules/trufflehog.py +10 -12
- bbot/modules/url_manipulation.py +3 -3
- bbot/modules/urlscan.py +1 -1
- bbot/modules/viewdns.py +1 -1
- bbot/modules/virustotal.py +8 -30
- bbot/modules/wafw00f.py +1 -1
- bbot/modules/wayback.py +1 -1
- bbot/modules/wpscan.py +17 -11
- bbot/modules/zoomeye.py +11 -6
- bbot/presets/baddns-thorough.yml +12 -0
- bbot/presets/fast.yml +16 -0
- bbot/presets/kitchen-sink.yml +1 -0
- bbot/presets/spider.yml +4 -0
- bbot/presets/subdomain-enum.yml +7 -7
- bbot/scanner/manager.py +5 -16
- bbot/scanner/preset/args.py +44 -26
- bbot/scanner/preset/environ.py +7 -2
- bbot/scanner/preset/path.py +7 -4
- bbot/scanner/preset/preset.py +36 -23
- bbot/scanner/scanner.py +172 -62
- bbot/scanner/target.py +236 -434
- bbot/scripts/docs.py +1 -1
- bbot/test/bbot_fixtures.py +13 -3
- bbot/test/conftest.py +132 -100
- bbot/test/fastapi_test.py +17 -0
- bbot/test/owasp_mastg.apk +0 -0
- bbot/test/run_tests.sh +4 -4
- bbot/test/test.conf +2 -0
- bbot/test/test_step_1/test_bbot_fastapi.py +82 -0
- bbot/test/test_step_1/test_bloom_filter.py +2 -0
- bbot/test/test_step_1/test_cli.py +138 -64
- bbot/test/test_step_1/test_dns.py +62 -25
- bbot/test/test_step_1/test_engine.py +17 -17
- bbot/test/test_step_1/test_events.py +183 -28
- bbot/test/test_step_1/test_helpers.py +64 -28
- bbot/test/test_step_1/test_manager_deduplication.py +1 -1
- bbot/test/test_step_1/test_manager_scope_accuracy.py +333 -330
- bbot/test/test_step_1/test_modules_basic.py +68 -70
- bbot/test/test_step_1/test_presets.py +184 -96
- bbot/test/test_step_1/test_python_api.py +7 -2
- bbot/test/test_step_1/test_regexes.py +35 -5
- bbot/test/test_step_1/test_scan.py +39 -5
- bbot/test/test_step_1/test_scope.py +4 -3
- bbot/test/test_step_1/test_target.py +243 -145
- bbot/test/test_step_1/test_web.py +14 -8
- bbot/test/test_step_2/module_tests/base.py +15 -7
- bbot/test/test_step_2/module_tests/test_module_anubisdb.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_apkpure.py +71 -0
- bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +0 -1
- bbot/test/test_step_2/module_tests/test_module_azure_realm.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_baddns.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_baddns_direct.py +62 -0
- bbot/test/test_step_2/module_tests/test_module_bevigil.py +29 -2
- bbot/test/test_step_2/module_tests/test_module_binaryedge.py +4 -2
- bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_bucket_azure.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py +35 -0
- bbot/test/test_step_2/module_tests/test_module_builtwith.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_bypass403.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_c99.py +126 -0
- bbot/test/test_step_2/module_tests/test_module_censys.py +4 -1
- bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +4 -0
- bbot/test/test_step_2/module_tests/test_module_code_repository.py +11 -1
- bbot/test/test_step_2/module_tests/test_module_columbus.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_credshed.py +3 -3
- bbot/test/test_step_2/module_tests/test_module_dastardly.py +2 -1
- bbot/test/test_step_2/module_tests/test_module_dehashed.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_digitorus.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_discord.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +103 -0
- bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +9 -10
- bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +64 -0
- bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py +0 -8
- bbot/test/test_step_2/module_tests/test_module_excavate.py +17 -37
- bbot/test/test_step_2/module_tests/test_module_extractous.py +54 -0
- bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_filedownload.py +14 -14
- bbot/test/test_step_2/module_tests/test_module_git_clone.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_github_org.py +19 -8
- bbot/test/test_step_2/module_tests/test_module_github_workflows.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_gitlab.py +9 -4
- bbot/test/test_step_2/module_tests/test_module_google_playstore.py +83 -0
- bbot/test/test_step_2/module_tests/test_module_gowitness.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_host_header.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_http.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_httpx.py +10 -8
- bbot/test/test_step_2/module_tests/test_module_hunterio.py +68 -4
- bbot/test/test_step_2/module_tests/test_module_jadx.py +55 -0
- bbot/test/test_step_2/module_tests/test_module_json.py +22 -9
- bbot/test/test_step_2/module_tests/test_module_leakix.py +7 -3
- bbot/test/test_step_2/module_tests/test_module_mysql.py +76 -0
- bbot/test/test_step_2/module_tests/test_module_myssl.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_neo4j.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_newsletters.py +6 -6
- bbot/test/test_step_2/module_tests/test_module_ntlm.py +7 -7
- bbot/test/test_step_2/module_tests/test_module_oauth.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_otx.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py +1 -2
- bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +0 -6
- bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +2 -9
- bbot/test/test_step_2/module_tests/test_module_passivetotal.py +3 -1
- bbot/test/test_step_2/module_tests/test_module_portscan.py +9 -8
- bbot/test/test_step_2/module_tests/test_module_postgres.py +74 -0
- bbot/test/test_step_2/module_tests/test_module_postman.py +84 -253
- bbot/test/test_step_2/module_tests/test_module_postman_download.py +439 -0
- bbot/test/test_step_2/module_tests/test_module_rapiddns.py +93 -1
- bbot/test/test_step_2/module_tests/test_module_shodan_dns.py +20 -1
- bbot/test/test_step_2/module_tests/test_module_sitedossier.py +2 -2
- bbot/test/test_step_2/module_tests/test_module_smuggler.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_social.py +11 -1
- bbot/test/test_step_2/module_tests/test_module_speculate.py +2 -6
- bbot/test/test_step_2/module_tests/test_module_splunk.py +4 -4
- bbot/test/test_step_2/module_tests/test_module_sqlite.py +18 -0
- bbot/test/test_step_2/module_tests/test_module_sslcert.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_stdout.py +5 -3
- bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_subdomainradar.py +208 -0
- bbot/test/test_step_2/module_tests/test_module_subdomains.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_teams.py +8 -6
- bbot/test/test_step_2/module_tests/test_module_telerik.py +1 -1
- bbot/test/test_step_2/module_tests/test_module_trufflehog.py +317 -14
- bbot/test/test_step_2/module_tests/test_module_wayback.py +1 -1
- bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py +2 -2
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/METADATA +48 -18
- bbot-2.3.0.5397rc0.dist-info/RECORD +421 -0
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/WHEEL +1 -1
- bbot/modules/unstructured.py +0 -163
- bbot/test/test_step_2/module_tests/test_module_unstructured.py +0 -102
- bbot-2.0.1.4720rc0.dist-info/RECORD +0 -387
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/LICENSE +0 -0
- {bbot-2.0.1.4720rc0.dist-info → bbot-2.3.0.5397rc0.dist-info}/entry_points.txt +0 -0
bbot/core/helpers/process.py
CHANGED
@@ -1,17 +1,12 @@
 import logging
 import traceback
 import threading
-import multiprocessing
 from multiprocessing.context import SpawnProcess
 
 from .misc import in_exception_chain
 
 
-current_process = multiprocessing.current_process()
-
-
 class BBOTThread(threading.Thread):
-
     default_name = "default bbot thread"
 
     def __init__(self, *args, **kwargs):
@@ -28,7 +23,6 @@ class BBOTThread(threading.Thread):
 
 
 class BBOTProcess(SpawnProcess):
-
     default_name = "bbot process pool"
 
     def __init__(self, *args, **kwargs):
@@ -57,17 +51,3 @@ class BBOTProcess(SpawnProcess):
             if not in_exception_chain(e, (KeyboardInterrupt,)):
                 log.warning(f"Error in {self.name}: {e}")
                 log.trace(traceback.format_exc())
-
-
-if current_process.name == "MainProcess":
-    # if this is the main bbot process, set the logger and queue for the first time
-    from bbot.core import CORE
-    from functools import partialmethod
-
-    BBOTProcess.__init__ = partialmethod(
-        BBOTProcess.__init__, log_level=CORE.logger.log_level, log_queue=CORE.logger.queue
-    )
-
-    # this makes our process class the default for process pools, etc.
-    mp_context = multiprocessing.get_context("spawn")
-    mp_context.Process = BBOTProcess
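The block removed at the bottom of process.py previously ran at import time in the main process: it pre-bound the logger's level and queue onto BBOTProcess.__init__ with functools.partialmethod and made BBOTProcess the default Process class for the spawn context. A minimal sketch of that pre-binding pattern, using stand-in names and values rather than BBOT's own:

import multiprocessing
from functools import partialmethod
from multiprocessing.context import SpawnProcess

class DemoProcess(SpawnProcess):  # hypothetical stand-in for BBOTProcess
    def __init__(self, *args, log_level=None, log_queue=None, **kwargs):
        # keep the extra logging kwargs, then defer to the normal process init
        self.log_level = log_level
        self.log_queue = log_queue
        super().__init__(*args, **kwargs)

# pre-bind the keyword arguments once, so every later instantiation inherits them
DemoProcess.__init__ = partialmethod(DemoProcess.__init__, log_level="DEBUG", log_queue=None)

# make the subclass the default Process for anything that uses the spawn context
mp_context = multiprocessing.get_context("spawn")
mp_context.Process = DemoProcess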
bbot/core/helpers/regex.py
CHANGED
@@ -41,7 +41,7 @@ class RegexHelper:
         """
         if not isinstance(compiled_regexes, dict):
             raise ValueError('compiled_regexes must be a dictionary like this: {"regex_name": <compiled_regex>}')
-        for
+        for v in compiled_regexes.values():
             self.ensure_compiled_regex(v)
 
         tasks = {}
bbot/core/helpers/regexes.py
CHANGED
@@ -23,7 +23,7 @@ num_regex = re.compile(r"\d+")
 _ipv4_regex = r"(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(?:\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}"
 ipv4_regex = re.compile(_ipv4_regex, re.I)
 
-# IPv6 is complicated, so we have
+# IPv6 is complicated, so we have accommodate alternative patterns,
 # :(:[A-F0-9]{1,4}){1,7} == ::1, ::ffff:1
 # ([A-F0-9]{1,4}:){1,7}: == 2001::, 2001:db8::, 2001:db8:0:1:2:3::
 # ([A-F0-9]{1,4}:){1,6}:([A-F0-9]{1,4}) == 2001::1, 2001:db8::1, 2001:db8:0:1:2:3::1
@@ -36,11 +36,12 @@ _ip_range_regexes = (
     _ipv4_regex + r"\/[0-9]{1,2}",
     _ipv6_regex + r"\/[0-9]{1,3}",
 )
-ip_range_regexes =
+ip_range_regexes = [re.compile(r, re.I) for r in _ip_range_regexes]
 
 # dns names with periods
 _dns_name_regex = r"(?:\w(?:[\w-]{0,100}\w)?\.)+(?:[xX][nN]--)?[^\W_]{1,63}\.?"
-
+dns_name_extraction_regex = re.compile(_dns_name_regex, re.I)
+dns_name_validation_regex = re.compile(r"^" + _dns_name_regex + r"$", re.I)
 
 # dns names without periods
 _hostname_regex = r"(?!\w*\.\w+)\w(?:[\w-]{0,100}\w)?"
@@ -54,20 +55,23 @@ ptr_regex = re.compile(_ptr_regex)
 # uuid regex
 _uuid_regex = r"[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}"
 uuid_regex = re.compile(_uuid_regex, re.I)
+# event uuid regex
+_event_uuid_regex = r"[0-9A-Z_]+:[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}"
+event_uuid_regex = re.compile(_event_uuid_regex, re.I)
 
 _open_port_regexes = (
     _dns_name_regex + r":[0-9]{1,5}",
     _hostname_regex + r":[0-9]{1,5}",
     r"\[" + _ipv6_regex + r"\]:[0-9]{1,5}",
 )
-open_port_regexes =
+open_port_regexes = [re.compile(r, re.I) for r in _open_port_regexes]
 
 _url_regexes = (
     r"https?://" + _dns_name_regex + r"(?::[0-9]{1,5})?(?:(?:/|\?).*)?",
     r"https?://" + _hostname_regex + r"(?::[0-9]{1,5})?(?:(?:/|\?).*)?",
     r"https?://\[" + _ipv6_regex + r"\](?::[0-9]{1,5})?(?:(?:/|\?).*)?",
 )
-url_regexes =
+url_regexes = [re.compile(r, re.I) for r in _url_regexes]
 
 _double_slash_regex = r"/{2,}"
 double_slash_regex = re.compile(_double_slash_regex)
@@ -114,7 +118,7 @@ event_type_regexes = OrderedDict(
 scan_name_regex = re.compile(r"[a-z]{3,20}_[a-z]{3,20}")
 
 
-# For use with excavate
+# For use with excavate parameters extractor
 input_tag_regex = re.compile(
     r"<input[^>]+?name=[\"\']?([\.$\w]+)[\"\']?(?:[^>]*?value=[\"\']([=+\/\w]*)[\"\'])?[^>]*>"
 )
@@ -152,3 +156,5 @@ extract_host_regex = re.compile(_extract_host_regex, re.I)
 # for use in recursive_decode()
 encoded_regex = re.compile(r"%[0-9a-fA-F]{2}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\[ntrbv]")
 backslash_regex = re.compile(r"(?P<slashes>\\+)(?P<char>[ntrvb])")
+
+uuid_regex = re.compile(r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}")
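The regexes diff adds an anchored validation variant of the DNS-name pattern alongside the unanchored extraction variant. A small illustrative snippet (the sample strings below are made up, not from the package) showing why both are kept:

import re

_dns_name_regex = r"(?:\w(?:[\w-]{0,100}\w)?\.)+(?:[xX][nN]--)?[^\W_]{1,63}\.?"
dns_name_extraction_regex = re.compile(_dns_name_regex, re.I)
dns_name_validation_regex = re.compile(r"^" + _dns_name_regex + r"$", re.I)

text = "found www.example.com and api.example.net in the response"
# unanchored: pulls every DNS name out of free text
print(dns_name_extraction_regex.findall(text))
# anchored: the entire string must be a DNS name
print(bool(dns_name_validation_regex.match("www.example.com")))  # True
print(bool(dns_name_validation_regex.match(text)))               # False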
bbot/core/helpers/validators.py
CHANGED
@@ -132,7 +132,7 @@ def validate_host(host: Union[str, ipaddress.IPv4Address, ipaddress.IPv6Address]
 @validator
 def validate_severity(severity: str):
     severity = str(severity).strip().upper()
-    if not
+    if severity not in ("UNKNOWN", "INFO", "LOW", "MEDIUM", "HIGH", "CRITICAL"):
         raise ValueError(f"Invalid severity: {severity}")
     return severity
 
@@ -299,7 +299,6 @@ def is_email(email):
 
 
 class Validators:
-
     def __init__(self, parent_helper):
         self.parent_helper = parent_helper
 
bbot/core/helpers/web/client.py
CHANGED
bbot/core/helpers/web/engine.py
CHANGED
@@ -14,7 +14,6 @@ log = logging.getLogger("bbot.core.helpers.web.engine")
 
 
 class HTTPEngine(EngineServer):
-
     CMDS = {
         0: "request",
         1: "request_batch",
@@ -138,7 +137,7 @@ class HTTPEngine(EngineServer):
         if max_size is not None:
             max_size = human_to_bytes(max_size)
         kwargs["follow_redirects"] = follow_redirects
-        if
+        if "method" not in kwargs:
            kwargs["method"] = "GET"
         try:
             total_size = 0
bbot/core/helpers/web/web.py
CHANGED
@@ -1,7 +1,5 @@
-import re
 import logging
 import warnings
-import traceback
 from pathlib import Path
 from bs4 import BeautifulSoup
 
@@ -21,7 +19,6 @@ log = logging.getLogger("bbot.core.helpers.web")
 
 
 class WebHelper(EngineClient):
-
     SERVER_CLASS = HTTPEngine
     ERROR_CLASS = WebError
 
@@ -60,7 +57,7 @@ class WebHelper(EngineClient):
         self.ssl_verify = self.config.get("ssl_verify", False)
         engine_debug = self.config.get("engine", {}).get("debug", False)
         super().__init__(
-            server_kwargs={"config": self.config, "target": self.parent_helper.preset.target.
+            server_kwargs={"config": self.config, "target": self.parent_helper.preset.target.minimal},
             debug=engine_debug,
         )
 
@@ -264,7 +261,7 @@ class WebHelper(EngineClient):
         """
         if not path:
             raise WordlistError(f"Invalid wordlist: {path}")
-        if
+        if "cache_hrs" not in kwargs:
            kwargs["cache_hrs"] = 720
         if self.parent_helper.is_url(path):
             filename = await self.download(str(path), **kwargs)
@@ -288,66 +285,6 @@ class WebHelper(EngineClient):
                 f.write(line)
         return truncated_filename
 
-    async def api_page_iter(self, url, page_size=100, json=True, next_key=None, **requests_kwargs):
-        """
-        An asynchronous generator function for iterating through paginated API data.
-
-        This function continuously makes requests to a specified API URL, incrementing the page number
-        or applying a custom pagination function, and yields the received data one page at a time.
-        It is well-suited for APIs that provide paginated results.
-
-        Args:
-            url (str): The initial API URL. Can contain placeholders for 'page', 'page_size', and 'offset'.
-            page_size (int, optional): The number of items per page. Defaults to 100.
-            json (bool, optional): If True, attempts to deserialize the response content to a JSON object. Defaults to True.
-            next_key (callable, optional): A function that takes the last page's data and returns the URL for the next page. Defaults to None.
-            **requests_kwargs: Arbitrary keyword arguments that will be forwarded to the HTTP request function.
-
-        Yields:
-            dict or httpx.Response: If 'json' is True, yields a dictionary containing the parsed JSON data. Otherwise, yields the raw HTTP response.
-
-        Note:
-            The loop will continue indefinitely unless manually stopped. Make sure to break out of the loop once the last page has been received.
-
-        Examples:
-            >>> agen = api_page_iter('https://api.example.com/data?page={page}&page_size={page_size}')
-            >>> try:
-            >>>     async for page in agen:
-            >>>         subdomains = page["subdomains"]
-            >>>         self.hugesuccess(subdomains)
-            >>>         if not subdomains:
-            >>>             break
-            >>> finally:
-            >>>     agen.aclose()
-        """
-        page = 1
-        offset = 0
-        result = None
-        while 1:
-            if result and callable(next_key):
-                try:
-                    new_url = next_key(result)
-                except Exception as e:
-                    log.debug(f"Failed to extract next page of results from {url}: {e}")
-                    log.debug(traceback.format_exc())
-            else:
-                new_url = url.format(page=page, page_size=page_size, offset=offset)
-            result = await self.request(new_url, **requests_kwargs)
-            if result is None:
-                log.verbose(f"api_page_iter() got no response for {url}")
-                break
-            try:
-                if json:
-                    result = result.json()
-                yield result
-            except Exception:
-                log.warning(f'Error in api_page_iter() for url: "{new_url}"')
-                log.trace(traceback.format_exc())
-                break
-            finally:
-                offset += page_size
-                page += 1
-
     async def curl(self, *args, **kwargs):
         """
         An asynchronous function that runs a cURL command with specified arguments and options.
@@ -413,7 +350,7 @@ class WebHelper(EngineClient):
                 headers[hk] = hv
 
         # add the timeout
-        if
+        if "timeout" not in kwargs:
            timeout = http_timeout
 
            curl_command.append("-m")
@@ -514,7 +451,7 @@ class WebHelper(EngineClient):
         Perform an html parse of the 'markup' argument and return a soup instance
 
         >>> email_type = soup.find(type="email")
-        Searches the soup instance for all
+        Searches the soup instance for all occurrences of the passed in argument
         """
         try:
             soup = BeautifulSoup(
@@ -525,53 +462,6 @@ class WebHelper(EngineClient):
             log.debug(f"Error parsing beautifulsoup: {e}")
             return False
 
-    user_keywords = [re.compile(r, re.I) for r in ["user", "login", "email"]]
-    pass_keywords = [re.compile(r, re.I) for r in ["pass"]]
-
-    def is_login_page(self, html):
-        """
-        TODO: convert this into an excavate YARA rule
-
-        Determines if the provided HTML content contains a login page.
-
-        This function parses the HTML to search for forms with input fields typically used for
-        authentication. If it identifies password fields or a combination of username and password
-        fields, it returns True.
-
-        Args:
-            html (str): The HTML content to analyze.
-
-        Returns:
-            bool: True if the HTML contains a login page, otherwise False.
-
-        Examples:
-            >>> is_login_page('<form><input type="text" name="username"><input type="password" name="password"></form>')
-            True
-
-            >>> is_login_page('<form><input type="text" name="search"></form>')
-            False
-        """
-        try:
-            soup = BeautifulSoup(html, "html.parser")
-        except Exception as e:
-            log.debug(f"Error parsing html: {e}")
-            return False
-
-        forms = soup.find_all("form")
-
-        # first, check for obvious password fields
-        for form in forms:
-            if form.find_all("input", {"type": "password"}):
-                return True
-
-        # next, check for forms that have both a user-like and password-like field
-        for form in forms:
-            user_fields = sum(bool(form.find_all("input", {"name": r})) for r in self.user_keywords)
-            pass_fields = sum(bool(form.find_all("input", {"name": r})) for r in self.pass_keywords)
-            if user_fields and pass_fields:
-                return True
-        return False
-
     def response_to_json(self, response):
         """
         Convert web response to JSON object, similar to the output of `httpx -irr -json`
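The removed api_page_iter helper was an async generator that kept requesting the next page of a paginated API until the caller broke out of the loop. A standalone sketch of the same pattern, written against httpx directly rather than BBOT's web engine; the endpoint and the "results" key below are placeholders, not a real API:

import asyncio
import httpx

async def page_iter(url_template, page_size=100):
    # yield one page of JSON at a time, forever; the caller decides when to stop
    page, offset = 1, 0
    async with httpx.AsyncClient() as client:
        while True:
            url = url_template.format(page=page, page_size=page_size, offset=offset)
            response = await client.get(url)
            yield response.json()
            offset += page_size
            page += 1

async def main():
    agen = page_iter("https://api.example.com/data?page={page}&page_size={page_size}")
    try:
        async for page in agen:
            results = page.get("results", [])
            if not results:  # break once the API returns an empty page
                break
    finally:
        await agen.aclose()

# asyncio.run(main())  # commented out: requires a live endpoint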
bbot/core/helpers/wordcloud.py
CHANGED
@@ -111,7 +111,7 @@ class WordCloud(dict):
         results = set()
         for word in words:
             h = hash(word)
-            if not
+            if h not in results:
                 results.add(h)
                 yield (word,)
         if numbers > 0:
@@ -119,7 +119,7 @@ class WordCloud(dict):
             for word in words:
                 for number_mutation in self.get_number_mutations(word, n=numbers, padding=number_padding):
                     h = hash(number_mutation)
-                    if not
+                    if h not in results:
                         results.add(h)
                         yield (number_mutation,)
         for word in words:
@@ -322,7 +322,7 @@ class WordCloud(dict):
 
     @property
     def default_filename(self):
-        return self.parent_helper.preset.scan.home /
+        return self.parent_helper.preset.scan.home / "wordcloud.tsv"
 
     def save(self, filename=None, limit=None):
         """
@@ -357,7 +357,7 @@ class WordCloud(dict):
                 log.debug(f"Saved word cloud ({len(self):,} words) to {filename}")
                 return True, filename
             else:
-                log.debug(
+                log.debug("No words to save")
         except Exception as e:
             import traceback
 
@@ -421,7 +421,7 @@ class Mutator(dict):
     def mutate(self, word, max_mutations=None, mutations=None):
        if mutations is None:
            mutations = self.top_mutations(max_mutations)
-        for mutation
+        for mutation in mutations.keys():
            ret = []
            for s in mutation:
                if s is not None:
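The corrected conditionals in the mutation generators dedupe by storing hash(word) in a seen-set rather than the words themselves. The same pattern in isolation (the sample words below are invented):

def unique(words):
    # keep only the hash of each word; duplicates are skipped, memory stays small
    seen = set()
    for word in words:
        h = hash(word)
        if h not in seen:
            seen.add(h)
            yield (word,)

print(list(unique(["admin", "dev", "admin", "staging"])))
# [('admin',), ('dev',), ('staging',)]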
bbot/core/modules.py
CHANGED
@@ -153,7 +153,7 @@ class ModuleLoader:
         else:
             log.debug(f"Preloading {module_name} from disk")
             if module_dir.name == "modules":
-                namespace =
+                namespace = "bbot.modules"
             else:
                 namespace = f"bbot.modules.{module_dir.name}"
             try:
@@ -235,7 +235,7 @@ class ModuleLoader:
         return self.__preloaded
 
     def get_recursive_dirs(self, *dirs):
-        dirs =
+        dirs = {Path(d).resolve() for d in dirs}
         for d in list(dirs):
             if not d.is_dir():
                 continue
@@ -337,77 +337,77 @@ class ModuleLoader:
             # look for classes
             if type(root_element) == ast.ClassDef:
                 for class_attr in root_element.body:
-
                     # class attributes that are dictionaries
                     if type(class_attr) == ast.Assign and type(class_attr.value) == ast.Dict:
                         # module options
-                        if any(
+                        if any(target.id == "options" for target in class_attr.targets):
                             config.update(ast.literal_eval(class_attr.value))
                         # module options
-                        elif any(
+                        elif any(target.id == "options_desc" for target in class_attr.targets):
                             options_desc.update(ast.literal_eval(class_attr.value))
                         # module metadata
-                        elif any(
+                        elif any(target.id == "meta" for target in class_attr.targets):
                             meta = ast.literal_eval(class_attr.value)
 
                     # class attributes that are lists
                     if type(class_attr) == ast.Assign and type(class_attr.value) == ast.List:
                         # flags
-                        if any(
+                        if any(target.id == "flags" for target in class_attr.targets):
                             for flag in class_attr.value.elts:
                                 if type(flag.value) == str:
                                     flags.add(flag.value)
                         # watched events
-                        elif any(
+                        elif any(target.id == "watched_events" for target in class_attr.targets):
                             for event_type in class_attr.value.elts:
                                 if type(event_type.value) == str:
                                     watched_events.add(event_type.value)
                         # produced events
-                        elif any(
+                        elif any(target.id == "produced_events" for target in class_attr.targets):
                             for event_type in class_attr.value.elts:
                                 if type(event_type.value) == str:
                                     produced_events.add(event_type.value)
 
                         # bbot module dependencies
-                        elif any(
+                        elif any(target.id == "deps_modules" for target in class_attr.targets):
                             for dep_module in class_attr.value.elts:
                                 if type(dep_module.value) == str:
                                     deps_modules.add(dep_module.value)
                         # python dependencies
-                        elif any(
+                        elif any(target.id == "deps_pip" for target in class_attr.targets):
                             for dep_pip in class_attr.value.elts:
                                 if type(dep_pip.value) == str:
                                     deps_pip.append(dep_pip.value)
-                        elif any(
+                        elif any(target.id == "deps_pip_constraints" for target in class_attr.targets):
                             for dep_pip in class_attr.value.elts:
                                 if type(dep_pip.value) == str:
                                     deps_pip_constraints.append(dep_pip.value)
                         # apt dependencies
-                        elif any(
+                        elif any(target.id == "deps_apt" for target in class_attr.targets):
                             for dep_apt in class_attr.value.elts:
                                 if type(dep_apt.value) == str:
                                     deps_apt.append(dep_apt.value)
                         # bash dependencies
-                        elif any(
+                        elif any(target.id == "deps_shell" for target in class_attr.targets):
                             for dep_shell in class_attr.value.elts:
                                 deps_shell.append(ast.literal_eval(dep_shell))
                         # ansible playbook
-                        elif any(
+                        elif any(target.id == "deps_ansible" for target in class_attr.targets):
                             ansible_tasks = ast.literal_eval(class_attr.value)
                         # shared/common module dependencies
-                        elif any(
+                        elif any(target.id == "deps_common" for target in class_attr.targets):
                             for dep_common in class_attr.value.elts:
                                 if type(dep_common.value) == str:
                                     deps_common.append(dep_common.value)
 
         for task in ansible_tasks:
-            if
+            if "become" not in task:
                 task["become"] = False
             # don't sudo brew
-            elif os_platform() == "darwin" and ("package" in task and task.get("become", False)
+            elif os_platform() == "darwin" and ("package" in task and task.get("become", False) is True):
                 task["become"] = False
 
         preloaded_data = {
+            "path": str(module_file.resolve()),
             "watched_events": sorted(watched_events),
             "produced_events": sorted(produced_events),
             "flags": sorted(flags),
@@ -436,8 +436,8 @@ class ModuleLoader:
                     f'Error while preloading module "{module_file}": No shared dependency named "{dep_common}" (choices: {common_choices})'
                 )
         for ansible_task in ansible_task_list:
-            if any(x
-                x
+            if any(x is True for x in search_dict_by_key("become", ansible_task)) or any(
+                x is True for x in search_dict_by_key("ansible_become", ansible_tasks)
             ):
                 preloaded_data["sudo"] = True
         return preloaded_data
@@ -467,14 +467,23 @@ class ModuleLoader:
         >>> isinstance(module, object)
         True
         """
-
-
-
+        preloaded = self._preloaded[module_name]
+        namespace = preloaded["namespace"]
+        try:
+            module_path = preloaded["path"]
+        except KeyError:
+            module_path = preloaded["cache_key"][0]
+        full_namespace = f"{namespace}.{module_name}"
+
+        spec = importlib.util.spec_from_file_location(full_namespace, module_path)
+        module = importlib.util.module_from_spec(spec)
+        sys.modules[full_namespace] = module
+        spec.loader.exec_module(module)
 
         # for every top-level variable in the .py file
-        for variable in
+        for variable in module.__dict__.keys():
             # get its value
-            value = getattr(
+            value = getattr(module, variable)
             with suppress(AttributeError):
                 # if it has watched_events and produced_events
                 if all(
@@ -531,7 +540,7 @@ class ModuleLoader:
                 with suppress(KeyError):
                     choices.remove(modname)
                 if event_type not in resolve_choices:
-                    resolve_choices[event_type] =
+                    resolve_choices[event_type] = {}
                 deps = resolve_choices[event_type]
                 self.add_or_create(deps, "required_by", modname)
                 for c in choices:
@@ -630,7 +639,7 @@ class ModuleLoader:
     def modules_options_table(self, modules=None, mod_type=None):
         table = []
         header = ["Config Option", "Type", "Description", "Default"]
-        for
+        for module_options in self.modules_options(modules, mod_type).values():
             table += module_options
         return make_table(table, header)
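load_module now resolves the module's file path from the preloaded metadata and imports it with importlib under a synthetic namespace. A generic sketch of that import pattern, with hypothetical path and names:

import sys
import importlib.util

def import_from_path(full_namespace, module_path):
    # build a module spec directly from a .py file, without it being on sys.path
    spec = importlib.util.spec_from_file_location(full_namespace, module_path)
    module = importlib.util.module_from_spec(spec)
    # register before executing so the module is importable by its own name
    sys.modules[full_namespace] = module
    spec.loader.exec_module(module)
    return module

# example usage (path is illustrative):
# mymod = import_from_path("bbot.modules.mymodule", "/path/to/mymodule.py")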
bbot/core/multiprocess.py
ADDED
@@ -0,0 +1,58 @@
+import os
+import atexit
+from contextlib import suppress
+
+
+class SharedInterpreterState:
+    """
+    A class to track the primary BBOT process.
+
+    Used to prevent spawning multiple unwanted processes with multiprocessing.
+    """
+
+    def __init__(self):
+        self.main_process_var_name = "_BBOT_MAIN_PID"
+        self.scan_process_var_name = "_BBOT_SCAN_PID"
+        atexit.register(self.cleanup)
+
+    @property
+    def is_main_process(self):
+        is_main_process = self.main_pid == os.getpid()
+        return is_main_process
+
+    @property
+    def is_scan_process(self):
+        is_scan_process = os.getpid() == self.scan_pid
+        return is_scan_process
+
+    @property
+    def main_pid(self):
+        main_pid = int(os.environ.get(self.main_process_var_name, 0))
+        if main_pid == 0:
+            main_pid = os.getpid()
+            # if main PID is not set, set it to the current PID
+            os.environ[self.main_process_var_name] = str(main_pid)
+        return main_pid
+
+    @property
+    def scan_pid(self):
+        scan_pid = int(os.environ.get(self.scan_process_var_name, 0))
+        if scan_pid == 0:
+            scan_pid = os.getpid()
+            # if scan PID is not set, set it to the current PID
+            os.environ[self.scan_process_var_name] = str(scan_pid)
+        return scan_pid
+
+    def update_scan_pid(self):
+        os.environ[self.scan_process_var_name] = str(os.getpid())
+
+    def cleanup(self):
+        with suppress(Exception):
+            if self.is_main_process:
+                with suppress(KeyError):
+                    del os.environ[self.main_process_var_name]
+                with suppress(KeyError):
+                    del os.environ[self.scan_process_var_name]
+
+
+SHARED_INTERPRETER_STATE = SharedInterpreterState()
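SharedInterpreterState records the main and scan PIDs in environment variables, so any child created by multiprocessing inherits them and can tell it is not the primary interpreter. A sketch of how a caller might consult it; the guard function below is illustrative, not code from the package:

from bbot.core.multiprocess import SHARED_INTERPRETER_STATE

def maybe_install_process_hooks():
    # only the original interpreter should set up global multiprocessing state
    if SHARED_INTERPRETER_STATE.is_main_process:
        ...
    # per-scan setup would be gated the same way
    if SHARED_INTERPRETER_STATE.is_scan_process:
        ...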