secator-0.22.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in the registry.
- secator/.gitignore +162 -0
- secator/__init__.py +0 -0
- secator/celery.py +453 -0
- secator/celery_signals.py +138 -0
- secator/celery_utils.py +320 -0
- secator/cli.py +2035 -0
- secator/cli_helper.py +395 -0
- secator/click.py +87 -0
- secator/config.py +670 -0
- secator/configs/__init__.py +0 -0
- secator/configs/profiles/__init__.py +0 -0
- secator/configs/profiles/aggressive.yaml +8 -0
- secator/configs/profiles/all_ports.yaml +7 -0
- secator/configs/profiles/full.yaml +31 -0
- secator/configs/profiles/http_headless.yaml +7 -0
- secator/configs/profiles/http_record.yaml +8 -0
- secator/configs/profiles/insane.yaml +8 -0
- secator/configs/profiles/paranoid.yaml +8 -0
- secator/configs/profiles/passive.yaml +11 -0
- secator/configs/profiles/polite.yaml +8 -0
- secator/configs/profiles/sneaky.yaml +8 -0
- secator/configs/profiles/tor.yaml +5 -0
- secator/configs/scans/__init__.py +0 -0
- secator/configs/scans/domain.yaml +31 -0
- secator/configs/scans/host.yaml +23 -0
- secator/configs/scans/network.yaml +30 -0
- secator/configs/scans/subdomain.yaml +27 -0
- secator/configs/scans/url.yaml +19 -0
- secator/configs/workflows/__init__.py +0 -0
- secator/configs/workflows/cidr_recon.yaml +48 -0
- secator/configs/workflows/code_scan.yaml +29 -0
- secator/configs/workflows/domain_recon.yaml +46 -0
- secator/configs/workflows/host_recon.yaml +95 -0
- secator/configs/workflows/subdomain_recon.yaml +120 -0
- secator/configs/workflows/url_bypass.yaml +15 -0
- secator/configs/workflows/url_crawl.yaml +98 -0
- secator/configs/workflows/url_dirsearch.yaml +62 -0
- secator/configs/workflows/url_fuzz.yaml +68 -0
- secator/configs/workflows/url_params_fuzz.yaml +66 -0
- secator/configs/workflows/url_secrets_hunt.yaml +23 -0
- secator/configs/workflows/url_vuln.yaml +91 -0
- secator/configs/workflows/user_hunt.yaml +29 -0
- secator/configs/workflows/wordpress.yaml +38 -0
- secator/cve.py +718 -0
- secator/decorators.py +7 -0
- secator/definitions.py +168 -0
- secator/exporters/__init__.py +14 -0
- secator/exporters/_base.py +3 -0
- secator/exporters/console.py +10 -0
- secator/exporters/csv.py +37 -0
- secator/exporters/gdrive.py +123 -0
- secator/exporters/json.py +16 -0
- secator/exporters/table.py +36 -0
- secator/exporters/txt.py +28 -0
- secator/hooks/__init__.py +0 -0
- secator/hooks/gcs.py +80 -0
- secator/hooks/mongodb.py +281 -0
- secator/installer.py +694 -0
- secator/loader.py +128 -0
- secator/output_types/__init__.py +49 -0
- secator/output_types/_base.py +108 -0
- secator/output_types/certificate.py +78 -0
- secator/output_types/domain.py +50 -0
- secator/output_types/error.py +42 -0
- secator/output_types/exploit.py +58 -0
- secator/output_types/info.py +24 -0
- secator/output_types/ip.py +47 -0
- secator/output_types/port.py +55 -0
- secator/output_types/progress.py +36 -0
- secator/output_types/record.py +36 -0
- secator/output_types/stat.py +41 -0
- secator/output_types/state.py +29 -0
- secator/output_types/subdomain.py +45 -0
- secator/output_types/tag.py +69 -0
- secator/output_types/target.py +38 -0
- secator/output_types/url.py +112 -0
- secator/output_types/user_account.py +41 -0
- secator/output_types/vulnerability.py +101 -0
- secator/output_types/warning.py +30 -0
- secator/report.py +140 -0
- secator/rich.py +130 -0
- secator/runners/__init__.py +14 -0
- secator/runners/_base.py +1240 -0
- secator/runners/_helpers.py +218 -0
- secator/runners/celery.py +18 -0
- secator/runners/command.py +1178 -0
- secator/runners/python.py +126 -0
- secator/runners/scan.py +87 -0
- secator/runners/task.py +81 -0
- secator/runners/workflow.py +168 -0
- secator/scans/__init__.py +29 -0
- secator/serializers/__init__.py +8 -0
- secator/serializers/dataclass.py +39 -0
- secator/serializers/json.py +45 -0
- secator/serializers/regex.py +25 -0
- secator/tasks/__init__.py +8 -0
- secator/tasks/_categories.py +487 -0
- secator/tasks/arjun.py +113 -0
- secator/tasks/arp.py +53 -0
- secator/tasks/arpscan.py +70 -0
- secator/tasks/bbot.py +372 -0
- secator/tasks/bup.py +118 -0
- secator/tasks/cariddi.py +193 -0
- secator/tasks/dalfox.py +87 -0
- secator/tasks/dirsearch.py +84 -0
- secator/tasks/dnsx.py +186 -0
- secator/tasks/feroxbuster.py +93 -0
- secator/tasks/ffuf.py +135 -0
- secator/tasks/fping.py +85 -0
- secator/tasks/gau.py +102 -0
- secator/tasks/getasn.py +60 -0
- secator/tasks/gf.py +36 -0
- secator/tasks/gitleaks.py +96 -0
- secator/tasks/gospider.py +84 -0
- secator/tasks/grype.py +109 -0
- secator/tasks/h8mail.py +75 -0
- secator/tasks/httpx.py +167 -0
- secator/tasks/jswhois.py +36 -0
- secator/tasks/katana.py +203 -0
- secator/tasks/maigret.py +87 -0
- secator/tasks/mapcidr.py +42 -0
- secator/tasks/msfconsole.py +179 -0
- secator/tasks/naabu.py +85 -0
- secator/tasks/nmap.py +487 -0
- secator/tasks/nuclei.py +151 -0
- secator/tasks/search_vulns.py +225 -0
- secator/tasks/searchsploit.py +109 -0
- secator/tasks/sshaudit.py +299 -0
- secator/tasks/subfinder.py +48 -0
- secator/tasks/testssl.py +283 -0
- secator/tasks/trivy.py +130 -0
- secator/tasks/trufflehog.py +240 -0
- secator/tasks/urlfinder.py +100 -0
- secator/tasks/wafw00f.py +106 -0
- secator/tasks/whois.py +34 -0
- secator/tasks/wpprobe.py +116 -0
- secator/tasks/wpscan.py +202 -0
- secator/tasks/x8.py +94 -0
- secator/tasks/xurlfind3r.py +83 -0
- secator/template.py +294 -0
- secator/thread.py +24 -0
- secator/tree.py +196 -0
- secator/utils.py +922 -0
- secator/utils_test.py +297 -0
- secator/workflows/__init__.py +29 -0
- secator-0.22.0.dist-info/METADATA +447 -0
- secator-0.22.0.dist-info/RECORD +150 -0
- secator-0.22.0.dist-info/WHEEL +4 -0
- secator-0.22.0.dist-info/entry_points.txt +2 -0
- secator-0.22.0.dist-info/licenses/LICENSE +60 -0
secator/tasks/grype.py
ADDED
@@ -0,0 +1,109 @@
import click

from secator.config import CONFIG
from secator.decorators import task
from secator.definitions import (DELAY, FOLLOW_REDIRECT, HEADER,
                                 OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT, RETRIES,
                                 THREADS, TIMEOUT, USER_AGENT, PATH, STRING)
from secator.output_types import Vulnerability
from secator.tasks._categories import VulnCode

GRYPE_MODES = [
	'git',
	'github',
	'gitlab',
	's3',
	'filesystem',
	'gcs',
	'docker',
	'postman',
	'jenkins',
	'elasticsearch',
	'huggingface',
	'syslog',
]


def convert_mode(mode):
	return 'fs' if mode == 'filesystem' else 'repo' if mode == 'git' else mode


@task()
class grype(VulnCode):
	"""Vulnerability scanner for container images and filesystems."""
	cmd = 'grype --quiet'
	input_types = [PATH, STRING]
	output_types = [Vulnerability]
	tags = ['vuln', 'scan']
	input_flag = ''
	input_chunk_size = 1
	file_flag = None
	json_flag = None
	opt_prefix = '--'
	opts = {
		'mode': {'type': click.Choice(GRYPE_MODES), 'help': f'Scan mode ({", ".join(GRYPE_MODES)})', 'internal': True}
	}
	opt_key_value = {
		'mode': lambda x: convert_mode(x)
	}
	opt_key_map = {
		HEADER: OPT_NOT_SUPPORTED,
		DELAY: OPT_NOT_SUPPORTED,
		FOLLOW_REDIRECT: OPT_NOT_SUPPORTED,
		PROXY: OPT_NOT_SUPPORTED,
		RATE_LIMIT: OPT_NOT_SUPPORTED,
		RETRIES: OPT_NOT_SUPPORTED,
		THREADS: OPT_NOT_SUPPORTED,
		TIMEOUT: OPT_NOT_SUPPORTED,
		USER_AGENT: OPT_NOT_SUPPORTED
	}
	install_version = 'v0.91.2'
	install_cmd_pre = {'*': ['curl']}
	install_cmd = (
		f'curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b {CONFIG.dirs.bin}'
	)
	github_handle = 'anchore/grype'

	@staticmethod
	def item_loader(self, line):
		"""Load vulnerability dicts from grype line output."""
		split = [i for i in line.split(' ') if i]
		if len(split) not in [5, 6] or split[0] == 'NAME':
			return
		versions_fixed = None
		if len(split) == 5:  # no version fixed
			product, version, product_type, vuln_id, severity = tuple(split)
		elif len(split) == 6:
			product, version, versions_fixed, product_type, vuln_id, severity = tuple(split)
		extra_data = {
			'lang': product_type,
			'product': product,
			'version': version,
		}
		if versions_fixed:
			extra_data['versions_fixed'] = [c.strip() for c in versions_fixed.split(', ')]
		data = {
			'id': vuln_id,
			'name': vuln_id,
			'matched_at': self.inputs[0],
			'confidence': 'medium',
			'severity': severity.lower(),
			'provider': 'grype',
			'cvss_score': -1,
			'tags': [],
		}
		if vuln_id.startswith('GHSA'):
			data['provider'] = 'github.com'
			data['references'] = [f'https://github.com/advisories/{vuln_id}']
			vuln = VulnCode.lookup_cve_from_ghsa(vuln_id)
			if vuln:
				data.update(vuln)
				data['severity'] = data['severity'] or severity.lower()
			extra_data['ghsa_id'] = vuln_id
		elif vuln_id.startswith('CVE'):
			vuln = VulnCode.lookup_cve(vuln_id)
			if vuln:
				data.update(vuln)
				data['severity'] = data['severity'] or severity.lower()
		data['extra_data'] = extra_data
		yield data
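For orientation, a minimal sketch of what item_loader sees on one line of grype's table output. The line below is made up, and the column layout (NAME, INSTALLED, FIXED-IN, TYPE, VULNERABILITY, SEVERITY) is an assumption about grype's default table output:

# Hypothetical grype table line; a 6-field split means a fixed version is present.
line = 'flask  2.0.0  2.3.2  python  GHSA-m2qf-hxjv-5gpq  High'
split = [i for i in line.split(' ') if i]
product, version, versions_fixed, product_type, vuln_id, severity = tuple(split)
# vuln_id.startswith('GHSA') -> provider becomes 'github.com' and a CVE lookup is attempted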
secator/tasks/h8mail.py
ADDED
@@ -0,0 +1,75 @@
import os
import json
import shlex

from secator.decorators import task
from secator.definitions import EMAIL, OUTPUT_PATH
from secator.tasks._categories import OSInt
from secator.output_types import UserAccount, Info, Error


@task()
class h8mail(OSInt):
	"""Email information and password lookup tool."""
	cmd = 'h8mail'
	input_types = [EMAIL]
	output_types = [UserAccount]
	tags = ['user', 'recon', 'email']
	json_flag = '--json'
	input_flag = '--targets'
	file_flag = '-domain'
	version_flag = '--help'
	opt_prefix = '--'
	opts = {
		'config': {'type': str, 'help': 'Configuration file for API keys'},
		'local_breach': {'type': str, 'short': 'lb', 'help': 'Local breach file'}
	}
	install_version = '2.5.6'
	install_cmd = 'pipx install h8mail==[install_version] --force'

	@staticmethod
	def on_start(self):
		output_path = self.get_opt_value(OUTPUT_PATH)
		if not output_path:
			output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json'
		self.output_path = output_path
		output_path_quoted = shlex.quote(self.output_path)
		self.cmd = self.cmd.replace('--json ', f'--json {output_path_quoted} ')

	@staticmethod
	def on_cmd_done(self):
		if not os.path.exists(self.output_path):
			yield Error(message=f'Could not find JSON results in {self.output_path}')
			return

		yield Info(message=f'JSON results saved to {self.output_path}')
		with open(self.output_path, 'r') as f:
			data = json.load(f)

		targets = data['targets']
		for target in targets:
			email = target['target']
			target_data = target.get('data', [])
			pwn_num = target['pwn_num']
			if not pwn_num > 0:
				continue
			if len(target_data) > 0:
				entries = target_data[0]
				for entry in entries:
					source, site_name = tuple(entry.split(':'))
					yield UserAccount(**{
						"site_name": site_name,
						"username": email.split('@')[0],
						"email": email,
						"extra_data": {
							'source': source
						},
					})
			else:
				yield UserAccount(**{
					"username": email.split('@')[0],
					"email": email,
					"extra_data": {
						'source': self.get_opt_value('local_breach')
					},
				})
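As a reading aid, a minimal sketch of the report shape on_cmd_done walks. The data below is made up, assuming each data entry holds 'source:site' strings as implied by the split above:

# Made-up h8mail JSON report matching the parsing logic above.
data = {
	'targets': [
		{'target': 'john@example.com', 'pwn_num': 2, 'data': [['hibp:LinkedIn', 'hibp:Adobe']]},
		{'target': 'jane@example.com', 'pwn_num': 0, 'data': []},  # skipped: no pwns
	]
}
for target in data['targets']:
	if target['pwn_num'] > 0 and target.get('data'):
		for entry in target['data'][0]:
			source, site_name = entry.split(':')
			print(target['target'], site_name, source)  # -> UserAccount fields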
secator/tasks/httpx.py
ADDED
@@ -0,0 +1,167 @@
import os
import shlex

from secator.decorators import task
from secator.definitions import (DELAY, DEPTH, FILTER_CODES, FILTER_REGEX, FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT,
                                 HEADER, MATCH_CODES, MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED,
                                 PROXY, RATE_LIMIT, RETRIES, THREADS, TIMEOUT, URL, USER_AGENT, HOST, IP, HOST_PORT)
from secator.config import CONFIG
from secator.output_types import Url, Subdomain
from secator.serializers import JSONSerializer
from secator.tasks._categories import Http
from secator.utils import (sanitize_url, extract_domain_info, extract_subdomains_from_fqdn)


@task()
class httpx(Http):
	"""Fast and multi-purpose HTTP toolkit."""
	cmd = 'httpx -irh'
	input_types = [HOST, HOST_PORT, IP, URL]
	output_types = [Url, Subdomain]
	tags = ['url', 'probe']
	file_flag = '-l'
	input_flag = '-u'
	json_flag = '-json'
	opts = {
		# 'silent': {'is_flag': True, 'default': False, 'help': 'Silent mode'},
		# 'irr': {'is_flag': True, 'default': False, 'help': 'Include http request / response'},
		'fep': {'is_flag': True, 'default': False, 'help': 'Error page classifier and filtering'},
		'favicon': {'is_flag': True, 'default': False, 'help': 'Favicon hash'},
		'jarm': {'is_flag': True, 'default': False, 'help': 'JARM fingerprint'},
		'asn': {'is_flag': True, 'default': False, 'help': 'ASN detection'},
		'cdn': {'is_flag': True, 'default': False, 'help': 'CDN detection'},
		'debug_resp': {'is_flag': True, 'default': False, 'help': 'Debug response'},
		'vhost': {'is_flag': True, 'default': False, 'help': 'Probe and display server supporting VHOST'},
		'store_responses': {'is_flag': True, 'short': 'sr', 'default': CONFIG.http.store_responses, 'help': 'Save HTTP responses'},  # noqa: E501
		'screenshot': {'is_flag': True, 'short': 'ss', 'default': False, 'help': 'Screenshot response'},
		'system_chrome': {'is_flag': True, 'default': False, 'help': 'Use locally installed Chrome for screenshots'},
		'headless_options': {'is_flag': False, 'short': 'ho', 'default': None, 'help': 'Headless Chrome additional options'},
		'follow_host_redirects': {'is_flag': True, 'short': 'fhr', 'default': None, 'help': 'Follow redirects on the same host'},  # noqa: E501
		'tech_detect': {'is_flag': True, 'short': 'td', 'default': False, 'help': 'Tech detection'},
		'tls_grab': {'is_flag': True, 'short': 'tlsg', 'default': False, 'help': 'Grab some information from the TLS certificate'},  # noqa: E501
		'rstr': {'type': int, 'default': CONFIG.http.response_max_size_bytes, 'help': 'Max body size to read (bytes)'},
		'rsts': {'type': int, 'default': CONFIG.http.response_max_size_bytes, 'help': 'Max body size to save (bytes)'}
	}
	opt_key_map = {
		HEADER: 'header',
		DELAY: 'delay',
		DEPTH: OPT_NOT_SUPPORTED,
		FILTER_CODES: 'filter-code',
		FILTER_REGEX: 'filter-regex',
		FILTER_SIZE: 'filter-length',
		FILTER_WORDS: 'filter-word-count',
		FOLLOW_REDIRECT: 'follow-redirects',
		MATCH_CODES: 'match-code',
		MATCH_REGEX: 'match-regex',
		MATCH_SIZE: 'match-length',
		MATCH_WORDS: 'match-word-count',
		METHOD: 'x',
		PROXY: 'proxy',
		RATE_LIMIT: 'rate-limit',
		RETRIES: 'retries',
		THREADS: 'threads',
		TIMEOUT: 'timeout',
		USER_AGENT: OPT_NOT_SUPPORTED,
		'store_responses': 'sr',
	}
	opt_value_map = {
		DELAY: lambda x: str(x) + 's' if x else None,
	}
	item_loaders = [JSONSerializer()]
	install_pre = {'apk': ['chromium']}
	install_version = 'v1.7.0'
	install_cmd = 'go install -v github.com/projectdiscovery/httpx/cmd/httpx@[install_version]'
	github_handle = 'projectdiscovery/httpx'
	proxychains = False
	proxy_socks5 = True
	proxy_http = True
	profile = lambda opts: httpx.dynamic_profile(opts)  # noqa: E731

	@staticmethod
	def dynamic_profile(opts):
		screenshot = httpx._get_opt_value(
			opts,
			'screenshot',
			opts_conf=dict(httpx.opts, **httpx.meta_opts),
			opt_aliases=opts.get('aliases', [])
		)
		return 'cpu' if screenshot is True else 'io'

	@staticmethod
	def on_init(self):
		debug_resp = self.get_opt_value('debug_resp')
		if debug_resp:
			self.cmd = self.cmd.replace('-silent', '')
		screenshot = self.get_opt_value('screenshot')
		store_responses = self.get_opt_value('store_responses')
		if store_responses or screenshot:
			reports_folder_outputs = f'{self.reports_folder}/.outputs'
			self.cmd += f' -srd {shlex.quote(reports_folder_outputs)}'
		if screenshot:
			self.cmd += ' -esb -ehb'
		self.domains = []

	@staticmethod
	def on_json_loaded(self, item):
		item = self._preprocess_url(item)
		yield item
		tls = item.get('tls', None)
		if tls:
			subject_cn = tls.get('subject_cn', None)
			subject_an = tls.get('subject_an', [])
			cert_domains = subject_an
			if subject_cn:
				cert_domains.append(subject_cn)
			for cert_domain in cert_domains:
				subdomain = self._create_subdomain_from_tls_cert(cert_domain, item['url'])
				if subdomain:
					yield subdomain

	@staticmethod
	def on_end(self):
		store_responses = self.get_opt_value('store_responses') or CONFIG.http.store_responses
		response_dir = f'{self.reports_folder}/.outputs'
		if store_responses:
			index_rpath = f'{response_dir}/response/index.txt'
			index_spath = f'{response_dir}/screenshot/index_screenshot.txt'
			index_spath2 = f'{response_dir}/screenshot/screenshot.html'
			if os.path.exists(index_rpath):
				os.remove(index_rpath)
			if os.path.exists(index_spath):
				os.remove(index_spath)
			if os.path.exists(index_spath2):
				os.remove(index_spath2)

	def _preprocess_url(self, item):
		"""Replace time string by float, sanitize URL, get final redirect URL."""
		for k, v in item.items():
			if k == 'time':
				response_time = float(''.join(ch for ch in v if not ch.isalpha()))
				if v[-2:] == 'ms':
					response_time = response_time / 1000
				item[k] = response_time
			elif k == URL:
				item[k] = sanitize_url(v)
		item[URL] = item.get('final_url') or item[URL]
		item['request_headers'] = self.get_opt_value('header', preprocess=True)
		item['response_headers'] = item.get('header', {})
		return item

	def _create_subdomain_from_tls_cert(self, domain, url):
		"""Extract subdomains from TLS certificate."""
		if domain.startswith('*.'):
			domain = domain.lstrip('*.')
		if domain in self.domains:
			return None
		url_domain = extract_domain_info(url)
		if not url_domain:
			return None
		url_domains = extract_subdomains_from_fqdn(url_domain.fqdn, url_domain.domain, url_domain.suffix)
		if domain not in url_domains:
			return None
		self.domains.append(domain)
		return Subdomain(
			host=domain,
			domain=extract_domain_info(domain, domain_only=True),
			verified=True,
			sources=['tls'],
		)
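One detail worth seeing in isolation: the response-time normalization in _preprocess_url. A minimal sketch with made-up values, directly following the logic above:

# httpx reports e.g. '102.2ms' or '1.5s'; _preprocess_url converts both to seconds.
for raw in ('102.2ms', '1.5s'):
	t = float(''.join(ch for ch in raw if not ch.isalpha()))
	if raw[-2:] == 'ms':
		t /= 1000
	print(raw, '->', t)  # 102.2ms -> 0.1022, 1.5s -> 1.5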
secator/tasks/jswhois.py
ADDED
@@ -0,0 +1,36 @@
from secator.decorators import task
from secator.runners import Command
from secator.definitions import HOST
from secator.output_types import Tag
from secator.serializers import JSONSerializer


@task()
class jswhois(Command):
	"""WHOIS in JSON format"""
	cmd = 'jswhois'
	input_types = [HOST]
	output_types = [Tag]
	item_loaders = [JSONSerializer(list=True)]
	tags = ['domain', 'info']
	input_flag = None
	file_flag = None
	version_flag = '-V'
	install_version = '69af013b99d49191c9674cde2e2b57986f6b6bf8'
	install_cmd = 'go install -v github.com/jschauma/jswhois@[install_version]'
	install_github_bin = False
	github_handle = 'jschauma/jswhois'

	@staticmethod
	def on_json_loaded(self, item):
		last_chain = item['chain'][-1]
		last_elem = item[last_chain]
		raw = last_elem.pop('raw')
		tag = Tag(
			name='whois',
			category='info',
			match=self.inputs[0],
			value=raw,
			extra_data={'chain': last_chain}
		)
		yield tag
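A minimal sketch of the structure on_json_loaded expects from jswhois output; the servers and raw text below are made up:

# jswhois keys its output by whois server, with 'chain' listing the query order.
item = {
	'chain': ['whois.iana.org', 'whois.verisign-grs.com'],
	'whois.iana.org': {'raw': '...'},
	'whois.verisign-grs.com': {'raw': 'Domain Name: EXAMPLE.COM\n...'},
}
last_chain = item['chain'][-1]     # 'whois.verisign-grs.com'
raw = item[last_chain].pop('raw')  # raw text of the final reply -> Tag value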
secator/tasks/katana.py
ADDED
@@ -0,0 +1,203 @@
import os
import shlex

from urllib.parse import urlparse, urlunparse

from secator.decorators import task
from secator.definitions import (DELAY, DEPTH, FILTER_CODES, FILTER_REGEX, FILTER_SIZE, FILTER_WORDS,
                                 FOLLOW_REDIRECT, HEADER, MATCH_CODES, MATCH_REGEX, MATCH_SIZE, MATCH_WORDS,
                                 METHOD, OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT, RETRIES, THREADS, TIMEOUT, URL, USER_AGENT)
from secator.config import CONFIG
from secator.output_types import Url, Tag
from secator.serializers import JSONSerializer
from secator.tasks._categories import HttpCrawler

EXCLUDED_PARAMS = ['v']


@task()
class katana(HttpCrawler):
	"""Next-generation crawling and spidering framework."""
	cmd = 'katana'
	input_types = [URL]
	output_types = [Url, Tag]
	tags = ['url', 'crawl']
	file_flag = '-list'
	input_flag = '-u'
	json_flag = '-jsonl'
	opts = {
		'headless': {'is_flag': True, 'short': 'hl', 'help': 'Headless mode'},
		'system_chrome': {'is_flag': True, 'short': 'sc', 'help': 'Use locally installed Chrome browser'},
		'form_extraction': {'is_flag': True, 'short': 'fx', 'help': 'Detect forms'},
		'store_responses': {'is_flag': True, 'short': 'sr', 'default': CONFIG.http.store_responses, 'help': 'Store responses'},  # noqa: E501
		'form_fill': {'is_flag': True, 'short': 'ff', 'help': 'Enable form filling'},
		'js_crawl': {'is_flag': True, 'short': 'jc', 'default': False, 'help': 'Enable endpoint parsing / crawling in JavaScript files'},  # noqa: E501
		'jsluice': {'is_flag': True, 'short': 'jsl', 'default': False, 'help': 'Enable jsluice parsing in JavaScript files (memory intensive)'},  # noqa: E501
		'known_files': {'type': str, 'short': 'kf', 'default': 'all', 'help': 'Enable crawling of known files (all, robotstxt, sitemapxml)'},  # noqa: E501
		'omit_raw': {'is_flag': True, 'short': 'or', 'default': True, 'help': 'Omit raw requests/responses from jsonl output'},  # noqa: E501
		'omit_body': {'is_flag': True, 'short': 'ob', 'default': True, 'help': 'Omit response body from jsonl output'},
		'no_sandbox': {'is_flag': True, 'short': 'ns', 'default': False, 'help': 'Disable sandboxing'},
	}
	opt_key_map = {
		HEADER: 'headers',
		DELAY: 'delay',
		DEPTH: 'depth',
		FILTER_CODES: OPT_NOT_SUPPORTED,
		FILTER_REGEX: OPT_NOT_SUPPORTED,
		FILTER_SIZE: OPT_NOT_SUPPORTED,
		FILTER_WORDS: OPT_NOT_SUPPORTED,
		FOLLOW_REDIRECT: OPT_NOT_SUPPORTED,
		MATCH_CODES: OPT_NOT_SUPPORTED,
		MATCH_REGEX: OPT_NOT_SUPPORTED,
		MATCH_SIZE: OPT_NOT_SUPPORTED,
		MATCH_WORDS: OPT_NOT_SUPPORTED,
		METHOD: OPT_NOT_SUPPORTED,
		PROXY: 'proxy',
		RATE_LIMIT: 'rate-limit',
		RETRIES: 'retry',
		THREADS: 'concurrency',
		TIMEOUT: 'timeout',
		USER_AGENT: OPT_NOT_SUPPORTED,
		'store_responses': 'sr',
		'form_fill': 'aff'
	}
	opt_value_map = {
		DELAY: lambda x: int(x) if isinstance(x, float) else x
	}
	item_loaders = [JSONSerializer()]
	install_pre = {'apk': ['libc6-compat']}
	install_version = 'v1.1.3'
	install_cmd = 'go install -v github.com/projectdiscovery/katana/cmd/katana@[install_version]'
	github_handle = 'projectdiscovery/katana'
	proxychains = False
	proxy_socks5 = True
	proxy_http = True
	profile = lambda opts: katana.dynamic_profile(opts)  # noqa: E731

	@staticmethod
	def dynamic_profile(opts):
		headless = katana._get_opt_value(
			opts,
			'headless',
			opts_conf=dict(katana.opts, **katana.meta_opts),
			opt_aliases=opts.get('aliases', [])
		)
		return 'cpu' if headless is True else 'io'

	@staticmethod
	def on_init(self):
		form_fill = self.get_opt_value('form_fill')
		form_extraction = self.get_opt_value('form_extraction')
		store_responses = self.get_opt_value('store_responses')
		if form_fill or form_extraction or store_responses:
			reports_folder_outputs = f'{self.reports_folder}/.outputs'
			self.cmd += f' -srd {shlex.quote(reports_folder_outputs)}'
		self.tags = []
		self.urls = []

	@staticmethod
	def on_json_loaded(self, item):
		# Form detection
		response = item.get('response', {})
		forms = response.get('forms', [])
		parsed_url = urlparse(item['request']['endpoint'])
		url_without_params = urlunparse(parsed_url._replace(query=''))
		params = parsed_url.query.split('&')
		if forms:
			for form in forms:
				method = form['method']
				url = Url(
					form['action'],
					host=parsed_url.hostname,
					method=method,
					stored_response_path=response["stored_response_path"],
					request_headers=self.get_opt_value('header', preprocess=True)
				)
				if url not in self.urls:
					self.urls.append(url)
					yield url
				form_params = form.get('parameters', [])
				yield Tag(
					category='info',
					name='form',
					value=form['action'],
					match=form['action'],
					stored_response_path=response["stored_response_path"],
					extra_data={
						'method': form['method'],
						'enctype': form.get('enctype', ''),
						'parameters': form_params
					}
				)
				for param in form_params:
					yield Tag(
						category='info',
						name='url_param',
						match=form['action'],
						value=param,
						extra_data={'url': url, 'value': 'FUZZ'}
					)
		response = item.get('response')
		if not response:
			return item
		url = Url(
			url=item['request']['endpoint'],
			host=parsed_url.hostname,
			method=item['request']['method'],
			request_headers=self.get_opt_value('header', preprocess=True),
			time=item['timestamp'],
			status_code=item['response'].get('status_code'),
			content_type=item['response'].get('headers', {}).get('content_type', ';').split(';')[0],
			content_length=item['response'].get('headers', {}).get('content_length', 0),
			webserver=item['response'].get('headers', {}).get('server', ''),
			tech=item['response'].get('technologies', []),
			stored_response_path=item['response'].get('stored_response_path', ''),
			response_headers=item['response'].get('headers', {}),
		)
		if url not in self.urls:
			self.urls.append(url)
			yield url
		for param in params:
			if not param:
				continue
			split_param = param.split('=')
			param_name = split_param[0]
			param_value = None
			if len(split_param) > 1:
				param_value = split_param[1]
			if param_name in EXCLUDED_PARAMS:
				continue
			tag = Tag(
				category='info',
				name='url_param',
				value=param_name,
				match=url_without_params,
				extra_data={'value': param_value, 'url': item['request']['endpoint']}
			)
			if tag not in self.tags:
				self.tags.append(tag)
				yield tag

	@staticmethod
	def on_item(self, item):
		if not isinstance(item, (Url, Tag)):
			return item
		store_responses = self.get_opt_value('store_responses')
		if store_responses and os.path.exists(item.stored_response_path):
			with open(item.stored_response_path, 'r', encoding='latin-1') as fin:
				data = fin.read().splitlines(True)
			if not data:
				return item
			first_line = data[0]
			with open(item.stored_response_path, 'w', encoding='latin-1') as fout:
				fout.writelines(data[1:])
				fout.writelines('\n')
				fout.writelines(first_line)
		return item

	@staticmethod
	def on_end(self):
		store_responses = self.get_opt_value('store_responses')
		index_rpath = f'{self.reports_folder}/.outputs/index.txt'
		if store_responses and os.path.exists(index_rpath):
			os.remove(index_rpath)
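The on_item hook rewrites each stored response file. A minimal sketch of the rotation with made-up file contents, assuming katana writes the request URL as the file's first line:

# Move the first line (request URL) to the end so the file starts with the raw HTTP exchange.
data = ['https://example.com/page\n', 'GET /page HTTP/1.1\n', 'Host: example.com\n']
rotated = data[1:] + ['\n', data[0]]
print(''.join(rotated))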
secator/tasks/maigret.py
ADDED
@@ -0,0 +1,87 @@
import json
import logging
import os
import re

from secator.decorators import task
from secator.definitions import (DELAY, EXTRA_DATA, OPT_NOT_SUPPORTED, OUTPUT_PATH, PROXY,
                                 RATE_LIMIT, RETRIES, SITE_NAME, THREADS,
                                 TIMEOUT, URL, STRING, SLUG)
from secator.output_types import UserAccount, Info, Error
from secator.tasks._categories import ReconUser

logger = logging.getLogger(__name__)


@task()
class maigret(ReconUser):
	"""Collect a dossier on a person by username."""
	cmd = 'maigret'
	input_types = [SLUG, STRING]
	output_types = [UserAccount]
	tags = ['user', 'recon', 'username']
	file_flag = None
	input_flag = None
	json_flag = '--json ndjson'
	opt_prefix = '--'
	opts = {
		'site': {'type': str, 'help': 'Sites to check'},
	}
	opt_key_map = {
		DELAY: OPT_NOT_SUPPORTED,
		PROXY: 'proxy',
		RATE_LIMIT: OPT_NOT_SUPPORTED,
		RETRIES: 'retries',
		TIMEOUT: 'timeout',
		THREADS: OPT_NOT_SUPPORTED
	}
	output_map = {
		UserAccount: {
			SITE_NAME: 'sitename',
			URL: lambda x: x['status']['url'],
			EXTRA_DATA: lambda x: x['status'].get('ids', {})
		}
	}
	install_version = '0.5.0'
	# install_pre = {
	# 	'apt': ['libcairo2-dev'],
	# 	'yum|zypper': ['cairo-devel'],
	# 	'*': ['cairo']
	# }
	install_cmd = 'pipx install maigret==[install_version] --force'
	socks5_proxy = True
	profile = 'io'

	@staticmethod
	def on_init(self):
		self.output_path = self.get_opt_value(OUTPUT_PATH)

	@staticmethod
	def on_cmd_done(self):
		# Search output path in cmd output
		if not self.output_path:
			matches = re.findall('JSON ndjson report for .* saved in (.*)', self.output)
			if not matches:
				yield Error(message='JSON output file not found in command output.')
				return
			self.output_path = matches

		if not isinstance(self.output_path, list):
			self.output_path = [self.output_path]

		for path in self.output_path:
			if not os.path.exists(path):
				yield Error(message=f'Could not find JSON results in {path}')
				return

			yield Info(message=f'JSON results saved to {path}')
			with open(path, 'r') as f:
				data = [json.loads(line) for line in f.read().splitlines()]
			for item in data:
				yield item

	@staticmethod
	def validate_item(self, item):
		if isinstance(item, dict):
			return item['http_status'] == 200
		return True
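When maigret is not given an explicit output path, on_cmd_done recovers it from the command output. A minimal sketch with a made-up log line, using the same regex as above:

import re

# Made-up maigret log line; findall returns every saved report path.
output = 'JSON ndjson report for johndoe saved in /tmp/maigret/report_johndoe.ndjson'
matches = re.findall('JSON ndjson report for .* saved in (.*)', output)
print(matches)  # ['/tmp/maigret/report_johndoe.ndjson'] -> becomes self.output_path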