secator 0.0.1__py3-none-any.whl → 0.3.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of secator might be problematic.
- secator/.gitignore +162 -0
- secator/celery.py +8 -68
- secator/cli.py +631 -274
- secator/decorators.py +42 -6
- secator/definitions.py +104 -33
- secator/exporters/csv.py +1 -2
- secator/exporters/gdrive.py +1 -1
- secator/exporters/json.py +1 -2
- secator/exporters/txt.py +1 -2
- secator/hooks/mongodb.py +12 -12
- secator/installer.py +335 -0
- secator/report.py +2 -14
- secator/rich.py +3 -10
- secator/runners/_base.py +106 -34
- secator/runners/_helpers.py +18 -17
- secator/runners/command.py +91 -55
- secator/runners/scan.py +3 -1
- secator/runners/task.py +6 -4
- secator/runners/workflow.py +13 -11
- secator/tasks/_categories.py +14 -19
- secator/tasks/cariddi.py +2 -1
- secator/tasks/dalfox.py +2 -0
- secator/tasks/dirsearch.py +5 -7
- secator/tasks/dnsx.py +1 -0
- secator/tasks/dnsxbrute.py +1 -0
- secator/tasks/feroxbuster.py +6 -7
- secator/tasks/ffuf.py +4 -7
- secator/tasks/gau.py +1 -4
- secator/tasks/gf.py +2 -1
- secator/tasks/gospider.py +1 -0
- secator/tasks/grype.py +47 -47
- secator/tasks/h8mail.py +5 -6
- secator/tasks/httpx.py +24 -18
- secator/tasks/katana.py +11 -15
- secator/tasks/maigret.py +3 -3
- secator/tasks/mapcidr.py +1 -0
- secator/tasks/msfconsole.py +3 -1
- secator/tasks/naabu.py +2 -1
- secator/tasks/nmap.py +14 -17
- secator/tasks/nuclei.py +4 -3
- secator/tasks/searchsploit.py +4 -2
- secator/tasks/subfinder.py +1 -0
- secator/tasks/wpscan.py +11 -13
- secator/utils.py +64 -82
- secator/utils_test.py +3 -2
- secator-0.3.5.dist-info/METADATA +411 -0
- secator-0.3.5.dist-info/RECORD +100 -0
- {secator-0.0.1.dist-info → secator-0.3.5.dist-info}/WHEEL +1 -2
- secator-0.0.1.dist-info/METADATA +0 -199
- secator-0.0.1.dist-info/RECORD +0 -114
- secator-0.0.1.dist-info/top_level.txt +0 -2
- tests/__init__.py +0 -0
- tests/integration/__init__.py +0 -0
- tests/integration/inputs.py +0 -42
- tests/integration/outputs.py +0 -392
- tests/integration/test_scans.py +0 -82
- tests/integration/test_tasks.py +0 -103
- tests/integration/test_workflows.py +0 -163
- tests/performance/__init__.py +0 -0
- tests/performance/loadtester.py +0 -56
- tests/unit/__init__.py +0 -0
- tests/unit/test_celery.py +0 -39
- tests/unit/test_scans.py +0 -0
- tests/unit/test_serializers.py +0 -51
- tests/unit/test_tasks.py +0 -348
- tests/unit/test_workflows.py +0 -96
- {secator-0.0.1.dist-info → secator-0.3.5.dist-info}/entry_points.txt +0 -0
- {secator-0.0.1.dist-info → secator-0.3.5.dist-info/licenses}/LICENSE +0 -0
secator/runners/task.py
CHANGED

@@ -24,7 +24,7 @@ class Task(Runner):
 		# Get task class
 		task_cls = Task.get_task_class(self.config.name)
 
-		#
+		# Run opts
 		run_opts = self.run_opts.copy()
 		run_opts.pop('output', None)
 		dry_run = run_opts.get('show', False)
@@ -39,7 +39,8 @@ class Task(Runner):
 			'print_input_file': DEBUG > 0,
 			'print_item': True,
 			'print_item_count': not self.sync and not dry_run,
-			'print_line': self.sync and not self.output_quiet,
+			'print_line': True
+			# 'print_line': self.sync and not self.output_quiet,
 		}
 		# self.print_item = not self.sync # enable print_item for base Task only if running remote
 		run_opts.update(fmt_opts)
@@ -51,6 +52,7 @@ class Task(Runner):
 		hooks = {task_cls: self.hooks}
 		run_opts['hooks'] = hooks
 		run_opts['context'] = self.context
+		run_opts['reports_folder'] = self.reports_folder
 
 		# Run task
 		if self.sync:
@@ -58,9 +60,9 @@ class Task(Runner):
 			if dry_run:  # don't run
 				return
 		else:
-			result = task_cls.delay(self.targets, **run_opts)
+			self.celery_result = task_cls.delay(self.targets, **run_opts)
 			task = self.process_live_tasks(
-				result,
+				self.celery_result,
 				description=False,
 				results_only=True,
 				print_remote_status=self.print_remote_status)
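The notable change here: the Celery `AsyncResult` returned by `task_cls.delay(...)` is now stored on the runner as `self.celery_result` instead of a local variable, and `reports_folder` is forwarded through `run_opts`. A hedged sketch of what keeping the handle enables (the `Task` constructor arguments and `run()` call below are assumptions, not the documented API):

```python
# Hypothetical usage sketch: with the AsyncResult kept on the runner, callers
# can inspect or revoke the remote Celery task after dispatch.
from secator.runners import Task

task = Task(config, targets=['example.com'], run_opts={'sync': False})  # assumed signature
task.run()
if task.celery_result:                 # AsyncResult handle exposed by this release
    print(task.celery_result.id)      # remote task id
    print(task.celery_result.state)   # PENDING / STARTED / SUCCESS / ...
    # task.celery_result.revoke(terminate=True)  # cancel if needed
```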
secator/runners/workflow.py
CHANGED

@@ -1,5 +1,3 @@
-from celery import chain, chord
-
 from secator.definitions import DEBUG
 from secator.exporters import CsvExporter, JsonExporter
 from secator.output_types import Target
@@ -33,32 +31,34 @@ class Workflow(Runner):
 		for target in self.targets:
 			yield Target(name=target, _source=self.config.name, _type='target', _context=self.context)
 
-		# Task
-
-
-			'json':
+		# Task opts
+		task_run_opts = self.run_opts.copy()
+		task_fmt_opts = {
+			'json': task_run_opts.get('json', False),
 			'print_cmd': True,
 			'print_cmd_prefix': not self.sync,
 			'print_description': self.sync,
-			'print_input_file': DEBUG,
+			'print_input_file': DEBUG > 0,
 			'print_item': True,
 			'print_item_count': True,
 			'print_line': not self.sync,
+			'print_progress': self.sync,
 		}
 
 		# Construct run opts
-
-
+		task_run_opts['hooks'] = self._hooks.get(Task, {})
+		task_run_opts['reports_folder'] = self.reports_folder
+		task_run_opts.update(task_fmt_opts)
 
 		# Build Celery workflow
-		workflow = self.build_celery_workflow(run_opts=
+		workflow = self.build_celery_workflow(run_opts=task_run_opts, results=self.results)
 
 		# Run Celery workflow and get results
 		if self.sync:
 			results = workflow.apply().get()
 		else:
 			result = workflow()
-			self.
+			self.celery_result = result
 			results = self.process_live_tasks(result, results_only=True, print_remote_status=self.print_remote_status)
 
 		# Get workflow results
@@ -70,6 +70,7 @@ class Workflow(Runner):
 		Returns:
 			celery.chain: Celery task chain.
 		"""
+		from celery import chain
 		from secator.celery import forward_results
 		sigs = self.get_tasks(
 			self.config.tasks.toDict(),
@@ -93,6 +94,7 @@ class Workflow(Runner):
 		Returns:
 			list: List of signatures.
 		"""
+		from celery import chain, chord
 		from secator.celery import forward_results
 		sigs = []
 		for task_name, task_opts in obj.items():
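Both hunks at the bottom replace the module-level `from celery import chain, chord` with local imports inside the methods that build the workflow. A toy sketch of this deferred-import pattern, assuming the motivation is to keep importing the module cheap on code paths that never build a Celery workflow:

```python
# Deferred-import sketch: the heavy dependency is imported only when a
# workflow is actually built, not when this module is first imported.
def build_chain(*signatures):
    from celery import chain  # paid once, on first call
    return chain(*signatures)
```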
secator/tasks/_categories.py
CHANGED

@@ -6,17 +6,13 @@ import requests
 from bs4 import BeautifulSoup
 from cpe import CPE
 
-from secator.definitions import (CIDR_RANGE, CONFIDENCE, CVSS_SCORE,
-
-
-
-
-
-
-                                 SEVERITY, TAGS, DATA_FOLDER, THREADS, TIMEOUT,
-                                 URL, USER_AGENT, USERNAME, WORDLIST)
-from secator.output_types import (Ip, Port, Subdomain, Tag, Url, UserAccount,
-                                  Vulnerability)
+from secator.definitions import (CIDR_RANGE, CONFIDENCE, CVSS_SCORE, DATA_FOLDER, DEFAULT_HTTP_WORDLIST,
+                                 DEFAULT_SKIP_CVE_SEARCH, DELAY, DEPTH, DESCRIPTION, FILTER_CODES, FILTER_REGEX,
+                                 FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT, HEADER, HOST, ID, MATCH_CODES, MATCH_REGEX,
+                                 MATCH_SIZE, MATCH_WORDS, METHOD, NAME, PATH, PROVIDER, PROXY, RATE_LIMIT, REFERENCES,
+                                 RETRIES, SEVERITY, TAGS, THREADS, TIMEOUT, URL, USER_AGENT, USERNAME, WORDLIST)
+from secator.output_types import Ip, Port, Subdomain, Tag, Url, UserAccount, Vulnerability
+from secator.rich import console
 from secator.runners import Command
 
 logger = logging.getLogger(__name__)
@@ -63,10 +59,6 @@ OPTS_VULN = [
 	HEADER, DELAY, FOLLOW_REDIRECT, PROXY, RATE_LIMIT, RETRIES, THREADS, TIMEOUT, USER_AGENT
 ]
 
-OPTS_OSINT = [
-
-]
-
 
 #---------------#
 # HTTP category #
@@ -159,14 +151,17 @@ class Vuln(Command):
 		"""
 		cve_info = Vuln.lookup_local_cve(cve_id)
 		if not cve_info:
-
+			if DEFAULT_SKIP_CVE_SEARCH:
+				logger.debug(f'{cve_id} not found locally, and DEFAULT_SKIP_CVE_SEARCH is set: ignoring online search.')
+				return None
+			# logger.debug(f'{cve_id} not found locally. Use `secator install cves` to install CVEs locally.')
 		try:
 			cve_info = requests.get(f'https://cve.circl.lu/api/cve/{cve_id}', timeout=5).json()
 			if not cve_info:
-
-				return
+				console.print(f'Could not fetch CVE info for cve {cve_id}. Skipping.', highlight=False)
+				return None
 		except Exception:
-
+			console.print(f'Could not fetch CVE info for cve {cve_id}. Skipping.', highlight=False)
 			return None
 
 		# Match the CPE string against the affected products CPE FS strings from the CVE data if a CPE was passed.
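`lookup_cve` now resolves locally first and only falls back to the cve.circl.lu API when `DEFAULT_SKIP_CVE_SEARCH` is unset. A standalone sketch of that control flow (`lookup_local_cve` is stubbed out here; in secator it is a method on `Vuln`):

```python
import requests

def lookup_local_cve(cve_id):
    """Stub for Vuln.lookup_local_cve (assumed to query a locally installed CVE dump)."""
    return None

def lookup_cve(cve_id, skip_online_search=False):
    cve_info = lookup_local_cve(cve_id)
    if not cve_info:
        if skip_online_search:  # DEFAULT_SKIP_CVE_SEARCH in the diff
            return None
        try:  # online fallback, mirroring the endpoint used in the diff
            cve_info = requests.get(f'https://cve.circl.lu/api/cve/{cve_id}', timeout=5).json()
        except Exception:
            return None
    return cve_info or None
```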
secator/tasks/cariddi.py
CHANGED

@@ -43,6 +43,7 @@ class cariddi(HttpCrawler):
 	}
 	item_loaders = []
 	install_cmd = 'go install -v github.com/edoardottt/cariddi/cmd/cariddi@latest'
+	install_github_handle = 'edoardottt/cariddi'
 	encoding = 'ansi'
 	proxychains = False
 	proxy_socks5 = True  # with leaks... https://github.com/edoardottt/cariddi/issues/122
@@ -88,7 +89,7 @@ class cariddi(HttpCrawler):
 				items.append(secret)
 
 			for info in infos:
-				CARIDDI_IGNORE_LIST = ['BTC address']
+				CARIDDI_IGNORE_LIST = ['BTC address']  # TODO: make this a config option
 				if info['name'] in CARIDDI_IGNORE_LIST:
 					continue
 				match = info['match']
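`install_github_handle` shows up on most Go-based tasks in this release, alongside the new `secator/installer.py` (+335 lines, not shown in this diff). Presumably it lets the installer fall back to prebuilt GitHub release binaries when the `install_cmd` toolchain is unavailable; a hedged sketch of what resolving such a handle could look like (this is an assumption, not installer.py's actual code):

```python
import requests

def latest_release_assets(handle):
    """Hypothetical helper: list downloadable assets for the latest GitHub
    release of a repo identified by its handle, e.g. 'edoardottt/cariddi'."""
    url = f'https://api.github.com/repos/{handle}/releases/latest'
    resp = requests.get(url, timeout=10)
    resp.raise_for_status()
    return [asset['browser_download_url'] for asset in resp.json().get('assets', [])]

print(latest_release_assets('edoardottt/cariddi'))
```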
secator/tasks/dalfox.py
CHANGED

@@ -24,6 +24,7 @@ class dalfox(VulnHttp):
 	input_flag = 'url'
 	file_flag = 'file'
 	json_flag = '--format json'
+	version_flag = 'version'
 	opt_prefix = '--'
 	opt_key_map = {
 		HEADER: 'header',
@@ -52,6 +53,7 @@ class dalfox(VulnHttp):
 		}
 	}
 	install_cmd = 'go install -v github.com/hahwul/dalfox/v2@latest'
+	install_github_handle = 'hahwul/dalfox'
 	encoding = 'ansi'
 	proxychains = False
 	proxychains_flavor = 'proxychains4'
secator/tasks/dirsearch.py
CHANGED

@@ -7,12 +7,11 @@ from secator.definitions import (CONTENT_LENGTH, CONTENT_TYPE, DELAY, DEPTH,
                                  FILTER_CODES, FILTER_REGEX, FILTER_SIZE,
                                  FILTER_WORDS, FOLLOW_REDIRECT, HEADER,
                                  MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
-                                 MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED, PROXY,
-                                 RATE_LIMIT, RETRIES, STATUS_CODE,
+                                 MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED, OUTPUT_PATH, PROXY,
+                                 RATE_LIMIT, RETRIES, STATUS_CODE,
                                  THREADS, TIMEOUT, USER_AGENT, WORDLIST)
 from secator.output_types import Url
 from secator.tasks._categories import HttpFuzzer
-from secator.utils import get_file_timestamp
 
 
 @task()
@@ -53,7 +52,7 @@ class dirsearch(HttpFuzzer):
 			STATUS_CODE: 'status'
 		}
 	}
-	install_cmd = '
+	install_cmd = 'pipx install dirsearch'
 	proxychains = True
 	proxy_socks5 = True
 	proxy_http = True
@@ -83,8 +82,7 @@ class dirsearch(HttpFuzzer):
 
 	@staticmethod
 	def on_init(self):
-		self.output_path = self.get_opt_value(
+		self.output_path = self.get_opt_value(OUTPUT_PATH)
 		if not self.output_path:
-			timestr = get_file_timestamp()
-			self.output_path = f'{DATA_FOLDER}/dirsearch_{timestr}.json'
+			self.output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json'
 		self.cmd += f' -o {self.output_path}'
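dirsearch, feroxbuster and h8mail all drop the old `{DATA_FOLDER}/<tool>_<timestamp>.json` scheme for a per-run path under the runner's reports folder. A small sketch of the shared convention (`unique_name` is assumed to be a per-task identifier provided by the runner):

```python
from pathlib import Path

def default_output_path(reports_folder, unique_name):
    # Mirrors the f-string in the diff: {reports_folder}/.outputs/{unique_name}.json
    path = Path(reports_folder) / '.outputs' / f'{unique_name}.json'
    path.parent.mkdir(parents=True, exist_ok=True)  # create .outputs if missing
    return str(path)
```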
secator/tasks/dnsx.py
CHANGED
secator/tasks/dnsxbrute.py
CHANGED
secator/tasks/feroxbuster.py
CHANGED

@@ -6,12 +6,11 @@ from secator.definitions import (CONTENT_TYPE, DELAY, DEPTH, FILTER_CODES,
                                  FILTER_REGEX, FILTER_SIZE, FILTER_WORDS,
                                  FOLLOW_REDIRECT, HEADER, LINES, MATCH_CODES,
                                  MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, METHOD,
-                                 OPT_NOT_SUPPORTED, OPT_PIPE_INPUT, PROXY,
-                                 RATE_LIMIT, RETRIES, STATUS_CODE,
+                                 OPT_NOT_SUPPORTED, OPT_PIPE_INPUT, OUTPUT_PATH, PROXY,
+                                 RATE_LIMIT, RETRIES, STATUS_CODE,
                                  THREADS, TIMEOUT, USER_AGENT, WORDLIST, WORDS, DEFAULT_FEROXBUSTER_FLAGS)
 from secator.output_types import Url
 from secator.tasks._categories import HttpFuzzer
-from secator.utils import get_file_timestamp
 
 
 @task()
@@ -61,10 +60,11 @@ class feroxbuster(HttpFuzzer):
 		}
 	}
 	install_cmd = (
-		'sudo apt install -y unzip && '
+		'sudo apt install -y unzip curl && '
 		'curl -sL https://raw.githubusercontent.com/epi052/feroxbuster/master/install-nix.sh | '
 		'bash && sudo mv feroxbuster /usr/local/bin'
 	)
+	install_github_handle = 'epi052/feroxbuster'
 	proxychains = False
 	proxy_socks5 = True
 	proxy_http = True
@@ -72,10 +72,9 @@ class feroxbuster(HttpFuzzer):
 
 	@staticmethod
 	def on_init(self):
-		self.output_path = self.get_opt_value(
+		self.output_path = self.get_opt_value(OUTPUT_PATH)
 		if not self.output_path:
-			timestr = get_file_timestamp()
-			self.output_path = f'{DATA_FOLDER}/feroxbuster_{timestr}.json'
+			self.output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json'
 		Path(self.output_path).touch()
 		self.cmd += f' --output {self.output_path}'
 
secator/tasks/ffuf.py
CHANGED

@@ -7,7 +7,7 @@ from secator.definitions import (AUTO_CALIBRATION, CONTENT_LENGTH,
                                  MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED,
                                  PERCENT, PROXY, RATE_LIMIT, RETRIES,
                                  STATUS_CODE, THREADS, TIME, TIMEOUT,
-                                 USER_AGENT, WORDLIST)
+                                 USER_AGENT, WORDLIST, WORDLISTS_FOLDER)
 from secator.output_types import Progress, Url
 from secator.serializers import JSONSerializer, RegexSerializer
 from secator.tasks._categories import HttpFuzzer
@@ -23,6 +23,7 @@ class ffuf(HttpFuzzer):
 	input_chunk_size = 1
 	file_flag = None
 	json_flag = '-json'
+	version_flag = '-V'
 	item_loaders = [
 		JSONSerializer(),
 		RegexSerializer(FFUF_PROGRESS_REGEX, fields=['count', 'total', 'rps', 'duration', 'errors'])
@@ -69,10 +70,8 @@ class ffuf(HttpFuzzer):
 		},
 	}
 	encoding = 'ansi'
-	install_cmd = (
-		'go install -v github.com/ffuf/ffuf@latest && '
-		'sudo git clone https://github.com/danielmiessler/SecLists /usr/share/seclists || true'
-	)
+	install_cmd = f'go install -v github.com/ffuf/ffuf@latest && sudo git clone https://github.com/danielmiessler/SecLists {WORDLISTS_FOLDER}/seclists || true'  # noqa: E501
+	install_github_handle = 'ffuf/ffuf'
 	proxychains = False
 	proxy_socks5 = True
 	proxy_http = True
@@ -82,5 +81,3 @@ class ffuf(HttpFuzzer):
 	def on_item(self, item):
 		item.method = self.get_opt_value(METHOD) or 'GET'
 		return item
-
-	# TODO: write custom item_loader to pick up Progress items too
secator/tasks/gau.py
CHANGED

@@ -37,11 +37,8 @@ class gau(HttpCrawler):
 		USER_AGENT: OPT_NOT_SUPPORTED,
 	}
 	install_cmd = 'go install -v github.com/lc/gau/v2/cmd/gau@latest'
+	install_github_handle = 'lc/gau'
 	proxychains = False
 	proxy_socks5 = True
 	proxy_http = True
 	profile = 'io'
-
-	# @staticmethod
-	# def validate_item(self, item):
-	# 	return item['url'] == 'response'
secator/tasks/gf.py
CHANGED

@@ -1,5 +1,5 @@
 from secator.decorators import task
-from secator.definitions import OPT_PIPE_INPUT, URL
+from secator.definitions import OPT_PIPE_INPUT, OPT_NOT_SUPPORTED, URL
 from secator.output_types import Tag
 from secator.tasks._categories import Tagger
 
@@ -10,6 +10,7 @@ class gf(Tagger):
 	cmd = 'gf'
 	file_flag = OPT_PIPE_INPUT
 	input_flag = OPT_PIPE_INPUT
+	version_flag = OPT_NOT_SUPPORTED
 	opts = {
 		'pattern': {'type': str, 'help': 'Pattern names to match against (comma-delimited)'}
 	}
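`version_flag` is added across several tasks in this release (dalfox, ffuf, h8mail, gf). The consumer is not shown in this diff, but it plausibly feeds the new installer/health-check code; a hedged sketch of how such a flag might be used:

```python
import subprocess

def get_tool_version(cmd, version_flag):
    """Hypothetical consumer of version_flag; OPT_NOT_SUPPORTED (None here) skips the check."""
    if version_flag is None:
        return None
    result = subprocess.run(f'{cmd} {version_flag}', shell=True, capture_output=True, text=True)
    return (result.stdout or result.stderr).strip()
```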
secator/tasks/gospider.py
CHANGED

@@ -52,6 +52,7 @@ class gospider(HttpCrawler):
 		}
 	}
 	install_cmd = 'go install -v github.com/jaeles-project/gospider@latest'
+	install_github_handle = 'jaeles-project/gospider'
 	ignore_return_code = True
 	proxychains = False
 	proxy_socks5 = True  # with leaks... https://github.com/jaeles-project/gospider/issues/61
secator/tasks/grype.py
CHANGED

@@ -7,52 +7,6 @@ from secator.output_types import Vulnerability
 from secator.tasks._categories import VulnCode
 
 
-def grype_item_loader(self, line):
-	"""Load vulnerabilty dicts from grype line output."""
-	split = [i for i in line.split(' ') if i]
-	if not len(split) in [5, 6] or split[0] == 'NAME':
-		return None
-	version_fixed = None
-	if len(split) == 5:  # no version fixed
-		product, version, product_type, vuln_id, severity = tuple(split)
-	elif len(split) == 6:
-		product, version, version_fixed, product_type, vuln_id, severity = tuple(split)
-	extra_data = {
-		'lang': product_type,
-		'product': product,
-		'version': version,
-	}
-	if version_fixed:
-		extra_data['version_fixed'] = version_fixed
-	data = {
-		'id': vuln_id,
-		'name': vuln_id,
-		'matched_at': self.input,
-		'confidence': 'medium',
-		'severity': severity.lower(),
-		'provider': 'grype',
-		'cvss_score': -1,
-		'tags': [],
-	}
-	if vuln_id.startswith('GHSA'):
-		data['provider'] = 'github.com'
-		data['references'] = [f'https://github.com/advisories/{vuln_id}']
-		data['tags'].extend(['cve', 'ghsa'])
-		vuln = VulnCode.lookup_ghsa(vuln_id)
-		if vuln:
-			data.update(vuln)
-			data['severity'] = data['severity'] or severity.lower()
-			extra_data['ghsa_id'] = vuln_id
-	elif vuln_id.startswith('CVE'):
-		vuln = VulnCode.lookup_cve(vuln_id)
-		if vuln:
-			vuln['tags'].append('cve')
-			data.update(vuln)
-			data['severity'] = data['severity'] or severity.lower()
-	data['extra_data'] = extra_data
-	return data
-
-
 @task()
 class grype(VulnCode):
 	"""Vulnerability scanner for container images and filesystems."""
@@ -76,4 +30,50 @@ class grype(VulnCode):
 	install_cmd = (
 		'curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sudo sh -s -- -b /usr/local/bin'
 	)
-
+	install_github_handle = 'anchore/grype'
+
+	@staticmethod
+	def item_loader(self, line):
+		"""Load vulnerabilty dicts from grype line output."""
+		split = [i for i in line.split(' ') if i]
+		if not len(split) in [5, 6] or split[0] == 'NAME':
+			return None
+		version_fixed = None
+		if len(split) == 5:  # no version fixed
+			product, version, product_type, vuln_id, severity = tuple(split)
+		elif len(split) == 6:
+			product, version, version_fixed, product_type, vuln_id, severity = tuple(split)
+		extra_data = {
+			'lang': product_type,
+			'product': product,
+			'version': version,
+		}
+		if version_fixed:
+			extra_data['version_fixed'] = version_fixed
+		data = {
+			'id': vuln_id,
+			'name': vuln_id,
+			'matched_at': self.input,
+			'confidence': 'medium',
+			'severity': severity.lower(),
+			'provider': 'grype',
+			'cvss_score': -1,
+			'tags': [],
+		}
+		if vuln_id.startswith('GHSA'):
+			data['provider'] = 'github.com'
+			data['references'] = [f'https://github.com/advisories/{vuln_id}']
+			data['tags'].extend(['cve', 'ghsa'])
+			vuln = VulnCode.lookup_ghsa(vuln_id)
+			if vuln:
+				data.update(vuln)
+				data['severity'] = data['severity'] or severity.lower()
+				extra_data['ghsa_id'] = vuln_id
+		elif vuln_id.startswith('CVE'):
+			vuln = VulnCode.lookup_cve(vuln_id)
+			if vuln:
+				vuln['tags'].append('cve')
+				data.update(vuln)
+				data['severity'] = data['severity'] or severity.lower()
+		data['extra_data'] = extra_data
+		return data
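The loader itself is unchanged by this move from module function to static method: it still splits grype's fixed-width table output on runs of spaces and branches on GHSA vs CVE identifiers. Feeding it a sample line (illustrative output, not captured from a real scan):

```python
# Columns: NAME  INSTALLED  FIXED-IN  TYPE  VULNERABILITY  SEVERITY
line = 'requests  2.19.1  2.20.0  python  CVE-2018-18074  High'
split = [i for i in line.split(' ') if i]
print(split)
# ['requests', '2.19.1', '2.20.0', 'python', 'CVE-2018-18074', 'High']
product, version, version_fixed, product_type, vuln_id, severity = split  # len(split) == 6
```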
secator/tasks/h8mail.py
CHANGED

@@ -2,9 +2,8 @@ import os
 import json
 
 from secator.decorators import task
-from secator.definitions import DATA_FOLDER, EMAIL
+from secator.definitions import EMAIL, OUTPUT_PATH
 from secator.tasks._categories import OSInt
-from secator.utils import get_file_timestamp
 from secator.output_types import UserAccount
 
 
@@ -16,6 +15,7 @@ class h8mail(OSInt):
 	input_flag = '--targets'
 	input_type = EMAIL
 	file_flag = '-domain'
+	version_flag = '--help'
 	opt_prefix = '--'
 	opt_key_map = {
 
@@ -27,14 +27,13 @@ class h8mail(OSInt):
 	output_map = {
 	}
 
-	install_cmd = '
+	install_cmd = 'pipx install h8mail'
 
 	@staticmethod
 	def on_start(self):
-		output_path = self.get_opt_value(
+		output_path = self.get_opt_value(OUTPUT_PATH)
 		if not output_path:
-			timestr = get_file_timestamp()
-			output_path = f'{DATA_FOLDER}/h8mail_{timestr}.json'
+			output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json'
 		self.output_path = output_path
 		self.cmd = self.cmd.replace('--json', f'--json {self.output_path}')
 
secator/tasks/httpx.py
CHANGED

@@ -1,5 +1,4 @@
 import os
-import uuid
 
 from secator.decorators import task
 from secator.definitions import (DEFAULT_HTTPX_FLAGS,
@@ -8,7 +7,7 @@ from secator.definitions import (DEFAULT_HTTPX_FLAGS,
                                  FILTER_WORDS, FOLLOW_REDIRECT, HEADER,
                                  MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
                                  MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED, PROXY,
-                                 RATE_LIMIT, RETRIES,
+                                 RATE_LIMIT, RETRIES, THREADS,
                                  TIMEOUT, URL, USER_AGENT)
 from secator.tasks._categories import Http
 from secator.utils import sanitize_url
@@ -24,14 +23,17 @@ class httpx(Http):
 	opts = {
 		# 'silent': {'is_flag': True, 'default': False, 'help': 'Silent mode'},
 		# 'td': {'is_flag': True, 'default': True, 'help': 'Tech detection'},
-		'irr': {'is_flag': True, 'default': False, 'help': 'Include http request / response'},
+		# 'irr': {'is_flag': True, 'default': False, 'help': 'Include http request / response'},
 		'fep': {'is_flag': True, 'default': False, 'help': 'Error Page Classifier and Filtering'},
 		'favicon': {'is_flag': True, 'default': False, 'help': 'Favicon hash'},
 		'jarm': {'is_flag': True, 'default': False, 'help': 'Jarm fingerprint'},
 		'asn': {'is_flag': True, 'default': False, 'help': 'ASN detection'},
 		'cdn': {'is_flag': True, 'default': False, 'help': 'CDN detection'},
 		'debug_resp': {'is_flag': True, 'default': False, 'help': 'Debug response'},
-		'
+		'vhost': {'is_flag': True, 'default': False, 'help': 'Probe and display server supporting VHOST'},
+		'screenshot': {'is_flag': True, 'short': 'ss', 'default': False, 'help': 'Screenshot response'},
+		'system_chrome': {'is_flag': True, 'default': False, 'help': 'Use local installed Chrome for screenshot'},
+		'headless_options': {'is_flag': False, 'short': 'ho', 'default': None, 'help': 'Headless Chrome additional options'},
 	}
 	opt_key_map = {
 		HEADER: 'header',
@@ -58,11 +60,29 @@ class httpx(Http):
 		DELAY: lambda x: str(x) + 's' if x else None,
 	}
 	install_cmd = 'go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest'
+	install_github_handle = 'projectdiscovery/httpx'
 	proxychains = False
 	proxy_socks5 = True
 	proxy_http = True
 	profile = 'cpu'
 
+	@staticmethod
+	def on_init(self):
+		debug_resp = self.get_opt_value('debug_resp')
+		if debug_resp:
+			self.cmd = self.cmd.replace('-silent', '')
+		if DEFAULT_STORE_HTTP_RESPONSES:
+			self.output_response_path = f'{self.reports_folder}/response'
+			self.output_screenshot_path = f'{self.reports_folder}/screenshot'
+			os.makedirs(self.output_response_path, exist_ok=True)
+			os.makedirs(self.output_screenshot_path, exist_ok=True)
+			self.cmd += f' -sr -srd {self.reports_folder}'
+
+		# Remove screenshot bytes and body bytes when screenshot
+		screenshot = self.get_opt_value('screenshot')
+		if screenshot:
+			self.cmd += ' -esb -ehb'
+
 	@staticmethod
 	def on_item_pre_convert(self, item):
 		for k, v in item.items():
@@ -76,20 +96,6 @@ class httpx(Http):
 		item[URL] = item.get('final_url') or item[URL]
 		return item
 
-	@staticmethod
-	def on_init(self):
-		debug_resp = self.get_opt_value('debug_resp')
-		if debug_resp:
-			self.cmd = self.cmd.replace('-silent', '')
-		if DEFAULT_STORE_HTTP_RESPONSES:
-			_id = uuid.uuid4()
-			output_path = f'{TASKS_FOLDER}/{_id}'
-			self.output_response_path = f'{output_path}/response'
-			self.output_screenshot_path = f'{output_path}/screenshot'
-			os.makedirs(self.output_response_path, exist_ok=True)
-			os.makedirs(self.output_screenshot_path, exist_ok=True)
-			self.cmd += f' -sr -srd {output_path}'
-
 	@staticmethod
 	def on_end(self):
 		if DEFAULT_STORE_HTTP_RESPONSES:
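`on_init` moves ahead of `on_item_pre_convert` and swaps the `{TASKS_FOLDER}/{uuid}` scratch directory for the runner's reports folder. An illustration of how the command line ends up being rewritten (the base command and path are made up; the `-sr`, `-srd`, `-esb`, `-ehb` flags are taken verbatim from the diff):

```python
cmd = 'httpx -silent -json'                    # illustrative base command
reports_folder = '/tmp/secator/reports/httpx'  # illustrative path
store_responses, screenshot = True, True

if store_responses:
    cmd += f' -sr -srd {reports_folder}'  # store responses under the report dir
if screenshot:
    cmd += ' -esb -ehb'  # per the diff comment: strip screenshot/body bytes from JSON output
print(cmd)
# httpx -silent -json -sr -srd /tmp/secator/reports/httpx -esb -ehb
```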
secator/tasks/katana.py
CHANGED

@@ -1,6 +1,5 @@
 import os
 import json
-import uuid
 from urllib.parse import urlparse
 
 from secator.decorators import task
@@ -11,7 +10,7 @@ from secator.definitions import (CONTENT_TYPE, DEFAULT_KATANA_FLAGS,
                                  MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
                                  MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED, PROXY,
                                  RATE_LIMIT, RETRIES, STATUS_CODE,
-                                 STORED_RESPONSE_PATH,
+                                 STORED_RESPONSE_PATH, TECH,
                                  THREADS, TIME, TIMEOUT, URL, USER_AGENT, WEBSERVER, CONTENT_LENGTH)
 from secator.output_types import Url, Tag
 from secator.tasks._categories import HttpCrawler
@@ -71,7 +70,8 @@ class katana(HttpCrawler):
 		}
 	}
 	item_loaders = []
-	install_cmd = 'go install -v github.com/projectdiscovery/katana/cmd/katana@latest'
+	install_cmd = 'sudo apt install build-essential && go install -v github.com/projectdiscovery/katana/cmd/katana@latest'
+	install_github_handle = 'projectdiscovery/katana'
 	proxychains = False
 	proxy_socks5 = True
 	proxy_http = True
@@ -107,27 +107,23 @@ class katana(HttpCrawler):
 		if debug_resp:
 			self.cmd = self.cmd.replace('-silent', '')
 		if DEFAULT_STORE_HTTP_RESPONSES:
-			_id = uuid.uuid4()
-			output_path = f'{TASKS_FOLDER}/{_id}'
-			self.output_response_path = output_path
-			os.makedirs(self.output_response_path, exist_ok=True)
-			self.cmd += f' -sr -srd {output_path}'
-
-	@staticmethod
-	def on_end(self):
-		if DEFAULT_STORE_HTTP_RESPONSES and os.path.exists(self.output_response_path + '/index.txt'):
-			os.remove(self.output_response_path + '/index.txt')
+			self.cmd += f' -sr -srd {self.reports_folder}'
 
 	@staticmethod
 	def on_item(self, item):
 		if not isinstance(item, Url):
 			return item
 		if DEFAULT_STORE_HTTP_RESPONSES and os.path.exists(item.stored_response_path):
-			with open(item.stored_response_path, 'r') as fin:
+			with open(item.stored_response_path, 'r', encoding='latin-1') as fin:
 				data = fin.read().splitlines(True)
 			first_line = data[0]
-			with open(item.stored_response_path, 'w') as fout:
+			with open(item.stored_response_path, 'w', encoding='latin-1') as fout:
 				fout.writelines(data[1:])
 				fout.writelines('\n')
 				fout.writelines(first_line)
 		return item
+
+	@staticmethod
+	def on_end(self):
+		if DEFAULT_STORE_HTTP_RESPONSES and os.path.exists(self.reports_folder + '/index.txt'):
+			os.remove(self.reports_folder + '/index.txt')
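`on_item` still rotates the first line of each stored response to the end of the file; the change is the explicit `latin-1` encoding, which decodes any byte sequence without raising. A self-contained re-run of that rewrite (the file content is illustrative; the first line appears to be katana's URL header for the stored response, an assumption based on this hook):

```python
path = '/tmp/stored_response.txt'
with open(path, 'w', encoding='latin-1') as f:
    f.write('https://example.com/\nHTTP/1.1 200 OK\n\n<html>...</html>')

with open(path, 'r', encoding='latin-1') as fin:   # latin-1 never fails to decode
    data = fin.read().splitlines(True)
first_line = data[0]
with open(path, 'w', encoding='latin-1') as fout:  # move the first line to the end
    fout.writelines(data[1:])
    fout.writelines('\n')
    fout.writelines(first_line)
```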