secator 0.3.5__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registries.
Potentially problematic release.
This version of secator might be problematic.
- secator/celery.py +16 -21
- secator/cli.py +163 -81
- secator/config.py +555 -122
- secator/decorators.py +17 -10
- secator/definitions.py +4 -77
- secator/exporters/gdrive.py +10 -10
- secator/hooks/mongodb.py +3 -4
- secator/installer.py +10 -6
- secator/output_types/vulnerability.py +3 -1
- secator/runners/_base.py +12 -11
- secator/runners/_helpers.py +52 -34
- secator/runners/command.py +26 -30
- secator/runners/scan.py +4 -8
- secator/runners/task.py +2 -2
- secator/runners/workflow.py +3 -7
- secator/tasks/_categories.py +95 -44
- secator/tasks/dnsxbrute.py +3 -2
- secator/tasks/ffuf.py +2 -2
- secator/tasks/httpx.py +4 -4
- secator/tasks/katana.py +5 -4
- secator/tasks/msfconsole.py +3 -4
- secator/tasks/nmap.py +95 -48
- secator/tasks/nuclei.py +4 -0
- secator/tasks/searchsploit.py +0 -1
- secator/template.py +137 -0
- secator/utils.py +3 -7
- {secator-0.3.5.dist-info → secator-0.4.0.dist-info}/METADATA +12 -6
- {secator-0.3.5.dist-info → secator-0.4.0.dist-info}/RECORD +31 -30
- {secator-0.3.5.dist-info → secator-0.4.0.dist-info}/WHEEL +1 -1
- {secator-0.3.5.dist-info → secator-0.4.0.dist-info}/entry_points.txt +0 -0
- {secator-0.3.5.dist-info → secator-0.4.0.dist-info}/licenses/LICENSE +0 -0
secator/runners/scan.py
CHANGED
@@ -1,7 +1,7 @@
 import logging

-from secator.
-from secator.
+from secator.template import TemplateLoader
+from secator.config import CONFIG
 from secator.runners._base import Runner
 from secator.runners._helpers import run_extractors
 from secator.runners.workflow import Workflow
@@ -13,10 +13,7 @@ logger = logging.getLogger(__name__)

 class Scan(Runner):

-    default_exporters =
-        JsonExporter,
-        CsvExporter
-    ]
+    default_exporters = CONFIG.scans.exporters

     @classmethod
     def delay(cls, *args, **kwargs):
@@ -44,7 +41,6 @@

         # Workflow opts
         run_opts = self.run_opts.copy()
-        run_opts['reports_folder'] = self.reports_folder
         fmt_opts = {
             'json': run_opts.get('json', False),
             'print_item': False,
@@ -56,7 +52,7 @@

         # Run workflow
         workflow = Workflow(
-
+            TemplateLoader(name=f'workflows/{name}'),
             targets,
             results=[],
             run_opts=run_opts,
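Note: Scan now hands each sub-workflow a TemplateLoader instance instead of a pre-loaded config object, and no longer injects reports_folder into the run options. A minimal sketch of the new call pattern, based only on the constructor call visible above; the 'host_recon' workflow name, target and empty run_opts are illustrative placeholders, not taken from the package:

from secator.template import TemplateLoader
from secator.runners.workflow import Workflow

# Resolve a workflow template by name, then run it against a target (illustrative values).
config = TemplateLoader(name='workflows/host_recon')
workflow = Workflow(config, ['example.com'], results=[], run_opts={})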
secator/runners/task.py
CHANGED
@@ -1,11 +1,12 @@
 from secator.definitions import DEBUG
 from secator.output_types import Target
+from secator.config import CONFIG
 from secator.runners import Runner
 from secator.utils import discover_tasks


 class Task(Runner):
-    default_exporters =
+    default_exporters = CONFIG.tasks.exporters
     enable_hooks = False

     def delay(cls, *args, **kwargs):
@@ -52,7 +53,6 @@
         hooks = {task_cls: self.hooks}
         run_opts['hooks'] = hooks
         run_opts['context'] = self.context
-        run_opts['reports_folder'] = self.reports_folder

         # Run task
         if self.sync:
secator/runners/workflow.py
CHANGED
@@ -1,6 +1,6 @@
 from secator.definitions import DEBUG
-from secator.exporters import CsvExporter, JsonExporter
 from secator.output_types import Target
+from secator.config import CONFIG
 from secator.runners._base import Runner
 from secator.runners.task import Task
 from secator.utils import merge_opts
@@ -8,10 +8,7 @@ from secator.utils import merge_opts

 class Workflow(Runner):

-    default_exporters =
-        JsonExporter,
-        CsvExporter
-    ]
+    default_exporters = CONFIG.workflows.exporters

     @classmethod
     def delay(cls, *args, **kwargs):
@@ -47,7 +44,6 @@

         # Construct run opts
         task_run_opts['hooks'] = self._hooks.get(Task, {})
-        task_run_opts['reports_folder'] = self.reports_folder
         task_run_opts.update(task_fmt_opts)

         # Build Celery workflow
@@ -85,7 +81,7 @@
         """Get tasks recursively as Celery chains / chords.

         Args:
-            obj (secator.config.
+            obj (secator.config.TemplateLoader): Config.
             targets (list): List of targets.
             workflow_opts (dict): Workflow options.
             run_opts (dict): Run options.
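Note: Task, Workflow and Scan all drop their hard-coded [JsonExporter, CsvExporter] defaults in favour of values read from the new global CONFIG object. A minimal sketch of the lookup side of that pattern, assuming CONFIG supports dotted attribute access to these keys; only the three paths shown in the diffs are used, and the prints are illustrative:

from secator.config import CONFIG

# Default exporters are now resolved from user/global configuration per runner type.
print(CONFIG.tasks.exporters)      # defaults used by Task
print(CONFIG.workflows.exporters)  # defaults used by Workflow
print(CONFIG.scans.exporters)      # defaults used by Scan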
secator/tasks/_categories.py
CHANGED
@@ -1,21 +1,20 @@
 import json
-import logging
 import os

 import requests
 from bs4 import BeautifulSoup
 from cpe import CPE

-from secator.definitions import (CIDR_RANGE,
-
-
-
-
+from secator.definitions import (CIDR_RANGE, CVSS_SCORE, DELAY, DEPTH, DESCRIPTION, FILTER_CODES,
+                                 FILTER_REGEX, FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT, HEADER, HOST, ID,
+                                 MATCH_CODES, MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, METHOD, NAME, PATH, PROVIDER, PROXY,
+                                 RATE_LIMIT, REFERENCES, RETRIES, SEVERITY, TAGS, THREADS, TIMEOUT, URL, USER_AGENT,
+                                 USERNAME, WORDLIST)
 from secator.output_types import Ip, Port, Subdomain, Tag, Url, UserAccount, Vulnerability
-from secator.
+from secator.config import CONFIG
 from secator.runners import Command
+from secator.utils import debug

-logger = logging.getLogger(__name__)

 OPTS = {
     HEADER: {'type': str, 'help': 'Custom header to add to each request in the form "KEY1:VALUE1; KEY2:VALUE2"'},
@@ -37,7 +36,7 @@ OPTS = {
     THREADS: {'type': int, 'help': 'Number of threads to run', 'default': 50},
     TIMEOUT: {'type': int, 'help': 'Request timeout'},
     USER_AGENT: {'type': str, 'short': 'ua', 'help': 'User agent, e.g "Mozilla Firefox 1.0"'},
-    WORDLIST: {'type': str, 'short': 'w', 'default':
+    WORDLIST: {'type': str, 'short': 'w', 'default': CONFIG.wordlists.defaults.http, 'help': 'Wordlist to use'}
 }

 OPTS_HTTP = [
@@ -121,7 +120,7 @@ class Vuln(Command):

     @staticmethod
     def lookup_local_cve(cve_id):
-        cve_path = f'{
+        cve_path = f'{CONFIG.dirs.data}/cves/{cve_id}.json'
         if os.path.exists(cve_path):
             with open(cve_path, 'r') as f:
                 return json.load(f)
@@ -131,13 +130,54 @@ class Vuln(Command):
     # def lookup_exploitdb(exploit_id):
     #     print('looking up exploit')
     #     try:
-    #
-    #
-    #
+    #         resp = requests.get(f'https://exploit-db.com/exploits/{exploit_id}', timeout=5)
+    #         resp.raise_for_status()
+    #         content = resp.content
+    #     except requests.RequestException as e:
+    #         debug(f'Failed remote query for {exploit_id} ({str(e)}).', sub='cve')
     #         logger.error(f'Could not fetch exploit info for exploit {exploit_id}. Skipping.')
     #         return None
     #     return cve_info

+    @staticmethod
+    def create_cpe_string(product_name, version):
+        """
+        Generate a CPE string for a given product and version.
+
+        Args:
+            product_name (str): The name of the product.
+            version (str): The version of the product.
+
+        Returns:
+            str: A CPE string formatted according to the CPE 2.3 specification.
+        """
+        cpe_version = "2.3"  # CPE Specification version
+        part = "a"  # 'a' for application
+        vendor = product_name.lower()  # Vendor name, using product name
+        product = product_name.lower()  # Product name
+        version = version  # Product version
+        cpe_string = f"cpe:{cpe_version}:{part}:{vendor}:{product}:{version}:*:*:*:*:*:*:*"
+        return cpe_string
+
+    @staticmethod
+    def match_cpes(fs1, fs2):
+        """Check if two CPEs match. Partial matches consisting of <vendor>:<product>:<version> are considered a match.
+
+        Args:
+            fs1 (str): Format string 1.
+            fs2 (str): Format string 2.
+
+        Returns:
+            bool: True if the two CPEs match, False otherwise.
+        """
+        if fs1 == fs2:
+            return True
+        split_fs1 = fs1.split(':')
+        split_fs2 = fs2.split(':')
+        tup1 = split_fs1[3], split_fs1[4], split_fs1[5]
+        tup2 = split_fs2[3], split_fs2[4], split_fs2[5]
+        return tup1 == tup2
+
     @staticmethod
     def lookup_cve(cve_id, cpes=[]):
         """Search for a CVE in local db or using cve.circl.lu and return vulnerability data.
@@ -150,18 +190,21 @@
             dict: vulnerability data.
         """
         cve_info = Vuln.lookup_local_cve(cve_id)
+
+        # Online CVE lookup
         if not cve_info:
-            if
-
+            if CONFIG.runners.skip_cve_search:
+                debug(f'Skip remote query for {cve_id} since config.runners.skip_cve_search is set.', sub='cve')
+                return None
+            if CONFIG.offline_mode:
+                debug(f'Skip remote query for {cve_id} since config.offline_mode is set.', sub='cve')
                 return None
-            # logger.debug(f'{cve_id} not found locally. Use `secator install cves` to install CVEs locally.')
             try:
-
-
-
-
-
-                console.print(f'Could not fetch CVE info for cve {cve_id}. Skipping.', highlight=False)
+                resp = requests.get(f'https://cve.circl.lu/api/cve/{cve_id}', timeout=5)
+                resp.raise_for_status()
+                cve_info = resp.json()
+            except requests.RequestException as e:
+                debug(f'Failed remote query for {cve_id} ({str(e)}).', sub='cve')
                 return None

         # Match the CPE string against the affected products CPE FS strings from the CVE data if a CPE was passed.
@@ -177,14 +220,15 @@
             cpe_fs = cpe_obj.as_fs()
             # cpe_version = cpe_obj.get_version()[0]
             vulnerable_fs = cve_info['vulnerable_product']
-            # logger.debug(f'Matching CPE {cpe} against {len(vulnerable_fs)} vulnerable products for {cve_id}')
             for fs in vulnerable_fs:
-
-
+                # debug(f'{cve_id}: Testing {cpe_fs} against {fs}', sub='cve')  # for hardcore debugging
+                if Vuln.match_cpes(cpe_fs, fs):
+                    debug(f'{cve_id}: CPE match found for {cpe}.', sub='cve')
                     cpe_match = True
                     tags.append('cpe-match')
-
-
+                    break
+            if not cpe_match:
+                debug(f'{cve_id}: no CPE match found for {cpe}.', sub='cve')

         # Parse CVE id and CVSS
         name = id = cve_info['id']
@@ -223,17 +267,9 @@
         # Set vulnerability severity based on CVSS score
         severity = None
         if cvss:
-
-                severity = 'low'
-            elif cvss < 7:
-                severity = 'medium'
-            elif cvss < 9:
-                severity = 'high'
-            else:
-                severity = 'critical'
+            severity = Vuln.cvss_to_severity(cvss)

         # Set confidence
-        confidence = 'low' if not cpe_match else 'high'
         vuln = {
             ID: id,
             NAME: name,
@@ -243,7 +279,6 @@
             TAGS: tags,
             REFERENCES: [f'https://cve.circl.lu/cve/{id}'] + references,
             DESCRIPTION: description,
-            CONFIDENCE: confidence
         }
         return vuln

@@ -257,17 +292,33 @@
         Returns:
             dict: vulnerability data.
         """
-
-
-
+        try:
+            resp = requests.get(f'https://github.com/advisories/{ghsa_id}', timeout=5)
+            resp.raise_for_status()
+        except requests.RequestException as e:
+            debug(f'Failed remote query for {ghsa_id} ({str(e)}).', sub='cve')
+            return None
+        soup = BeautifulSoup(resp.text, 'lxml')
         sidebar_items = soup.find_all('div', {'class': 'discussion-sidebar-item'})
         cve_id = sidebar_items[2].find('div').text.strip()
-
-        if
-
-            return
+        vuln = Vuln.lookup_cve(cve_id)
+        if vuln:
+            vuln[TAGS].append('ghsa')
+        return vuln
         return None

+    @staticmethod
+    def cvss_to_severity(cvss):
+        if cvss < 4:
+            severity = 'low'
+        elif cvss < 7:
+            severity = 'medium'
+        elif cvss < 9:
+            severity = 'high'
+        else:
+            severity = 'critical'
+        return severity
+

 class VulnHttp(Vuln):
     input_type = HOST
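The new create_cpe_string, match_cpes and cvss_to_severity helpers are self-contained, so their behaviour can be read straight from the added lines above. A usage sketch; the nginx values and scores are made-up examples, not taken from the package:

from secator.tasks._categories import Vuln

fs = Vuln.create_cpe_string('nginx', '1.18.0')
# -> 'cpe:2.3:a:nginx:nginx:1.18.0:*:*:*:*:*:*:*'

# match_cpes only compares the vendor:product:version fields (indices 3-5 of the CPE 2.3 string),
# so format strings that differ elsewhere still count as a match.
Vuln.match_cpes(fs, 'cpe:2.3:a:nginx:nginx:1.18.0:*:*:*:*:*:*:*')  # True
Vuln.match_cpes(fs, 'cpe:2.3:a:nginx:nginx:1.19.0:*:*:*:*:*:*:*')  # False (version differs)

# cvss_to_severity factors out the severity buckets previously inlined in lookup_cve.
Vuln.cvss_to_severity(3.1)  # 'low'
Vuln.cvss_to_severity(9.8)  # 'critical'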
secator/tasks/dnsxbrute.py
CHANGED
@@ -1,5 +1,6 @@
 from secator.decorators import task
-from secator.definitions import (
+from secator.definitions import (DOMAIN, HOST, RATE_LIMIT, RETRIES, THREADS, WORDLIST, EXTRA_DATA)
+from secator.config import CONFIG
 from secator.output_types import Subdomain
 from secator.tasks._categories import ReconDns

@@ -17,7 +18,7 @@ class dnsxbrute(ReconDns):
         THREADS: 'threads',
     }
     opts = {
-        WORDLIST: {'type': str, 'short': 'w', 'default':
+        WORDLIST: {'type': str, 'short': 'w', 'default': CONFIG.wordlists.defaults.dns, 'help': 'Wordlist'},
         'trace': {'is_flag': True, 'default': False, 'help': 'Perform dns tracing'},
     }
     output_map = {
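As with the HTTP wordlist default in _categories.py, the DNS brute-force wordlist default now comes from the config object rather than a constant in secator.definitions. A minimal sketch of the pattern, using only the config paths visible in these diffs; the surrounding dict and key name are illustrative:

from secator.config import CONFIG

opts = {
    # Default wordlists are now centralised under CONFIG.wordlists.defaults.
    'wordlist': {'type': str, 'short': 'w', 'default': CONFIG.wordlists.defaults.dns, 'help': 'Wordlist'},
}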
secator/tasks/ffuf.py
CHANGED
@@ -7,7 +7,7 @@ from secator.definitions import (AUTO_CALIBRATION, CONTENT_LENGTH,
                                  MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED,
                                  PERCENT, PROXY, RATE_LIMIT, RETRIES,
                                  STATUS_CODE, THREADS, TIME, TIMEOUT,
-                                 USER_AGENT, WORDLIST
+                                 USER_AGENT, WORDLIST)
 from secator.output_types import Progress, Url
 from secator.serializers import JSONSerializer, RegexSerializer
 from secator.tasks._categories import HttpFuzzer
@@ -70,7 +70,7 @@ class ffuf(HttpFuzzer):
         },
     }
     encoding = 'ansi'
-    install_cmd =
+    install_cmd = 'go install -v github.com/ffuf/ffuf@latest'
     install_github_handle = 'ffuf/ffuf'
     proxychains = False
     proxy_socks5 = True
secator/tasks/httpx.py
CHANGED
@@ -1,14 +1,14 @@
 import os

 from secator.decorators import task
-from secator.definitions import (DEFAULT_HTTPX_FLAGS,
-                                 DEFAULT_STORE_HTTP_RESPONSES, DELAY, DEPTH,
+from secator.definitions import (DEFAULT_HTTPX_FLAGS, DELAY, DEPTH,
                                  FILTER_CODES, FILTER_REGEX, FILTER_SIZE,
                                  FILTER_WORDS, FOLLOW_REDIRECT, HEADER,
                                  MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
                                  MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED, PROXY,
                                  RATE_LIMIT, RETRIES, THREADS,
                                  TIMEOUT, URL, USER_AGENT)
+from secator.config import CONFIG
 from secator.tasks._categories import Http
 from secator.utils import sanitize_url

@@ -71,7 +71,7 @@ class httpx(Http):
         debug_resp = self.get_opt_value('debug_resp')
         if debug_resp:
             self.cmd = self.cmd.replace('-silent', '')
-        if
+        if CONFIG.http.store_responses:
             self.output_response_path = f'{self.reports_folder}/response'
             self.output_screenshot_path = f'{self.reports_folder}/screenshot'
             os.makedirs(self.output_response_path, exist_ok=True)
@@ -98,7 +98,7 @@ class httpx(Http):

     @staticmethod
     def on_end(self):
-        if
+        if CONFIG.http.store_responses:
             if os.path.exists(self.output_response_path + '/index.txt'):
                 os.remove(self.output_response_path + '/index.txt')
             if os.path.exists(self.output_screenshot_path + '/index.txt'):
secator/tasks/katana.py
CHANGED
@@ -4,7 +4,7 @@ from urllib.parse import urlparse

 from secator.decorators import task
 from secator.definitions import (CONTENT_TYPE, DEFAULT_KATANA_FLAGS,
-
+                                 DELAY, DEPTH,
                                  FILTER_CODES, FILTER_REGEX, FILTER_SIZE,
                                  FILTER_WORDS, FOLLOW_REDIRECT, HEADER, HOST,
                                  MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
@@ -12,6 +12,7 @@ from secator.definitions import (CONTENT_TYPE, DEFAULT_KATANA_FLAGS,
                                  RATE_LIMIT, RETRIES, STATUS_CODE,
                                  STORED_RESPONSE_PATH, TECH,
                                  THREADS, TIME, TIMEOUT, URL, USER_AGENT, WEBSERVER, CONTENT_LENGTH)
+from secator.config import CONFIG
 from secator.output_types import Url, Tag
 from secator.tasks._categories import HttpCrawler

@@ -106,14 +107,14 @@ class katana(HttpCrawler):
         debug_resp = self.get_opt_value('debug_resp')
         if debug_resp:
             self.cmd = self.cmd.replace('-silent', '')
-        if
+        if CONFIG.http.store_responses:
             self.cmd += f' -sr -srd {self.reports_folder}'

     @staticmethod
     def on_item(self, item):
         if not isinstance(item, Url):
             return item
-        if
+        if CONFIG.http.store_responses and os.path.exists(item.stored_response_path):
             with open(item.stored_response_path, 'r', encoding='latin-1') as fin:
                 data = fin.read().splitlines(True)
                 first_line = data[0]
@@ -125,5 +126,5 @@ class katana(HttpCrawler):

     @staticmethod
     def on_end(self):
-        if
+        if CONFIG.http.store_responses and os.path.exists(self.reports_folder + '/index.txt'):
             os.remove(self.reports_folder + '/index.txt')
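Note: both httpx and katana now gate response/screenshot storage on a single runtime setting, CONFIG.http.store_responses, replacing the DEFAULT_STORE_HTTP_RESPONSES constant that httpx previously imported from secator.definitions. A minimal sketch of the shared pattern, assuming dotted access on CONFIG; the reports folder path is an illustrative placeholder:

import os

from secator.config import CONFIG

reports_folder = '/tmp/secator_report'  # illustrative path
if CONFIG.http.store_responses:
    # httpx creates response/screenshot folders itself; katana passes -sr -srd <folder> instead.
    os.makedirs(f'{reports_folder}/response', exist_ok=True)
    os.makedirs(f'{reports_folder}/screenshot', exist_ok=True)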
secator/tasks/msfconsole.py
CHANGED
@@ -5,9 +5,8 @@ import logging
 from rich.panel import Panel

 from secator.decorators import task
-from secator.definitions import (DELAY, FOLLOW_REDIRECT, HEADER, HOST,
-
-                                 DATA_FOLDER, THREADS, TIMEOUT, USER_AGENT)
+from secator.definitions import (DELAY, FOLLOW_REDIRECT, HEADER, HOST, OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT, RETRIES,
+                                 THREADS, TIMEOUT, USER_AGENT)
 from secator.tasks._categories import VulnMulti
 from secator.utils import get_file_timestamp

@@ -84,7 +83,7 @@ class msfconsole(VulnMulti):

         # Make a copy and replace vars inside by env vars passed on the CLI
         timestr = get_file_timestamp()
-        out_path = f'{
+        out_path = f'{self.reports_folder}/.inputs/msfconsole_{timestr}.rc'
         logger.debug(
             f'Writing formatted resource script to new temp file {out_path}'
         )