secator 0.3.6__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their respective public registries. It is provided for informational purposes only.

Potentially problematic release.

secator/runners/command.py CHANGED
@@ -10,19 +10,13 @@ from time import sleep
  
  from fp.fp import FreeProxy
  
- from secator.config import ConfigLoader
- from secator.definitions import (DEFAULT_HTTP_PROXY,
-                                  DEFAULT_FREEPROXY_TIMEOUT,
-                                  DEFAULT_PROXYCHAINS_COMMAND,
-                                  DEFAULT_SOCKS5_PROXY, OPT_NOT_SUPPORTED,
-                                  OPT_PIPE_INPUT, DEFAULT_INPUT_CHUNK_SIZE)
+ from secator.template import TemplateLoader
+ from secator.definitions import OPT_NOT_SUPPORTED, OPT_PIPE_INPUT
+ from secator.config import CONFIG
  from secator.runners import Runner
  from secator.serializers import JSONSerializer
  from secator.utils import debug
  
- # from rich.markup import escape
- # from rich.text import Text
-
  
  logger = logging.getLogger(__name__)
  
@@ -69,7 +63,7 @@ class Command(Runner):
      input_path = None
  
      # Input chunk size (default None)
-     input_chunk_size = DEFAULT_INPUT_CHUNK_SIZE
+     input_chunk_size = CONFIG.runners.input_chunk_size
  
      # Flag to take a file as input
      file_flag = None
@@ -110,7 +104,7 @@ class Command(Runner):
  
      def __init__(self, input=None, **run_opts):
          # Build runnerconfig on-the-fly
-         config = ConfigLoader(input={
+         config = TemplateLoader(input={
              'name': self.__class__.__name__,
              'type': 'task',
              'description': run_opts.get('description', None)
@@ -270,14 +264,16 @@ class Command(Runner):
          secator.runners.Command: instance of the Command.
          """
          name = name or cmd.split(' ')[0]
-         kwargs['no_process'] = True
+         kwargs['no_process'] = kwargs.get('no_process', True)
          kwargs['print_cmd'] = not kwargs.get('quiet', False)
          kwargs['print_item'] = not kwargs.get('quiet', False)
          kwargs['print_line'] = not kwargs.get('quiet', False)
+         delay_run = kwargs.pop('delay_run', False)
          cmd_instance = type(name, (Command,), {'cmd': cmd})(**kwargs)
          for k, v in cls_attributes.items():
              setattr(cmd_instance, k, v)
-         cmd_instance.run()
+         if not delay_run:
+             cmd_instance.run()
          return cmd_instance
  
      def configure_proxy(self):
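
A note for callers: Command.execute no longer forces no_process=True when the caller supplies a value, and the new delay_run kwarg defers execution to the caller. A minimal usage sketch, assuming only the behaviour visible in the hunk above (the echo command is purely illustrative):

    from secator.runners import Command

    # Default: the dynamic Command subclass is built and run immediately.
    result = Command.execute('echo hello', quiet=True)

    # New in this release: build the instance now, run it explicitly later.
    deferred = Command.execute('echo hello', quiet=True, delay_run=True)
    deferred.run()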
@@ -290,7 +286,7 @@
          opt_key_map = self.opt_key_map
          proxy_opt = opt_key_map.get('proxy', False)
          support_proxy_opt = proxy_opt and proxy_opt != OPT_NOT_SUPPORTED
-         proxychains_flavor = getattr(self, 'proxychains_flavor', DEFAULT_PROXYCHAINS_COMMAND)
+         proxychains_flavor = getattr(self, 'proxychains_flavor', CONFIG.http.proxychains_command)
          proxy = False
  
          if self.proxy in ['auto', 'proxychains'] and self.proxychains:
@@ -298,12 +294,12 @@
              proxy = 'proxychains'
  
          elif self.proxy and support_proxy_opt:
-             if self.proxy in ['auto', 'socks5'] and self.proxy_socks5 and DEFAULT_SOCKS5_PROXY:
-                 proxy = DEFAULT_SOCKS5_PROXY
-             elif self.proxy in ['auto', 'http'] and self.proxy_http and DEFAULT_HTTP_PROXY:
-                 proxy = DEFAULT_HTTP_PROXY
+             if self.proxy in ['auto', 'socks5'] and self.proxy_socks5 and CONFIG.http.socks5_proxy:
+                 proxy = CONFIG.http.socks5_proxy
+             elif self.proxy in ['auto', 'http'] and self.proxy_http and CONFIG.http.http_proxy:
+                 proxy = CONFIG.http.http_proxy
          elif self.proxy == 'random':
-             proxy = FreeProxy(timeout=DEFAULT_FREEPROXY_TIMEOUT, rand=True, anonym=True).get()
+             proxy = FreeProxy(timeout=CONFIG.http.freeproxy_timeout, rand=True, anonym=True).get()
          elif self.proxy.startswith(('http://', 'socks5://')):
              proxy = self.proxy
  
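Proxy defaults now come from the global CONFIG object rather than module-level constants. As a sketch, the elif chain above reduces to the following fallback order (ignoring proxychains and the per-command support flags; the attribute paths are taken from the diff, the function itself is illustrative):

    from secator.config import CONFIG

    def pick_proxy(requested='auto'):
        # Prefer SOCKS5, then HTTP, mirroring the branch order above.
        if requested in ('auto', 'socks5') and CONFIG.http.socks5_proxy:
            return CONFIG.http.socks5_proxy
        if requested in ('auto', 'http') and CONFIG.http.http_proxy:
            return CONFIG.http.http_proxy
        return None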
@@ -354,7 +350,7 @@
          try:
              env = os.environ
              env.update(self.env)
-             process = subprocess.Popen(
+             self.process = subprocess.Popen(
                  command,
                  stdin=subprocess.PIPE if sudo_password else None,
                  stdout=sys.stdout if self.no_capture else subprocess.PIPE,
@@ -366,8 +362,8 @@
  
              # If sudo password is provided, send it to stdin
              if sudo_password:
-                 process.stdin.write(f"{sudo_password}\n")
-                 process.stdin.flush()
+                 self.process.stdin.write(f"{sudo_password}\n")
+                 self.process.stdin.flush()
  
          except FileNotFoundError as e:
              if self.config.name in str(e):
@@ -386,11 +382,11 @@
          try:
              # No capture mode, wait for command to finish and return
              if self.no_capture:
-                 self._wait_for_end(process)
+                 self._wait_for_end()
                  return
  
              # Process the output in real-time
-             for line in iter(lambda: process.stdout.readline(), b''):
+             for line in iter(lambda: self.process.stdout.readline(), b''):
                  sleep(0)  # for async to give up control
                  if not line:
                      break
@@ -430,11 +426,11 @@
                  yield from items
  
          except KeyboardInterrupt:
-             process.kill()
+             self.process.kill()
              self.killed = True
  
          # Retrieve the return code and output
-         self._wait_for_end(process)
+         self._wait_for_end()
  
      def run_item_loaders(self, line):
          """Run item loaders on a string."""
@@ -493,16 +489,16 @@
              self._print("Sudo password verification failed after 3 attempts.")
              return None
  
-     def _wait_for_end(self, process):
+     def _wait_for_end(self):
          """Wait for process to finish and process output and return code."""
-         process.wait()
-         self.return_code = process.returncode
+         self.process.wait()
+         self.return_code = self.process.returncode
  
          if self.no_capture:
              self.output = ''
          else:
              self.output = self.output.strip()
-             process.stdout.close()
+             self.process.stdout.close()
  
          if self.ignore_return_code:
              self.return_code = 0
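
The subprocess handle now lives on self.process instead of a local variable, which is why _wait_for_end drops its process argument and why the KeyboardInterrupt handler above can kill it. It also makes the running process reachable from outside the runner, e.g. (hypothetical supervising code; sleep 60 is illustrative):

    from secator.runners import Command

    cmd = Command.execute('sleep 60', delay_run=True)
    # ... once cmd.run() has spawned the subprocess, other code holding a
    # reference to cmd can inspect or stop it via the standard Popen API:
    # cmd.process.poll(), cmd.process.kill(), etc.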
secator/runners/scan.py CHANGED
@@ -1,7 +1,7 @@
  import logging
  
- from secator.config import ConfigLoader
- from secator.exporters import CsvExporter, JsonExporter
+ from secator.template import TemplateLoader
+ from secator.config import CONFIG
  from secator.runners._base import Runner
  from secator.runners._helpers import run_extractors
  from secator.runners.workflow import Workflow
@@ -13,10 +13,7 @@ logger = logging.getLogger(__name__)
  
  class Scan(Runner):
  
-     default_exporters = [
-         JsonExporter,
-         CsvExporter
-     ]
+     default_exporters = CONFIG.scans.exporters
  
      @classmethod
      def delay(cls, *args, **kwargs):
@@ -55,7 +52,7 @@
  
          # Run workflow
          workflow = Workflow(
-             ConfigLoader(name=f'workflows/{name}'),
+             TemplateLoader(name=f'workflows/{name}'),
              targets,
              results=[],
              run_opts=run_opts,
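
Exporters are no longer hard-coded per runner; Scan (here), Task and Workflow (below) all resolve their defaults from the user configuration. A quick way to inspect the effective values, assuming CONFIG exposes attribute-style access as the dotted paths in this diff suggest:

    from secator.config import CONFIG

    print(CONFIG.scans.exporters)      # scan-level default exporters
    print(CONFIG.workflows.exporters)  # workflow-level default exporters
    print(CONFIG.tasks.exporters)      # task-level default exporters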
secator/runners/task.py CHANGED
@@ -1,11 +1,12 @@
  from secator.definitions import DEBUG
  from secator.output_types import Target
+ from secator.config import CONFIG
  from secator.runners import Runner
  from secator.utils import discover_tasks
  
  
  class Task(Runner):
-     default_exporters = []
+     default_exporters = CONFIG.tasks.exporters
      enable_hooks = False
  
      def delay(cls, *args, **kwargs):
secator/runners/workflow.py CHANGED
@@ -1,6 +1,6 @@
  from secator.definitions import DEBUG
- from secator.exporters import CsvExporter, JsonExporter
  from secator.output_types import Target
+ from secator.config import CONFIG
  from secator.runners._base import Runner
  from secator.runners.task import Task
  from secator.utils import merge_opts
@@ -8,10 +8,7 @@ from secator.utils import merge_opts
  
  class Workflow(Runner):
  
-     default_exporters = [
-         JsonExporter,
-         CsvExporter
-     ]
+     default_exporters = CONFIG.workflows.exporters
  
      @classmethod
      def delay(cls, *args, **kwargs):
@@ -84,7 +81,7 @@ class Workflow(Runner):
          """Get tasks recursively as Celery chains / chords.
  
          Args:
-             obj (secator.config.ConfigLoader): Config.
+             obj (secator.config.TemplateLoader): Config.
              targets (list): List of targets.
              workflow_opts (dict): Workflow options.
              run_opts (dict): Run options.
secator/tasks/_categories.py CHANGED
@@ -1,21 +1,20 @@
  import json
- import logging
  import os
  
  import requests
  from bs4 import BeautifulSoup
  from cpe import CPE
  
- from secator.definitions import (CIDR_RANGE, CONFIDENCE, CVSS_SCORE, DATA_FOLDER, DEFAULT_HTTP_WORDLIST,
-                                  DEFAULT_SKIP_CVE_SEARCH, DELAY, DEPTH, DESCRIPTION, FILTER_CODES, FILTER_REGEX,
-                                  FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT, HEADER, HOST, ID, MATCH_CODES, MATCH_REGEX,
-                                  MATCH_SIZE, MATCH_WORDS, METHOD, NAME, PATH, PROVIDER, PROXY, RATE_LIMIT, REFERENCES,
-                                  RETRIES, SEVERITY, TAGS, THREADS, TIMEOUT, URL, USER_AGENT, USERNAME, WORDLIST)
+ from secator.definitions import (CIDR_RANGE, CVSS_SCORE, DELAY, DEPTH, DESCRIPTION, FILTER_CODES,
+                                  FILTER_REGEX, FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT, HEADER, HOST, ID,
+                                  MATCH_CODES, MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, METHOD, NAME, PATH, PROVIDER, PROXY,
+                                  RATE_LIMIT, REFERENCES, RETRIES, SEVERITY, TAGS, THREADS, TIMEOUT, URL, USER_AGENT,
+                                  USERNAME, WORDLIST)
  from secator.output_types import Ip, Port, Subdomain, Tag, Url, UserAccount, Vulnerability
- from secator.rich import console
+ from secator.config import CONFIG
  from secator.runners import Command
+ from secator.utils import debug
  
- logger = logging.getLogger(__name__)
  
  OPTS = {
      HEADER: {'type': str, 'help': 'Custom header to add to each request in the form "KEY1:VALUE1; KEY2:VALUE2"'},
@@ -37,7 +36,7 @@ OPTS = {
      THREADS: {'type': int, 'help': 'Number of threads to run', 'default': 50},
      TIMEOUT: {'type': int, 'help': 'Request timeout'},
      USER_AGENT: {'type': str, 'short': 'ua', 'help': 'User agent, e.g "Mozilla Firefox 1.0"'},
-     WORDLIST: {'type': str, 'short': 'w', 'default': DEFAULT_HTTP_WORDLIST, 'help': 'Wordlist to use'}
+     WORDLIST: {'type': str, 'short': 'w', 'default': CONFIG.wordlists.defaults.http, 'help': 'Wordlist to use'}
  }
  
  OPTS_HTTP = [
@@ -121,7 +120,7 @@ class Vuln(Command):
  
      @staticmethod
      def lookup_local_cve(cve_id):
-         cve_path = f'{DATA_FOLDER}/cves/{cve_id}.json'
+         cve_path = f'{CONFIG.dirs.data}/cves/{cve_id}.json'
          if os.path.exists(cve_path):
              with open(cve_path, 'r') as f:
                  return json.load(f)
@@ -131,13 +130,54 @@
      # def lookup_exploitdb(exploit_id):
      #     print('looking up exploit')
      #     try:
-     #         cve_info = requests.get(f'https://exploit-db.com/exploits/{exploit_id}', timeout=5).content
-     #         print(cve_info)
-     #     except Exception:
+     #         resp = requests.get(f'https://exploit-db.com/exploits/{exploit_id}', timeout=5)
+     #         resp.raise_for_status()
+     #         content = resp.content
+     #     except requests.RequestException as e:
+     #         debug(f'Failed remote query for {exploit_id} ({str(e)}).', sub='cve')
      #         logger.error(f'Could not fetch exploit info for exploit {exploit_id}. Skipping.')
      #         return None
      #     return cve_info
  
+     @staticmethod
+     def create_cpe_string(product_name, version):
+         """
+         Generate a CPE string for a given product and version.
+ 
+         Args:
+             product_name (str): The name of the product.
+             version (str): The version of the product.
+ 
+         Returns:
+             str: A CPE string formatted according to the CPE 2.3 specification.
+         """
+         cpe_version = "2.3"  # CPE Specification version
+         part = "a"  # 'a' for application
+         vendor = product_name.lower()  # Vendor name, using product name
+         product = product_name.lower()  # Product name
+         version = version  # Product version
+         cpe_string = f"cpe:{cpe_version}:{part}:{vendor}:{product}:{version}:*:*:*:*:*:*:*"
+         return cpe_string
+ 
+     @staticmethod
+     def match_cpes(fs1, fs2):
+         """Check if two CPEs match. Partial matches consisting of <vendor>:<product>:<version> are considered a match.
+ 
+         Args:
+             fs1 (str): Format string 1.
+             fs2 (str): Format string 2.
+ 
+         Returns:
+             bool: True if the two CPEs match, False otherwise.
+         """
+         if fs1 == fs2:
+             return True
+         split_fs1 = fs1.split(':')
+         split_fs2 = fs2.split(':')
+         tup1 = split_fs1[3], split_fs1[4], split_fs1[5]
+         tup2 = split_fs2[3], split_fs2[4], split_fs2[5]
+         return tup1 == tup2
+ 
      @staticmethod
      def lookup_cve(cve_id, cpes=[]):
          """Search for a CVE in local db or using cve.circl.lu and return vulnerability data.
@@ -150,18 +190,21 @@
              dict: vulnerability data.
          """
          cve_info = Vuln.lookup_local_cve(cve_id)
+ 
+         # Online CVE lookup
          if not cve_info:
-             if DEFAULT_SKIP_CVE_SEARCH:
-                 logger.debug(f'{cve_id} not found locally, and DEFAULT_SKIP_CVE_SEARCH is set: ignoring online search.')
+             if CONFIG.runners.skip_cve_search:
+                 debug(f'Skip remote query for {cve_id} since config.runners.skip_cve_search is set.', sub='cve')
+                 return None
+             if CONFIG.offline_mode:
+                 debug(f'Skip remote query for {cve_id} since config.offline_mode is set.', sub='cve')
                  return None
-             # logger.debug(f'{cve_id} not found locally. Use `secator install cves` to install CVEs locally.')
              try:
-                 cve_info = requests.get(f'https://cve.circl.lu/api/cve/{cve_id}', timeout=5).json()
-                 if not cve_info:
-                     console.print(f'Could not fetch CVE info for cve {cve_id}. Skipping.', highlight=False)
-                     return None
-             except Exception:
-                 console.print(f'Could not fetch CVE info for cve {cve_id}. Skipping.', highlight=False)
+                 resp = requests.get(f'https://cve.circl.lu/api/cve/{cve_id}', timeout=5)
+                 resp.raise_for_status()
+                 cve_info = resp.json()
+             except requests.RequestException as e:
+                 debug(f'Failed remote query for {cve_id} ({str(e)}).', sub='cve')
                  return None
  
          # Match the CPE string against the affected products CPE FS strings from the CVE data if a CPE was passed.
@@ -177,14 +220,15 @@
              cpe_fs = cpe_obj.as_fs()
              # cpe_version = cpe_obj.get_version()[0]
              vulnerable_fs = cve_info['vulnerable_product']
-             # logger.debug(f'Matching CPE {cpe} against {len(vulnerable_fs)} vulnerable products for {cve_id}')
              for fs in vulnerable_fs:
-                 if fs == cpe_fs:
-                     # logger.debug(f'Found matching CPE FS {cpe_fs} ! The CPE is vulnerable to CVE {cve_id}')
+                 # debug(f'{cve_id}: Testing {cpe_fs} against {fs}', sub='cve')  # for hardcore debugging
+                 if Vuln.match_cpes(cpe_fs, fs):
+                     debug(f'{cve_id}: CPE match found for {cpe}.', sub='cve')
                      cpe_match = True
                      tags.append('cpe-match')
-             if not cpe_match:
-                 return None
+                     break
+             if not cpe_match:
+                 debug(f'{cve_id}: no CPE match found for {cpe}.', sub='cve')
  
          # Parse CVE id and CVSS
          name = id = cve_info['id']
@@ -223,17 +267,9 @@
          # Set vulnerability severity based on CVSS score
          severity = None
          if cvss:
-             if cvss < 4:
-                 severity = 'low'
-             elif cvss < 7:
-                 severity = 'medium'
-             elif cvss < 9:
-                 severity = 'high'
-             else:
-                 severity = 'critical'
+             severity = Vuln.cvss_to_severity(cvss)
  
          # Set confidence
-         confidence = 'low' if not cpe_match else 'high'
          vuln = {
              ID: id,
              NAME: name,
@@ -243,7 +279,6 @@
              TAGS: tags,
              REFERENCES: [f'https://cve.circl.lu/cve/{id}'] + references,
              DESCRIPTION: description,
-             CONFIDENCE: confidence
          }
          return vuln
  
@@ -257,17 +292,33 @@
          Returns:
              dict: vulnerability data.
          """
-         reference = f'https://github.com/advisories/{ghsa_id}'
-         response = requests.get(reference)
-         soup = BeautifulSoup(response.text, 'lxml')
+         try:
+             resp = requests.get(f'https://github.com/advisories/{ghsa_id}', timeout=5)
+             resp.raise_for_status()
+         except requests.RequestException as e:
+             debug(f'Failed remote query for {ghsa_id} ({str(e)}).', sub='cve')
+             return None
+         soup = BeautifulSoup(resp.text, 'lxml')
          sidebar_items = soup.find_all('div', {'class': 'discussion-sidebar-item'})
          cve_id = sidebar_items[2].find('div').text.strip()
-         data = Vuln.lookup_cve(cve_id)
-         if data:
-             data[TAGS].append('ghsa')
-             return data
+         vuln = Vuln.lookup_cve(cve_id)
+         if vuln:
+             vuln[TAGS].append('ghsa')
+             return vuln
          return None
  
+     @staticmethod
+     def cvss_to_severity(cvss):
+         if cvss < 4:
+             severity = 'low'
+         elif cvss < 7:
+             severity = 'medium'
+         elif cvss < 9:
+             severity = 'high'
+         else:
+             severity = 'critical'
+         return severity
+ 
  
  class VulnHttp(Vuln):
      input_type = HOST
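
The CVSS-to-severity bucketing that lookup_cve previously inlined is now a reusable static method; its output follows directly from the thresholds above:

    Vuln.cvss_to_severity(3.1)  # 'low'      (cvss < 4)
    Vuln.cvss_to_severity(6.5)  # 'medium'   (4 <= cvss < 7)
    Vuln.cvss_to_severity(8.8)  # 'high'     (7 <= cvss < 9)
    Vuln.cvss_to_severity(9.8)  # 'critical' (cvss >= 9)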
secator/tasks/dnsxbrute.py CHANGED
@@ -1,5 +1,6 @@
  from secator.decorators import task
- from secator.definitions import (DEFAULT_DNS_WORDLIST, DOMAIN, HOST, RATE_LIMIT, RETRIES, THREADS, WORDLIST, EXTRA_DATA)
+ from secator.definitions import (DOMAIN, HOST, RATE_LIMIT, RETRIES, THREADS, WORDLIST, EXTRA_DATA)
+ from secator.config import CONFIG
  from secator.output_types import Subdomain
  from secator.tasks._categories import ReconDns
  
@@ -17,7 +18,7 @@ class dnsxbrute(ReconDns):
          THREADS: 'threads',
      }
      opts = {
-         WORDLIST: {'type': str, 'short': 'w', 'default': DEFAULT_DNS_WORDLIST, 'help': 'Wordlist'},
+         WORDLIST: {'type': str, 'short': 'w', 'default': CONFIG.wordlists.defaults.dns, 'help': 'Wordlist'},
          'trace': {'is_flag': True, 'default': False, 'help': 'Perform dns tracing'},
      }
      output_map = {
secator/tasks/ffuf.py CHANGED
@@ -7,7 +7,7 @@ from secator.definitions import (AUTO_CALIBRATION, CONTENT_LENGTH,
                                   MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED,
                                   PERCENT, PROXY, RATE_LIMIT, RETRIES,
                                   STATUS_CODE, THREADS, TIME, TIMEOUT,
-                                  USER_AGENT, WORDLIST, WORDLISTS_FOLDER)
+                                  USER_AGENT, WORDLIST)
  from secator.output_types import Progress, Url
  from secator.serializers import JSONSerializer, RegexSerializer
  from secator.tasks._categories import HttpFuzzer
@@ -70,7 +70,7 @@ class ffuf(HttpFuzzer):
          },
      }
      encoding = 'ansi'
-     install_cmd = f'go install -v github.com/ffuf/ffuf@latest && sudo git clone https://github.com/danielmiessler/SecLists {WORDLISTS_FOLDER}/seclists || true'  # noqa: E501
+     install_cmd = 'go install -v github.com/ffuf/ffuf@latest'
      install_github_handle = 'ffuf/ffuf'
      proxychains = False
      proxy_socks5 = True
secator/tasks/httpx.py CHANGED
@@ -1,14 +1,14 @@
  import os
  
  from secator.decorators import task
- from secator.definitions import (DEFAULT_HTTPX_FLAGS,
-                                  DEFAULT_STORE_HTTP_RESPONSES, DELAY, DEPTH,
+ from secator.definitions import (DEFAULT_HTTPX_FLAGS, DELAY, DEPTH,
                                   FILTER_CODES, FILTER_REGEX, FILTER_SIZE,
                                   FILTER_WORDS, FOLLOW_REDIRECT, HEADER,
                                   MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
                                   MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED, PROXY,
                                   RATE_LIMIT, RETRIES, THREADS,
                                   TIMEOUT, URL, USER_AGENT)
+ from secator.config import CONFIG
  from secator.tasks._categories import Http
  from secator.utils import sanitize_url
  
@@ -71,7 +71,7 @@ class httpx(Http):
          debug_resp = self.get_opt_value('debug_resp')
          if debug_resp:
              self.cmd = self.cmd.replace('-silent', '')
-         if DEFAULT_STORE_HTTP_RESPONSES:
+         if CONFIG.http.store_responses:
              self.output_response_path = f'{self.reports_folder}/response'
              self.output_screenshot_path = f'{self.reports_folder}/screenshot'
              os.makedirs(self.output_response_path, exist_ok=True)
@@ -98,7 +98,7 @@ class httpx(Http):
  
      @staticmethod
      def on_end(self):
-         if DEFAULT_STORE_HTTP_RESPONSES:
+         if CONFIG.http.store_responses:
              if os.path.exists(self.output_response_path + '/index.txt'):
                  os.remove(self.output_response_path + '/index.txt')
              if os.path.exists(self.output_screenshot_path + '/index.txt'):
secator/tasks/katana.py CHANGED
@@ -4,7 +4,7 @@ from urllib.parse import urlparse
  
  from secator.decorators import task
  from secator.definitions import (CONTENT_TYPE, DEFAULT_KATANA_FLAGS,
-                                  DEFAULT_STORE_HTTP_RESPONSES, DELAY, DEPTH,
+                                  DELAY, DEPTH,
                                   FILTER_CODES, FILTER_REGEX, FILTER_SIZE,
                                   FILTER_WORDS, FOLLOW_REDIRECT, HEADER, HOST,
                                   MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
@@ -12,6 +12,7 @@ from secator.definitions import (CONTENT_TYPE, DEFAULT_KATANA_FLAGS,
                                   RATE_LIMIT, RETRIES, STATUS_CODE,
                                   STORED_RESPONSE_PATH, TECH,
                                   THREADS, TIME, TIMEOUT, URL, USER_AGENT, WEBSERVER, CONTENT_LENGTH)
+ from secator.config import CONFIG
  from secator.output_types import Url, Tag
  from secator.tasks._categories import HttpCrawler
  
@@ -106,14 +107,14 @@ class katana(HttpCrawler):
          debug_resp = self.get_opt_value('debug_resp')
          if debug_resp:
              self.cmd = self.cmd.replace('-silent', '')
-         if DEFAULT_STORE_HTTP_RESPONSES:
+         if CONFIG.http.store_responses:
              self.cmd += f' -sr -srd {self.reports_folder}'
  
      @staticmethod
      def on_item(self, item):
          if not isinstance(item, Url):
              return item
-         if DEFAULT_STORE_HTTP_RESPONSES and os.path.exists(item.stored_response_path):
+         if CONFIG.http.store_responses and os.path.exists(item.stored_response_path):
              with open(item.stored_response_path, 'r', encoding='latin-1') as fin:
                  data = fin.read().splitlines(True)
                  first_line = data[0]
@@ -125,5 +126,5 @@ class katana(HttpCrawler):
  
      @staticmethod
      def on_end(self):
-         if DEFAULT_STORE_HTTP_RESPONSES and os.path.exists(self.reports_folder + '/index.txt'):
+         if CONFIG.http.store_responses and os.path.exists(self.reports_folder + '/index.txt'):
              os.remove(self.reports_folder + '/index.txt')
secator/tasks/msfconsole.py CHANGED
@@ -5,9 +5,8 @@
  from rich.panel import Panel
  
  from secator.decorators import task
- from secator.definitions import (DELAY, FOLLOW_REDIRECT, HEADER, HOST,
-                                  OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT, RETRIES,
-                                  DATA_FOLDER, THREADS, TIMEOUT, USER_AGENT)
+ from secator.definitions import (DELAY, FOLLOW_REDIRECT, HEADER, HOST, OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT, RETRIES,
+                                  THREADS, TIMEOUT, USER_AGENT)
  from secator.tasks._categories import VulnMulti
  from secator.utils import get_file_timestamp
  
@@ -84,7 +83,7 @@ class msfconsole(VulnMulti):
  
          # Make a copy and replace vars inside by env vars passed on the CLI
          timestr = get_file_timestamp()
-         out_path = f'{DATA_FOLDER}/msfconsole_{timestr}.rc'
+         out_path = f'{self.reports_folder}/.inputs/msfconsole_{timestr}.rc'
          logger.debug(
              f'Writing formatted resource script to new temp file {out_path}'
          )