secator-0.6.0-py3-none-any.whl → secator-0.8.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of secator might be problematic; see the advisory linked from the registry page for more details.

Files changed (90)
  1. secator/celery.py +160 -185
  2. secator/celery_utils.py +268 -0
  3. secator/cli.py +427 -176
  4. secator/config.py +114 -68
  5. secator/configs/workflows/host_recon.yaml +5 -3
  6. secator/configs/workflows/port_scan.yaml +7 -3
  7. secator/configs/workflows/subdomain_recon.yaml +2 -2
  8. secator/configs/workflows/url_bypass.yaml +10 -0
  9. secator/configs/workflows/url_dirsearch.yaml +1 -1
  10. secator/configs/workflows/url_vuln.yaml +1 -1
  11. secator/decorators.py +170 -92
  12. secator/definitions.py +11 -4
  13. secator/exporters/__init__.py +7 -5
  14. secator/exporters/console.py +10 -0
  15. secator/exporters/csv.py +27 -19
  16. secator/exporters/gdrive.py +16 -11
  17. secator/exporters/json.py +3 -1
  18. secator/exporters/table.py +30 -2
  19. secator/exporters/txt.py +20 -16
  20. secator/hooks/gcs.py +53 -0
  21. secator/hooks/mongodb.py +53 -27
  22. secator/installer.py +277 -60
  23. secator/output_types/__init__.py +29 -11
  24. secator/output_types/_base.py +11 -1
  25. secator/output_types/error.py +36 -0
  26. secator/output_types/exploit.py +12 -8
  27. secator/output_types/info.py +24 -0
  28. secator/output_types/ip.py +8 -1
  29. secator/output_types/port.py +9 -2
  30. secator/output_types/progress.py +5 -0
  31. secator/output_types/record.py +5 -3
  32. secator/output_types/stat.py +33 -0
  33. secator/output_types/subdomain.py +1 -1
  34. secator/output_types/tag.py +8 -6
  35. secator/output_types/target.py +2 -2
  36. secator/output_types/url.py +14 -11
  37. secator/output_types/user_account.py +6 -6
  38. secator/output_types/vulnerability.py +8 -6
  39. secator/output_types/warning.py +24 -0
  40. secator/report.py +56 -23
  41. secator/rich.py +44 -39
  42. secator/runners/_base.py +629 -638
  43. secator/runners/_helpers.py +5 -91
  44. secator/runners/celery.py +18 -0
  45. secator/runners/command.py +404 -214
  46. secator/runners/scan.py +8 -24
  47. secator/runners/task.py +21 -55
  48. secator/runners/workflow.py +41 -40
  49. secator/scans/__init__.py +28 -0
  50. secator/serializers/dataclass.py +6 -0
  51. secator/serializers/json.py +10 -5
  52. secator/serializers/regex.py +12 -4
  53. secator/tasks/_categories.py +147 -42
  54. secator/tasks/bbot.py +295 -0
  55. secator/tasks/bup.py +99 -0
  56. secator/tasks/cariddi.py +38 -49
  57. secator/tasks/dalfox.py +3 -0
  58. secator/tasks/dirsearch.py +14 -25
  59. secator/tasks/dnsx.py +49 -30
  60. secator/tasks/dnsxbrute.py +4 -1
  61. secator/tasks/feroxbuster.py +10 -20
  62. secator/tasks/ffuf.py +3 -2
  63. secator/tasks/fping.py +4 -4
  64. secator/tasks/gau.py +5 -0
  65. secator/tasks/gf.py +2 -2
  66. secator/tasks/gospider.py +4 -0
  67. secator/tasks/grype.py +11 -13
  68. secator/tasks/h8mail.py +32 -42
  69. secator/tasks/httpx.py +58 -21
  70. secator/tasks/katana.py +19 -23
  71. secator/tasks/maigret.py +27 -25
  72. secator/tasks/mapcidr.py +2 -3
  73. secator/tasks/msfconsole.py +22 -19
  74. secator/tasks/naabu.py +18 -2
  75. secator/tasks/nmap.py +82 -55
  76. secator/tasks/nuclei.py +13 -3
  77. secator/tasks/searchsploit.py +26 -11
  78. secator/tasks/subfinder.py +5 -1
  79. secator/tasks/wpscan.py +91 -94
  80. secator/template.py +61 -45
  81. secator/thread.py +24 -0
  82. secator/utils.py +417 -78
  83. secator/utils_test.py +48 -23
  84. secator/workflows/__init__.py +28 -0
  85. {secator-0.6.0.dist-info → secator-0.8.0.dist-info}/METADATA +59 -48
  86. secator-0.8.0.dist-info/RECORD +115 -0
  87. {secator-0.6.0.dist-info → secator-0.8.0.dist-info}/WHEEL +1 -1
  88. secator-0.6.0.dist-info/RECORD +0 -101
  89. {secator-0.6.0.dist-info → secator-0.8.0.dist-info}/entry_points.txt +0 -0
  90. {secator-0.6.0.dist-info → secator-0.8.0.dist-info}/licenses/LICENSE +0 -0
secator/tasks/httpx.py CHANGED
@@ -1,28 +1,25 @@
1
1
  import os
2
2
 
3
3
  from secator.decorators import task
4
- from secator.definitions import (DEFAULT_HTTPX_FLAGS, DELAY, DEPTH,
5
- FILTER_CODES, FILTER_REGEX, FILTER_SIZE,
6
- FILTER_WORDS, FOLLOW_REDIRECT, HEADER,
7
- MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
8
- MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED, PROXY,
9
- RATE_LIMIT, RETRIES, THREADS,
10
- TIMEOUT, URL, USER_AGENT)
4
+ from secator.definitions import (DELAY, DEPTH, FILTER_CODES, FILTER_REGEX, FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT,
5
+ HEADER, MATCH_CODES, MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED,
6
+ PROXY, RATE_LIMIT, RETRIES, THREADS, TIMEOUT, URL, USER_AGENT)
11
7
  from secator.config import CONFIG
8
+ from secator.output_types import Url, Subdomain
9
+ from secator.serializers import JSONSerializer
12
10
  from secator.tasks._categories import Http
13
- from secator.utils import sanitize_url
11
+ from secator.utils import (sanitize_url, extract_domain_info, extract_subdomains_from_fqdn)
14
12
 
15
13
 
16
14
  @task()
17
15
  class httpx(Http):
18
16
  """Fast and multi-purpose HTTP toolkit."""
19
- cmd = f'httpx {DEFAULT_HTTPX_FLAGS}'
17
+ cmd = 'httpx'
20
18
  file_flag = '-l'
21
19
  input_flag = '-u'
22
20
  json_flag = '-json'
23
21
  opts = {
24
22
  # 'silent': {'is_flag': True, 'default': False, 'help': 'Silent mode'},
25
- # 'td': {'is_flag': True, 'default': True, 'help': 'Tech detection'},
26
23
  # 'irr': {'is_flag': True, 'default': False, 'help': 'Include http request / response'},
27
24
  'fep': {'is_flag': True, 'default': False, 'help': 'Error Page Classifier and Filtering'},
28
25
  'favicon': {'is_flag': True, 'default': False, 'help': 'Favicon hash'},
@@ -35,6 +32,11 @@ class httpx(Http):
35
32
  'screenshot': {'is_flag': True, 'short': 'ss', 'default': False, 'help': 'Screenshot response'},
36
33
  'system_chrome': {'is_flag': True, 'default': False, 'help': 'Use local installed Chrome for screenshot'},
37
34
  'headless_options': {'is_flag': False, 'short': 'ho', 'default': None, 'help': 'Headless Chrome additional options'},
35
+ 'follow_host_redirects': {'is_flag': True, 'short': 'fhr', 'default': None, 'help': 'Follow redirects on the same host'}, # noqa: E501
36
+ 'tech_detect': {'is_flag': True, 'short': 'td', 'default': True, 'help': 'Tech detection'},
37
+ 'tls_grab': {'is_flag': True, 'short': 'tlsg', 'default': False, 'help': 'Grab some informations from the tls certificate'}, # noqa: E501
38
+ 'rstr': {'type': int, 'default': CONFIG.http.response_max_size_bytes, 'help': 'Max body size to read (bytes)'},
39
+ 'rsts': {'type': int, 'default': CONFIG.http.response_max_size_bytes, 'help': 'Max body size to save (bytes)'}
38
40
  }
39
41
  opt_key_map = {
40
42
  HEADER: 'header',
@@ -61,6 +63,8 @@ class httpx(Http):
61
63
  opt_value_map = {
62
64
  DELAY: lambda x: str(x) + 's' if x else None,
63
65
  }
66
+ item_loaders = [JSONSerializer()]
67
+ output_types = [Url, Subdomain]
64
68
  install_cmd = 'go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest'
65
69
  install_github_handle = 'projectdiscovery/httpx'
66
70
  proxychains = False
@@ -79,19 +83,23 @@ class httpx(Http):
79
83
  self.cmd += f' -srd {self.reports_folder}/.outputs'
80
84
  if screenshot:
81
85
  self.cmd += ' -esb -ehb'
86
+ self.domains = []
82
87
 
83
88
  @staticmethod
84
- def on_item_pre_convert(self, item):
85
- for k, v in item.items():
86
- if k == 'time':
87
- response_time = float(''.join(ch for ch in v if not ch.isalpha()))
88
- if v[-2:] == 'ms':
89
- response_time = response_time / 1000
90
- item[k] = response_time
91
- elif k == URL:
92
- item[k] = sanitize_url(v)
93
- item[URL] = item.get('final_url') or item[URL]
94
- return item
89
+ def on_json_loaded(self, item):
90
+ item = self._preprocess_url(item)
91
+ yield item
92
+ tls = item.get('tls', None)
93
+ if tls:
94
+ subject_cn = tls.get('subject_cn', None)
95
+ subject_an = tls.get('subject_an', [])
96
+ cert_domains = subject_an
97
+ if subject_cn:
98
+ cert_domains.append(subject_cn)
99
+ for cert_domain in cert_domains:
100
+ subdomain = self._create_subdomain_from_tls_cert(cert_domain, item['url'])
101
+ if subdomain:
102
+ yield subdomain
95
103
 
96
104
  @staticmethod
97
105
  def on_end(self):
@@ -107,3 +115,32 @@ class httpx(Http):
107
115
  os.remove(index_spath)
108
116
  if os.path.exists(index_spath2):
109
117
  os.remove(index_spath2)
118
+
119
+ def _preprocess_url(self, item):
120
+ """Replace time string by float, sanitize URL, get final redirect URL."""
121
+ for k, v in item.items():
122
+ if k == 'time':
123
+ response_time = float(''.join(ch for ch in v if not ch.isalpha()))
124
+ if v[-2:] == 'ms':
125
+ response_time = response_time / 1000
126
+ item[k] = response_time
127
+ elif k == URL:
128
+ item[k] = sanitize_url(v)
129
+ item[URL] = item.get('final_url') or item[URL]
130
+ return item
131
+
132
+ def _create_subdomain_from_tls_cert(self, domain, url):
133
+ """Extract subdomains from TLS certificate."""
134
+ if domain.startswith('*.'):
135
+ domain = domain.lstrip('*.')
136
+ if domain in self.domains:
137
+ return None
138
+ url_domain = extract_domain_info(url)
139
+ url_domains = extract_subdomains_from_fqdn(url_domain.fqdn, url_domain.domain, url_domain.suffix)
140
+ if not url_domain or domain not in url_domains:
141
+ return None
142
+ self.domains.append(domain)
143
+ return Subdomain(
144
+ host=domain,
145
+ domain=extract_domain_info(domain, domain_only=True)
146
+ )
secator/tasks/katana.py CHANGED
@@ -1,28 +1,22 @@
1
1
  import os
2
- import json
3
2
  from urllib.parse import urlparse
4
3
 
5
4
  from secator.decorators import task
6
- from secator.definitions import (CONTENT_TYPE, DEFAULT_KATANA_FLAGS,
7
- DELAY, DEPTH,
8
- FILTER_CODES, FILTER_REGEX, FILTER_SIZE,
9
- FILTER_WORDS, FOLLOW_REDIRECT, HEADER, HOST,
10
- MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
11
- MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED, PROXY,
12
- RATE_LIMIT, RETRIES, STATUS_CODE,
13
- STORED_RESPONSE_PATH, TECH,
14
- THREADS, TIME, TIMEOUT, URL, USER_AGENT, WEBSERVER, CONTENT_LENGTH)
5
+ from secator.definitions import (CONTENT_TYPE, DELAY, DEPTH, FILTER_CODES, FILTER_REGEX, FILTER_SIZE, FILTER_WORDS,
6
+ FOLLOW_REDIRECT, HEADER, HOST, MATCH_CODES, MATCH_REGEX, MATCH_SIZE, MATCH_WORDS,
7
+ METHOD, OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT, RETRIES, STATUS_CODE,
8
+ STORED_RESPONSE_PATH, TECH, THREADS, TIME, TIMEOUT, URL, USER_AGENT, WEBSERVER,
9
+ CONTENT_LENGTH)
15
10
  from secator.config import CONFIG
16
11
  from secator.output_types import Url, Tag
12
+ from secator.serializers import JSONSerializer
17
13
  from secator.tasks._categories import HttpCrawler
18
14
 
19
15
 
20
16
  @task()
21
17
  class katana(HttpCrawler):
22
18
  """Next-generation crawling and spidering framework."""
23
- # TODO: add -fx for form detection and extract 'forms' from the output with custom item_loader
24
- # TODO: add -jsluice for JS parsing
25
- cmd = f'katana {DEFAULT_KATANA_FLAGS}'
19
+ cmd = 'katana'
26
20
  file_flag = '-list'
27
21
  input_flag = '-u'
28
22
  json_flag = '-jsonl'
@@ -30,7 +24,13 @@ class katana(HttpCrawler):
30
24
  'headless': {'is_flag': True, 'short': 'hl', 'help': 'Headless mode'},
31
25
  'system_chrome': {'is_flag': True, 'short': 'sc', 'help': 'Use local installed chrome browser'},
32
26
  'form_extraction': {'is_flag': True, 'short': 'fx', 'help': 'Detect forms'},
33
- 'store_responses': {'is_flag': True, 'short': 'sr', 'default': CONFIG.http.store_responses, 'help': 'Store responses'}
27
+ 'store_responses': {'is_flag': True, 'short': 'sr', 'default': CONFIG.http.store_responses, 'help': 'Store responses'}, # noqa: E501
28
+ 'form_fill': {'is_flag': True, 'short': 'ff', 'help': 'Enable form filling'},
29
+ 'js_crawl': {'is_flag': True, 'short': 'jc', 'default': True, 'help': 'Enable endpoint parsing / crawling in javascript file'}, # noqa: E501
30
+ 'jsluice': {'is_flag': True, 'short': 'jsl', 'default': True, 'help': 'Enable jsluice parsing in javascript file (memory intensive)'}, # noqa: E501
31
+ 'known_files': {'type': str, 'short': 'kf', 'default': 'all', 'help': 'Enable crawling of known files (all, robotstxt, sitemapxml)'}, # noqa: E501
32
+ 'omit_raw': {'is_flag': True, 'short': 'or', 'default': True, 'help': 'Omit raw requests/responses from jsonl output'}, # noqa: E501
33
+ 'omit_body': {'is_flag': True, 'short': 'ob', 'default': True, 'help': 'Omit response body from jsonl output'}
34
34
  }
35
35
  opt_key_map = {
36
36
  HEADER: 'headers',
@@ -52,11 +52,13 @@ class katana(HttpCrawler):
52
52
  THREADS: 'concurrency',
53
53
  TIMEOUT: 'timeout',
54
54
  USER_AGENT: OPT_NOT_SUPPORTED,
55
- 'store_responses': 'sr'
55
+ 'store_responses': 'sr',
56
+ 'form_fill': 'aff'
56
57
  }
57
58
  opt_value_map = {
58
59
  DELAY: lambda x: int(x) if isinstance(x, float) else x
59
60
  }
61
+ item_loaders = [JSONSerializer()]
60
62
  output_map = {
61
63
  Url: {
62
64
  URL: lambda x: x['request']['endpoint'],
@@ -72,8 +74,7 @@ class katana(HttpCrawler):
72
74
  # TAGS: lambda x: x['response'].get('server')
73
75
  }
74
76
  }
75
- item_loaders = []
76
- install_cmd = 'sudo apt install build-essential && go install -v github.com/projectdiscovery/katana/cmd/katana@latest'
77
+ install_cmd = 'go install -v github.com/projectdiscovery/katana/cmd/katana@latest'
77
78
  install_github_handle = 'projectdiscovery/katana'
78
79
  proxychains = False
79
80
  proxy_socks5 = True
@@ -81,12 +82,7 @@ class katana(HttpCrawler):
81
82
  profile = 'io'
82
83
 
83
84
  @staticmethod
84
- def item_loader(self, item):
85
- try:
86
- item = json.loads(item)
87
- except json.JSONDecodeError:
88
- return None
89
-
85
+ def on_json_loaded(self, item):
90
86
  # form detection
91
87
  forms = item.get('response', {}).get('forms', [])
92
88
  if forms:
secator/tasks/maigret.py CHANGED
@@ -7,7 +7,7 @@ from secator.decorators import task
7
7
  from secator.definitions import (DELAY, EXTRA_DATA, OPT_NOT_SUPPORTED, OUTPUT_PATH, PROXY,
8
8
  RATE_LIMIT, RETRIES, SITE_NAME, THREADS,
9
9
  TIMEOUT, URL, USERNAME)
10
- from secator.output_types import UserAccount
10
+ from secator.output_types import UserAccount, Info, Error
11
11
  from secator.tasks._categories import ReconUser
12
12
 
13
13
  logger = logging.getLogger(__name__)
@@ -41,38 +41,40 @@ class maigret(ReconUser):
41
41
  EXTRA_DATA: lambda x: x['status'].get('ids', {})
42
42
  }
43
43
  }
44
- install_cmd = 'pipx install git+https://github.com/soxoj/maigret@6be2f409e58056b1ca8571a8151e53bef107dedc'
44
+ install_cmd = 'pipx install git+https://github.com/soxoj/maigret'
45
45
  socks5_proxy = True
46
46
  profile = 'io'
47
47
 
48
- def yielder(self):
49
- prev = self.print_item_count
50
- self.print_item_count = False
51
- yield from super().yielder()
52
- if self.return_code != 0:
53
- return
54
- self.results = []
48
+ @staticmethod
49
+ def on_init(self):
50
+ self.output_path = self.get_opt_value(OUTPUT_PATH)
51
+
52
+ @staticmethod
53
+ def on_cmd_done(self):
54
+ # Search output path in cmd output
55
55
  if not self.output_path:
56
- match = re.search('JSON ndjson report for .* saved in (.*)', self.output)
57
- if match is None:
58
- logger.warning('JSON output file not found in command output.')
56
+ matches = re.findall('JSON ndjson report for .* saved in (.*)', self.output)
57
+ if not matches:
58
+ yield Error(message='JSON output file not found in command output.')
59
59
  return
60
- self.output_path = match.group(1)
61
- note = f'maigret JSON results saved to {self.output_path}'
62
- if self.print_line:
63
- self._print(note)
64
- if os.path.exists(self.output_path):
65
- with open(self.output_path, 'r') as f:
60
+ self.output_path = matches
61
+
62
+ if not isinstance(self.output_path, list):
63
+ self.output_path = [self.output_path]
64
+
65
+ for path in self.output_path:
66
+ if not os.path.exists(path):
67
+ yield Error(message=f'Could not find JSON results in {path}')
68
+ return
69
+
70
+ yield Info(message=f'JSON results saved to {path}')
71
+ with open(path, 'r') as f:
66
72
  data = [json.loads(line) for line in f.read().splitlines()]
67
73
  for item in data:
68
74
  yield item
69
- self.print_item_count = prev
70
-
71
- @staticmethod
72
- def on_init(self):
73
- output_path = self.get_opt_value(OUTPUT_PATH)
74
- self.output_path = output_path
75
75
 
76
76
  @staticmethod
77
77
  def validate_item(self, item):
78
- return item['http_status'] == 200
78
+ if isinstance(item, dict):
79
+ return item['http_status'] == 200
80
+ return True
secator/tasks/mapcidr.py CHANGED
@@ -23,11 +23,10 @@ class mapcidr(ReconIp):
23
23
  RATE_LIMIT: OPT_NOT_SUPPORTED,
24
24
  RETRIES: OPT_NOT_SUPPORTED,
25
25
  TIMEOUT: OPT_NOT_SUPPORTED,
26
- THREADS: OPT_NOT_SUPPORTED,
27
26
  }
28
27
 
29
28
  @staticmethod
30
29
  def item_loader(self, line):
31
30
  if validators.ipv4(line) or validators.ipv6(line):
32
- return {'ip': line, 'alive': False}
33
- return None
31
+ yield {'ip': line, 'alive': False}
32
+ return
@@ -4,6 +4,7 @@ import logging
4
4
 
5
5
  from rich.panel import Panel
6
6
 
7
+ from secator.config import CONFIG
7
8
  from secator.decorators import task
8
9
  from secator.definitions import (DELAY, FOLLOW_REDIRECT, HEADER, HOST, OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT, RETRIES,
9
10
  THREADS, TIMEOUT, USER_AGENT)
@@ -21,7 +22,6 @@ class msfconsole(VulnMulti):
21
22
  input_type = HOST
22
23
  input_chunk_size = 1
23
24
  output_types = []
24
- output_return_type = str
25
25
  opt_prefix = '--'
26
26
  opts = {
27
27
  'resource': {'type': str, 'help': 'Metasploit resource script.', 'short': 'r'},
@@ -40,18 +40,24 @@ class msfconsole(VulnMulti):
40
40
  THREADS: OPT_NOT_SUPPORTED,
41
41
  TIMEOUT: OPT_NOT_SUPPORTED,
42
42
  USER_AGENT: OPT_NOT_SUPPORTED,
43
- THREADS: OPT_NOT_SUPPORTED,
44
43
  }
45
44
  encoding = 'ansi'
46
45
  ignore_return_code = True
47
- # install_cmd = 'wget -O - https://raw.githubusercontent.com/freelabz/secator/main/scripts/msfinstall.sh | sh'
48
-
49
- @staticmethod
50
- def validate_input(self, input):
51
- """No list input supported for this command. Pass a single input instead."""
52
- if isinstance(input, list):
53
- return False
54
- return True
46
+ install_pre = {
47
+ 'apt|apk': ['libpq-dev', 'libpcap-dev', 'libffi-dev'],
48
+ 'pacman': ['ruby-erb', 'postgresql-libs'],
49
+ 'yum|zypper': ['postgresql-devel'],
50
+ }
51
+ install_cmd = (
52
+ f'git clone https://github.com/rapid7/metasploit-framework.git {CONFIG.dirs.share}/metasploit-framework || true && '
53
+ f'cd {CONFIG.dirs.share}/metasploit-framework && '
54
+ f'gem install bundler --user-install -n {CONFIG.dirs.bin} && '
55
+ f'gem install xmlrpc --user-install -n {CONFIG.dirs.bin} && '
56
+ f'bundle config set --local path "{CONFIG.dirs.share}" && '
57
+ 'bundle update --bundler && '
58
+ 'bundle install && '
59
+ f'ln -sf $HOME/.local/share/metasploit-framework/msfconsole {CONFIG.dirs.bin}/msfconsole'
60
+ )
55
61
 
56
62
  @staticmethod
57
63
  def on_init(self):
@@ -61,14 +67,14 @@ class msfconsole(VulnMulti):
61
67
  env_vars = {}
62
68
  if environment:
63
69
  env_vars = dict(map(lambda x: x.split('='), environment.strip().split(',')))
64
- env_vars['RHOST'] = self.input
65
- env_vars['RHOSTS'] = self.input
70
+ env_vars['RHOST'] = self.inputs[0]
71
+ env_vars['RHOSTS'] = self.inputs[0]
66
72
 
67
73
  # Passing msfconsole command directly, simply add RHOST / RHOSTS from host input and run then exit
68
74
  if command:
69
75
  self.run_opts['msfconsole.execute_command'] = (
70
- f'setg RHOST {self.input}; '
71
- f'setg RHOSTS {self.input}; '
76
+ f'setg RHOST {self.inputs[0]}; '
77
+ f'setg RHOSTS {self.inputs[0]}; '
72
78
  f'{command.format(**env_vars)}; '
73
79
  f'exit;'
74
80
  )
@@ -92,17 +98,14 @@ class msfconsole(VulnMulti):
92
98
  f.write(content)
93
99
 
94
100
  script_name = script_path.split('/')[-1]
95
- self._print(Panel(content, title=f'[bold magenta]{script_name}', expand=False))
101
+ self._print(Panel(content, title=f'[bold magenta]{script_name}', expand=False), rich=True)
96
102
 
97
103
  # Override original command with new resource script
98
104
  self.run_opts['msfconsole.resource'] = out_path
99
105
 
100
106
  # Nothing passed, error out
101
107
  else:
102
- raise ValueError('At least one of "inline_script" or "resource_script" must be passed.')
103
-
104
- # Clear host input
105
- self.input = ''
108
+ raise ValueError('At least one of "execute_command" or "resource" must be passed.')
106
109
 
107
110
 
108
111
  # TODO: This is better as it goes through an RPC API to communicate with
secator/tasks/naabu.py CHANGED
@@ -3,6 +3,7 @@ from secator.definitions import (DELAY, HOST, OPT_NOT_SUPPORTED, PORT, PORTS,
3
3
  PROXY, RATE_LIMIT, RETRIES, STATE, THREADS,
4
4
  TIMEOUT, TOP_PORTS)
5
5
  from secator.output_types import Port
6
+ from secator.serializers import JSONSerializer
6
7
  from secator.tasks._categories import ReconPort
7
8
 
8
9
 
@@ -16,7 +17,7 @@ class naabu(ReconPort):
16
17
  opts = {
17
18
  PORTS: {'type': str, 'short': 'p', 'help': 'Ports'},
18
19
  TOP_PORTS: {'type': str, 'short': 'tp', 'help': 'Top ports'},
19
- 'scan_type': {'type': str, 'help': 'Scan type (SYN (s)/CONNECT(c))'},
20
+ 'scan_type': {'type': str, 'short': 'st', 'help': 'Scan type (SYN (s)/CONNECT(c))'},
20
21
  # 'health_check': {'is_flag': True, 'short': 'hc', 'help': 'Health check'}
21
22
  }
22
23
  opt_key_map = {
@@ -37,6 +38,7 @@ class naabu(ReconPort):
37
38
  RETRIES: lambda x: 1 if x == 0 else x,
38
39
  PROXY: lambda x: x.replace('socks5://', '')
39
40
  }
41
+ item_loaders = [JSONSerializer()]
40
42
  output_map = {
41
43
  Port: {
42
44
  PORT: lambda x: x['port'],
@@ -45,9 +47,23 @@ class naabu(ReconPort):
45
47
  }
46
48
  }
47
49
  output_types = [Port]
48
- install_cmd = 'sudo apt install -y build-essential libpcap-dev && go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@latest' # noqa: E501
50
+ install_cmd = 'go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@latest'
49
51
  install_github_handle = 'projectdiscovery/naabu'
52
+ install_pre = {'apt|apk': ['libpcap-dev'], 'pacman|brew': ['libpcap']}
53
+ install_post = {'arch|alpine': 'sudo ln -s /usr/lib/libpcap.so /usr/lib/libpcap.so.0.8'}
50
54
  proxychains = False
51
55
  proxy_socks5 = True
52
56
  proxy_http = False
53
57
  profile = 'io'
58
+
59
+ @staticmethod
60
+ def before_init(self):
61
+ for ix, input in enumerate(self.inputs):
62
+ if input == 'localhost':
63
+ self.inputs[ix] = '127.0.0.1'
64
+
65
+ @staticmethod
66
+ def on_item(self, item):
67
+ if item.host == '127.0.0.1':
68
+ item.host = 'localhost'
69
+ return item