secator 0.0.1__py3-none-any.whl → 0.3.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of secator might be problematic.

Files changed (68)
  1. secator/.gitignore +162 -0
  2. secator/celery.py +8 -68
  3. secator/cli.py +631 -274
  4. secator/decorators.py +42 -6
  5. secator/definitions.py +104 -33
  6. secator/exporters/csv.py +1 -2
  7. secator/exporters/gdrive.py +1 -1
  8. secator/exporters/json.py +1 -2
  9. secator/exporters/txt.py +1 -2
  10. secator/hooks/mongodb.py +12 -12
  11. secator/installer.py +335 -0
  12. secator/report.py +2 -14
  13. secator/rich.py +3 -10
  14. secator/runners/_base.py +106 -34
  15. secator/runners/_helpers.py +18 -17
  16. secator/runners/command.py +91 -55
  17. secator/runners/scan.py +3 -1
  18. secator/runners/task.py +6 -4
  19. secator/runners/workflow.py +13 -11
  20. secator/tasks/_categories.py +14 -19
  21. secator/tasks/cariddi.py +2 -1
  22. secator/tasks/dalfox.py +2 -0
  23. secator/tasks/dirsearch.py +5 -7
  24. secator/tasks/dnsx.py +1 -0
  25. secator/tasks/dnsxbrute.py +1 -0
  26. secator/tasks/feroxbuster.py +6 -7
  27. secator/tasks/ffuf.py +4 -7
  28. secator/tasks/gau.py +1 -4
  29. secator/tasks/gf.py +2 -1
  30. secator/tasks/gospider.py +1 -0
  31. secator/tasks/grype.py +47 -47
  32. secator/tasks/h8mail.py +5 -6
  33. secator/tasks/httpx.py +24 -18
  34. secator/tasks/katana.py +11 -15
  35. secator/tasks/maigret.py +3 -3
  36. secator/tasks/mapcidr.py +1 -0
  37. secator/tasks/msfconsole.py +3 -1
  38. secator/tasks/naabu.py +2 -1
  39. secator/tasks/nmap.py +14 -17
  40. secator/tasks/nuclei.py +4 -3
  41. secator/tasks/searchsploit.py +4 -2
  42. secator/tasks/subfinder.py +1 -0
  43. secator/tasks/wpscan.py +11 -13
  44. secator/utils.py +64 -82
  45. secator/utils_test.py +3 -2
  46. secator-0.3.5.dist-info/METADATA +411 -0
  47. secator-0.3.5.dist-info/RECORD +100 -0
  48. {secator-0.0.1.dist-info → secator-0.3.5.dist-info}/WHEEL +1 -2
  49. secator-0.0.1.dist-info/METADATA +0 -199
  50. secator-0.0.1.dist-info/RECORD +0 -114
  51. secator-0.0.1.dist-info/top_level.txt +0 -2
  52. tests/__init__.py +0 -0
  53. tests/integration/__init__.py +0 -0
  54. tests/integration/inputs.py +0 -42
  55. tests/integration/outputs.py +0 -392
  56. tests/integration/test_scans.py +0 -82
  57. tests/integration/test_tasks.py +0 -103
  58. tests/integration/test_workflows.py +0 -163
  59. tests/performance/__init__.py +0 -0
  60. tests/performance/loadtester.py +0 -56
  61. tests/unit/__init__.py +0 -0
  62. tests/unit/test_celery.py +0 -39
  63. tests/unit/test_scans.py +0 -0
  64. tests/unit/test_serializers.py +0 -51
  65. tests/unit/test_tasks.py +0 -348
  66. tests/unit/test_workflows.py +0 -96
  67. {secator-0.0.1.dist-info → secator-0.3.5.dist-info}/entry_points.txt +0 -0
  68. {secator-0.0.1.dist-info → secator-0.3.5.dist-info/licenses}/LICENSE +0 -0
secator/tasks/maigret.py CHANGED
@@ -4,7 +4,7 @@ import os
 import re
 
 from secator.decorators import task
-from secator.definitions import (DELAY, EXTRA_DATA, OPT_NOT_SUPPORTED, PROXY,
+from secator.definitions import (DELAY, EXTRA_DATA, OPT_NOT_SUPPORTED, OUTPUT_PATH, PROXY,
                                  RATE_LIMIT, RETRIES, SITE_NAME, THREADS,
                                  TIMEOUT, URL, USERNAME)
 from secator.output_types import UserAccount
@@ -41,7 +41,7 @@ class maigret(ReconUser):
             EXTRA_DATA: lambda x: x['status'].get('ids', {})
         }
     }
-    install_cmd = 'pip3 install maigret'
+    install_cmd = 'pipx install git+https://github.com/soxoj/maigret@6be2f409e58056b1ca8571a8151e53bef107dedc'
    socks5_proxy = True
     profile = 'io'
 
@@ -70,7 +70,7 @@ class maigret(ReconUser):
 
     @staticmethod
     def on_init(self):
-        output_path = self.get_opt_value('output_path')
+        output_path = self.get_opt_value(OUTPUT_PATH)
         self.output_path = output_path
 
     @staticmethod
secator/tasks/mapcidr.py CHANGED
@@ -14,6 +14,7 @@ class mapcidr(ReconIp):
     input_flag = '-cidr'
     file_flag = '-cl'
     install_cmd = 'go install -v github.com/projectdiscovery/mapcidr/cmd/mapcidr@latest'
+    install_github_handle = 'projectdiscovery/mapcidr'
     input_type = CIDR_RANGE
     output_types = [Ip]
     opt_key_map = {
secator/tasks/msfconsole.py CHANGED
@@ -18,6 +18,7 @@ logger = logging.getLogger(__name__)
 class msfconsole(VulnMulti):
     """CLI to access and work with the Metasploit Framework."""
     cmd = 'msfconsole --quiet'
+    version_flag = OPT_NOT_SUPPORTED
     input_type = HOST
     input_chunk_size = 1
     output_types = []
@@ -44,6 +45,7 @@ class msfconsole(VulnMulti):
     }
     encoding = 'ansi'
     ignore_return_code = True
+    # install_cmd = 'wget -O - https://raw.githubusercontent.com/freelabz/secator/main/scripts/msfinstall.sh | sh'
 
     @staticmethod
     def validate_input(self, input):
@@ -133,7 +135,7 @@ class msfconsole(VulnMulti):
     # self.client = MsfRpcClient(pw, ssl=True, **run_opts)
     #
     # # def start_msgrpc(self):
-    # #     code, out = run_command(f'msfrpcd -P {self.password}')
+    # #     code, out = Command.execute(f'msfrpcd -P {self.password}')
     # #     logger.info(out)
     #
     # def get_lhost(self):
secator/tasks/naabu.py CHANGED
@@ -45,7 +45,8 @@ class naabu(ReconPort):
         }
     }
     output_types = [Port]
-    install_cmd = 'sudo apt install -y libpcap-dev && go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@latest'
+    install_cmd = 'sudo apt install -y build-essential libpcap-dev && go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@latest' # noqa: E501
+    install_github_handle = 'projectdiscovery/naabu'
     proxychains = False
     proxy_socks5 = True
     proxy_http = False
secator/tasks/nmap.py CHANGED
@@ -5,16 +5,15 @@ import re
 import xmltodict
 
 from secator.decorators import task
-from secator.definitions import (CONFIDENCE, CVSS_SCORE, DATA_FOLDER, DELAY,
+from secator.definitions import (CONFIDENCE, CVSS_SCORE, DELAY,
                                  DESCRIPTION, EXTRA_DATA, FOLLOW_REDIRECT,
                                  HEADER, HOST, ID, IP, MATCHED_AT, NAME,
-                                 OPT_NOT_SUPPORTED, PORT, PORTS, PROVIDER,
+                                 OPT_NOT_SUPPORTED, OUTPUT_PATH, PORT, PORTS, PROVIDER,
                                  PROXY, RATE_LIMIT, REFERENCE, REFERENCES,
                                  RETRIES, SCRIPT, SERVICE_NAME, STATE, TAGS,
                                  THREADS, TIMEOUT, USER_AGENT)
 from secator.output_types import Exploit, Port, Vulnerability
 from secator.tasks._categories import VulnMulti
-from secator.utils import get_file_timestamp
 
 logger = logging.getLogger(__name__)
 
@@ -31,6 +30,8 @@ class nmap(VulnMulti):
     opts = {
         PORTS: {'type': str, 'help': 'Ports to scan', 'short': 'p'},
         SCRIPT: {'type': str, 'default': 'vulners', 'help': 'NSE scripts'},
+        # 'tcp_connect': {'type': bool, 'short': 'sT', 'default': False, 'help': 'TCP Connect scan'},
+        'tcp_syn_stealth': {'is_flag': True, 'short': 'sS', 'default': False, 'help': 'TCP SYN Stealth'},
         'output_path': {'type': str, 'short': 'oX', 'default': None, 'help': 'Output XML file path'}
     }
     opt_key_map = {
@@ -61,6 +62,14 @@ class nmap(VulnMulti):
     proxy_http = False
     profile = 'io'
 
+    @staticmethod
+    def on_init(self):
+        output_path = self.get_opt_value(OUTPUT_PATH)
+        if not output_path:
+            output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.xml'
+        self.output_path = output_path
+        self.cmd += f' -oX {self.output_path}'
+
     def yielder(self):
         yield from super().yielder()
         if self.return_code != 0:
@@ -86,15 +95,6 @@ class nmap(VulnMulti):
         results['_host'] = self.input
         return nmapData(results)
 
-    @staticmethod
-    def on_init(self):
-        output_path = self.get_opt_value('output_path')
-        if not output_path:
-            timestr = get_file_timestamp()
-            output_path = f'{DATA_FOLDER}/nmap_{timestr}.xml'
-        self.output_path = output_path
-        self.cmd += f' -oX {self.output_path}'
-
 
 class nmapData(dict):
 
@@ -279,7 +279,7 @@ class nmapData(dict):
             vuln_data = VulnMulti.lookup_cve(vuln['id'], cpes=cpes)
             if vuln_data:
                 vuln.update(vuln_data)
-            yield vuln
+            yield vuln
             else:
                 # logger.debug(f'Vulscan provider {provider_name} is not supported YET.')
                 continue
@@ -334,11 +334,8 @@ class nmapData(dict):
             vuln_data = VulnMulti.lookup_cve(vuln_id, cpes=cpes)
             if vuln_data:
                 vuln.update(vuln_data)
-            yield vuln
+            yield vuln
             else:
                 logger.debug(f'Vulners parser for "{vuln_type}" is not implemented YET.')
         else:
             logger.error(f'Unrecognized vulners output: {elems}')
-
-    def _parse_http_csrf_output(self, out, port_data):
-        pass
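Note on the relocated on_init hook above: the XML report path is now derived from the runner's reports_folder and unique_name instead of a timestamped file under DATA_FOLDER. Below is a minimal sketch of that behaviour using a hypothetical stand-in runner; the class name, folder and target are illustrative and not secator's actual Runner API.

# Sketch only: reproduces the on_init logic from the hunk above on a fake runner.
class FakeNmapRunner:
    reports_folder = '/tmp/secator/reports/1'        # hypothetical example value
    unique_name = 'nmap_target_example_com'          # hypothetical example value
    cmd = 'nmap -sV target.example.com'

    def get_opt_value(self, key):
        return None  # simulate the user not passing an explicit output_path

    def on_init(self):
        # same logic as the relocated hook shown in the diff
        output_path = self.get_opt_value('output_path')
        if not output_path:
            output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.xml'
        self.output_path = output_path
        self.cmd += f' -oX {self.output_path}'


runner = FakeNmapRunner()
runner.on_init()
print(runner.cmd)
# nmap -sV target.example.com -oX /tmp/secator/reports/1/.outputs/nmap_target_example_com.xml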
secator/tasks/nuclei.py CHANGED
@@ -21,7 +21,7 @@ class nuclei(VulnMulti):
         'tags': {'type': str, 'help': 'Tags'},
         'exclude_tags': {'type': str, 'short': 'etags', 'help': 'Exclude tags'},
         'exclude_severity': {'type': str, 'short': 'es', 'help': 'Exclude severity'},
-        'template_id': {'type': str, 'short': 'id', 'help': 'Template id'},
+        'template_id': {'type': str, 'short': 'tid', 'help': 'Template id'},
         'debug': {'type': str, 'help': 'Debug mode'},
     }
     opt_key_map = {
@@ -67,7 +67,8 @@ class nuclei(VulnMulti):
         }
     }
     ignore_return_code = True
-    install_cmd = 'go install -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei@latest'
+    install_cmd = 'go install -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei@latest && nuclei update-templates'
+    install_github_handle = 'projectdiscovery/nuclei'
     proxychains = False
     proxy_socks5 = True  # kind of, leaks data when running network / dns templates
     proxy_http = True  # same
@@ -85,7 +86,7 @@ class nuclei(VulnMulti):
         data = {}
         data['data'] = item.get('extracted-results', [])
         data['template_id'] = item['template-id']
-        data['template_url'] = item['template-url']
+        data['template_url'] = item.get('template-url', '')
         return data
 
     @staticmethod
secator/tasks/searchsploit.py CHANGED
@@ -1,6 +1,6 @@
 from secator.decorators import task
 from secator.definitions import (CVES, EXTRA_DATA, ID, MATCHED_AT, NAME,
-                                 PROVIDER, REFERENCE, TAGS)
+                                 PROVIDER, REFERENCE, TAGS, OPT_NOT_SUPPORTED)
 from secator.output_types import Exploit
 from secator.runners import Command
 
@@ -11,6 +11,7 @@ class searchsploit(Command):
     cmd = 'searchsploit'
     input_flag = None
     json_flag = '--json'
+    version_flag = OPT_NOT_SUPPORTED
     opts = {
         'strict': {'short': 's', 'is_flag': True, 'default': False, 'help': 'Strict match'}
     }
@@ -26,7 +27,8 @@ class searchsploit(Command):
             EXTRA_DATA: lambda x: {'verified': x['Verified']}
         }
     }
-    install_cmd = 'sudo snap install searchsploit'
+    install_cmd = 'sudo git clone https://gitlab.com/exploit-database/exploitdb.git /opt/exploitdb || true && sudo ln -sf /opt/exploitdb/searchsploit /usr/local/bin/searchsploit' # noqa: E501
+    install_github_handle = 'rad10/SearchSploit.py'
     proxychains = False
     proxy_socks5 = False
     proxy_http = False
secator/tasks/subfinder.py CHANGED
@@ -30,6 +30,7 @@ class subfinder(ReconDns):
     }
     output_types = [Subdomain]
     install_cmd = 'go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest'
+    install_github_handle = 'projectdiscovery/subfinder'
     proxychains = False
     proxy_http = True
     proxy_socks5 = False
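Several tasks in this release (mapcidr, naabu, nuclei, subfinder, searchsploit) gain an install_github_handle attribute alongside the new secator/installer.py. The sketch below is only a guess at the kind of lookup such a handle enables, namely resolving "owner/repo" to the latest release via the public GitHub REST API; it is not secator's actual installer code.

# Illustrative only: resolve a GitHub handle like 'projectdiscovery/subfinder'
# to its latest release tag using the public GitHub releases API.
import json
import urllib.request


def latest_release_tag(handle: str) -> str:
    url = f'https://api.github.com/repos/{handle}/releases/latest'
    with urllib.request.urlopen(url) as resp:  # plain GET against a public API
        data = json.load(resp)
    return data['tag_name']


if __name__ == '__main__':
    print(latest_release_tag('projectdiscovery/subfinder'))  # e.g. a 'v2.x.y' tag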
secator/tasks/wpscan.py CHANGED
@@ -4,13 +4,12 @@ import os
 from secator.decorators import task
 from secator.definitions import (CONFIDENCE, CVSS_SCORE, DELAY, DESCRIPTION,
                                  EXTRA_DATA, FOLLOW_REDIRECT, HEADER, ID,
-                                 MATCHED_AT, NAME, OPT_NOT_SUPPORTED, PROVIDER,
+                                 MATCHED_AT, NAME, OPT_NOT_SUPPORTED, OUTPUT_PATH, PROVIDER,
                                  PROXY, RATE_LIMIT, REFERENCES, RETRIES,
-                                 SEVERITY, TAGS, DATA_FOLDER, THREADS, TIMEOUT,
+                                 SEVERITY, TAGS, THREADS, TIMEOUT,
                                  URL, USER_AGENT)
 from secator.output_types import Tag, Vulnerability
 from secator.tasks._categories import VulnHttp
-from secator.utils import get_file_timestamp
 
 
 @task()
@@ -67,22 +66,13 @@ class wpscan(VulnHttp):
         },
     }
     output_types = [Vulnerability, Tag]
-    install_cmd = 'sudo gem install wpscan'
+    install_cmd = 'sudo apt install -y build-essential ruby-dev rubygems && sudo gem install wpscan'
     proxychains = False
     proxy_http = True
     proxy_socks5 = False
     ignore_return_code = True
     profile = 'io'
 
-    @staticmethod
-    def on_init(self):
-        output_path = self.get_opt_value('output_path')
-        if not output_path:
-            timestr = get_file_timestamp()
-            output_path = f'{DATA_FOLDER}/wpscan_{timestr}.json'
-        self.output_path = output_path
-        self.cmd += f' -o {self.output_path}'
-
     def yielder(self):
         prev = self.print_item_count
         self.print_item_count = False
@@ -177,3 +167,11 @@ class wpscan(VulnHttp):
         )
 
         self.print_item_count = prev
+
+    @staticmethod
+    def on_init(self):
+        output_path = self.get_opt_value(OUTPUT_PATH)
+        if not output_path:
+            output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json'
+        self.output_path = output_path
+        self.cmd += f' -o {self.output_path}'
secator/utils.py CHANGED
@@ -1,8 +1,6 @@
-import importlib
 import inspect
 import itertools
 import logging
-import mimetypes
 import operator
 import os
 import re
@@ -14,18 +12,21 @@ from importlib import import_module
 from inspect import isclass
 from pathlib import Path
 from pkgutil import iter_modules
-from urllib.parse import urlparse
+from urllib.parse import urlparse, quote
 
-import netifaces
+
+import ifaddr
 import yaml
-from furl import furl
 from rich.markdown import Markdown
 
-from secator.definitions import DEFAULT_STDIN_TIMEOUT, DEBUG, DEBUG_COMPONENT
+from secator.definitions import (DEBUG, DEBUG_COMPONENT, DEFAULT_STDIN_TIMEOUT, VERSION, DEV_PACKAGE, ROOT_FOLDER,
+                                 LIB_FOLDER)
 from secator.rich import console
 
 logger = logging.getLogger(__name__)
 
+_tasks = []
+
 
 class TaskError(ValueError):
     pass
@@ -107,50 +108,6 @@ def sanitize_url(http_url):
     return url.geturl().rstrip('/')
 
 
-def match_extensions(response, allowed_ext=['.html']):
-    """Check if a URL is a file from the HTTP response by looking at the content_type and the URL.
-
-    Args:
-        response (dict): httpx response.
-
-    Returns:
-        bool: True if is a file, False otherwise.
-    """
-    content_type = response.get('content_type', '').split(';')[0]
-    url = response.get('final_url') or response['url']
-    ext = mimetypes.guess_extension(content_type)
-    ext2 = os.path.splitext(urlparse(url).path)[1]
-    if (ext and ext in allowed_ext) or (ext2 and ext2 in allowed_ext):
-        return True
-    return False
-
-
-def filter_urls(urls, **remove_parts):
-    """Filter a list of URLs using `furl`.
-
-    Args:
-        urls (list): List of URLs to filter.
-        remove_parts (dict): Dict of URL pieces to remove.
-
-    Example:
-        >>> urls = ['http://localhost/test.js', 'http://localhost/test?a=1&b=2']
-        >>> filter_urls(urls, filter_ext=True)
-        ['http://localhost/test']
-
-    Returns:
-        list: List of filtered URLs.
-    """
-    if not remove_parts:
-        return urls
-    furl_remove_args = {
-        k.replace('remove_', ''): v for k, v in remove_parts.items()
-    }
-    return [
-        sanitize_url(furl(url).remove(**furl_remove_args).url)
-        for url in urls
-    ]
-
-
 def deduplicate(array, attr=None):
     """Deduplicate list of OutputType items.
 
@@ -172,17 +129,6 @@ def deduplicate(array, attr=None):
     return sorted(list(dict.fromkeys(array)))
 
 
-def setup_logger(level='info', format='%(message)s'):
-    logger = logging.getLogger('secator')
-    level = logging.getLevelName(level.upper())
-    logger.setLevel(level)
-    handler = logging.StreamHandler()
-    formatter = logging.Formatter(format)
-    handler.setFormatter(formatter)
-    logger.addHandler(handler)
-    return logger
-
-
 def discover_internal_tasks():
     """Find internal secator tasks."""
     from secator.runners import Runner
@@ -193,7 +139,9 @@ def discover_internal_tasks():
             continue
         try:
             module = import_module(f'secator.tasks.{module_name}')
-        except ImportError:
+        except ImportError as e:
+            console.print(f'[bold red]Could not import secator.tasks.{module_name}:[/]')
+            console.print(f'\t[bold red]{type(e).__name__}[/]: {str(e)}')
             continue
         for attribute_name in dir(module):
             attribute = getattr(module, attribute_name)
@@ -228,7 +176,10 @@ def discover_external_tasks():
 
 def discover_tasks():
     """Find all secator tasks (internal + external)."""
-    return discover_internal_tasks() + discover_external_tasks()
+    global _tasks
+    if not _tasks:
+        _tasks = discover_internal_tasks() + discover_external_tasks()
+    return _tasks
 
 
 def import_dynamic(cls_path, cls_root='Command'):
@@ -243,7 +194,7 @@ def import_dynamic(cls_path, cls_root='Command'):
     """
     try:
         package, name = cls_path.rsplit(".", maxsplit=1)
-        cls = getattr(importlib.import_module(package), name)
+        cls = getattr(import_module(package), name)
         root_cls = inspect.getmro(cls)[-2]
         if root_cls.__name__ == cls_root:
             return cls
@@ -262,7 +213,7 @@ def get_command_cls(cls_name):
     Returns:
         cls: Class.
     """
-    tasks_classes = discover_internal_tasks() + discover_external_tasks()
+    tasks_classes = discover_tasks()
     for task_cls in tasks_classes:
         if task_cls.__name__ == cls_name:
             return task_cls
@@ -329,12 +280,6 @@ def pluralize(word):
     return f'{word}s'
 
 
-def get_task_name_padding(classes=None):
-    all_tasks = discover_tasks()
-    classes = classes or all_tasks
-    return max([len(cls.__name__) for cls in discover_tasks() if cls in classes]) + 2
-
-
 def load_fixture(name, fixtures_dir, ext=None, only_path=False):
     fixture_path = f'{fixtures_dir}/{name}'
     exts = ['.json', '.txt', '.xml', '.rc']
@@ -358,17 +303,13 @@ def get_file_timestamp():
 
 
 def detect_host(interface=None):
-    ifaces = netifaces.interfaces()
-    host = None
-    for iface in ifaces:
-        addrs = netifaces.ifaddresses(iface)
+    adapters = ifaddr.get_adapters()
+    for adapter in adapters:
+        iface = adapter.name
         if (interface and iface != interface) or iface == 'lo':
             continue
-        host = addrs[netifaces.AF_INET][0]['addr']
-        interface = iface
-        if 'tun' in iface:
-            break
-    return host
+        return adapter.ips[0].ip
+    return None
 
 
 def find_list_item(array, val, key='id', default=None):
@@ -417,9 +358,10 @@ def rich_to_ansi(text):
 
 def debug(msg, sub='', id='', obj=None, obj_after=True, obj_breaklines=False, level=1):
     """Print debug log if DEBUG >= level."""
-    if not DEBUG >= level:
+    debug_comp_empty = DEBUG_COMPONENT == [""] or not DEBUG_COMPONENT
+    if not debug_comp_empty and not any(sub.startswith(s) for s in DEBUG_COMPONENT):
         return
-    if DEBUG_COMPONENT and not any(s.startswith(sub) for s in DEBUG_COMPONENT):
+    elif debug_comp_empty and not DEBUG >= level:
         return
     s = ''
     if sub:
@@ -443,3 +385,43 @@ def debug(msg, sub='', id='', obj=None, obj_after=True, obj_breaklines=False, level=1):
         s += f' [italic dim white]\[{id}][/] '
     s = rich_to_ansi(f'[dim red]\[debug] {s}[/]')
     print(s)
+
+
+def escape_mongodb_url(url):
+    """Escape username / password from MongoDB URL if any.
+
+    Args:
+        url (str): Full MongoDB URL string.
+
+    Returns:
+        str: Escaped MongoDB URL string.
+    """
+    match = re.search('mongodb://(?P<userpass>.*)@(?P<url>.*)', url)
+    if match:
+        url = match.group('url')
+        user, password = tuple(match.group('userpass').split(':'))
+        user, password = quote(user), quote(password)
+        return f'mongodb://{user}:{password}@{url}'
+    return url
+
+
+def print_version():
+    """Print secator version information."""
+    from secator.installer import get_version_info
+    console.print(f'[bold gold3]Current version[/]: {VERSION}', highlight=False, end='')
+    info = get_version_info('secator', github_handle='freelabz/secator', version=VERSION)
+    latest_version = info['latest_version']
+    status = info['status']
+    location = info['location']
+    if status == 'outdated':
+        console.print('[bold red] (outdated)[/]')
+    else:
+        console.print('')
+    console.print(f'[bold gold3]Latest version[/]: {latest_version}', highlight=False)
+    console.print(f'[bold gold3]Location[/]: {location}')
+    console.print(f'[bold gold3]Python binary[/]: {sys.executable}')
+    if DEV_PACKAGE:
+        console.print(f'[bold gold3]Root folder[/]: {ROOT_FOLDER}')
+        console.print(f'[bold gold3]Lib folder[/]: {LIB_FOLDER}')
+    if status == 'outdated':
+        console.print('[bold red]secator is outdated, run "secator update" to install the latest version.')
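Quick usage check of the new escape_mongodb_url helper added above: it percent-encodes the user/password portion so characters like '@' do not break the connection string. The function body is copied from the hunk; the credentials below are made-up examples.

# Usage check for the escape_mongodb_url helper shown in the diff above.
import re
from urllib.parse import quote


def escape_mongodb_url(url):
    # same body as the helper added in the hunk above
    match = re.search('mongodb://(?P<userpass>.*)@(?P<url>.*)', url)
    if match:
        url = match.group('url')
        user, password = tuple(match.group('userpass').split(':'))
        user, password = quote(user), quote(password)
        return f'mongodb://{user}:{password}@{url}'
    return url


print(escape_mongodb_url('mongodb://admin:p@ssw0rd@localhost:27017'))
# mongodb://admin:p%40ssw0rd@localhost:27017
print(escape_mongodb_url('mongodb://localhost:27017'))
# mongodb://localhost:27017 (no credentials, returned unchanged)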
secator/utils_test.py CHANGED
@@ -49,9 +49,9 @@ if TEST_SCANS:
 else:
     TEST_SCANS = ALL_SCANS
 
-#-------------#
+#-------------------#
 # TEST INPUTS_TASKS #
-#-------------#
+#-------------------#
 INPUTS_TASKS = {
     URL: 'https://fake.com',
     HOST: 'fake.com',
@@ -92,6 +92,7 @@ META_OPTS = {
     'msfconsole.resource': load_fixture('msfconsole_input', FIXTURES_DIR, only_path=True),
     'dirsearch.output_path': load_fixture('dirsearch_output', FIXTURES_DIR, only_path=True),
     'maigret.output_path': load_fixture('maigret_output', FIXTURES_DIR, only_path=True),
+    'nuclei.template_id': 'prometheus-metrics',
     'wpscan.output_path': load_fixture('wpscan_output', FIXTURES_DIR, only_path=True),
     'h8mail.output_path': load_fixture('h8mail_output', FIXTURES_DIR, only_path=True),
     'h8mail.local_breach': load_fixture('h8mail_breach', FIXTURES_DIR, only_path=True)