secator 0.5.2__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of secator might be problematic; see the registry's advisory page for more details.

Files changed (84) hide show
  1. secator/celery.py +160 -185
  2. secator/celery_utils.py +268 -0
  3. secator/cli.py +327 -106
  4. secator/config.py +27 -11
  5. secator/configs/workflows/host_recon.yaml +5 -3
  6. secator/configs/workflows/port_scan.yaml +7 -3
  7. secator/configs/workflows/url_bypass.yaml +10 -0
  8. secator/configs/workflows/url_vuln.yaml +1 -1
  9. secator/decorators.py +169 -92
  10. secator/definitions.py +10 -3
  11. secator/exporters/__init__.py +7 -5
  12. secator/exporters/console.py +10 -0
  13. secator/exporters/csv.py +27 -19
  14. secator/exporters/gdrive.py +16 -11
  15. secator/exporters/json.py +3 -1
  16. secator/exporters/table.py +30 -2
  17. secator/exporters/txt.py +20 -16
  18. secator/hooks/gcs.py +53 -0
  19. secator/hooks/mongodb.py +54 -28
  20. secator/output_types/__init__.py +29 -11
  21. secator/output_types/_base.py +11 -1
  22. secator/output_types/error.py +36 -0
  23. secator/output_types/exploit.py +1 -1
  24. secator/output_types/info.py +24 -0
  25. secator/output_types/ip.py +7 -0
  26. secator/output_types/port.py +8 -1
  27. secator/output_types/progress.py +6 -1
  28. secator/output_types/record.py +3 -1
  29. secator/output_types/stat.py +33 -0
  30. secator/output_types/tag.py +6 -4
  31. secator/output_types/url.py +6 -3
  32. secator/output_types/vulnerability.py +3 -2
  33. secator/output_types/warning.py +24 -0
  34. secator/report.py +55 -23
  35. secator/rich.py +44 -39
  36. secator/runners/_base.py +622 -635
  37. secator/runners/_helpers.py +5 -91
  38. secator/runners/celery.py +18 -0
  39. secator/runners/command.py +364 -211
  40. secator/runners/scan.py +8 -24
  41. secator/runners/task.py +21 -55
  42. secator/runners/workflow.py +41 -40
  43. secator/scans/__init__.py +28 -0
  44. secator/serializers/dataclass.py +6 -0
  45. secator/serializers/json.py +10 -5
  46. secator/serializers/regex.py +12 -4
  47. secator/tasks/_categories.py +6 -3
  48. secator/tasks/bbot.py +293 -0
  49. secator/tasks/bup.py +98 -0
  50. secator/tasks/cariddi.py +38 -49
  51. secator/tasks/dalfox.py +3 -0
  52. secator/tasks/dirsearch.py +12 -23
  53. secator/tasks/dnsx.py +49 -30
  54. secator/tasks/dnsxbrute.py +2 -0
  55. secator/tasks/feroxbuster.py +8 -17
  56. secator/tasks/ffuf.py +3 -2
  57. secator/tasks/fping.py +3 -3
  58. secator/tasks/gau.py +5 -0
  59. secator/tasks/gf.py +2 -2
  60. secator/tasks/gospider.py +4 -0
  61. secator/tasks/grype.py +9 -9
  62. secator/tasks/h8mail.py +31 -41
  63. secator/tasks/httpx.py +58 -21
  64. secator/tasks/katana.py +18 -22
  65. secator/tasks/maigret.py +26 -24
  66. secator/tasks/mapcidr.py +2 -3
  67. secator/tasks/msfconsole.py +4 -16
  68. secator/tasks/naabu.py +3 -1
  69. secator/tasks/nmap.py +50 -35
  70. secator/tasks/nuclei.py +9 -2
  71. secator/tasks/searchsploit.py +17 -9
  72. secator/tasks/subfinder.py +5 -1
  73. secator/tasks/wpscan.py +79 -93
  74. secator/template.py +61 -45
  75. secator/thread.py +24 -0
  76. secator/utils.py +330 -80
  77. secator/utils_test.py +48 -23
  78. secator/workflows/__init__.py +28 -0
  79. {secator-0.5.2.dist-info → secator-0.7.0.dist-info}/METADATA +12 -6
  80. secator-0.7.0.dist-info/RECORD +115 -0
  81. {secator-0.5.2.dist-info → secator-0.7.0.dist-info}/WHEEL +1 -1
  82. secator-0.5.2.dist-info/RECORD +0 -101
  83. {secator-0.5.2.dist-info → secator-0.7.0.dist-info}/entry_points.txt +0 -0
  84. {secator-0.5.2.dist-info → secator-0.7.0.dist-info}/licenses/LICENSE +0 -0
secator/runners/scan.py CHANGED
@@ -5,8 +5,7 @@ from secator.config import CONFIG
5
5
  from secator.runners._base import Runner
6
6
  from secator.runners._helpers import run_extractors
7
7
  from secator.runners.workflow import Workflow
8
- from secator.rich import console
9
- from secator.output_types import Target
8
+ from secator.utils import merge_opts
10
9
 
11
10
  logger = logging.getLogger(__name__)
12
11
 
@@ -24,38 +23,23 @@ class Scan(Runner):
24
23
  """Run scan.
25
24
 
26
25
  Yields:
27
- dict: Item yielded from individual workflow tasks.
26
+ secator.output_types.OutputType: Secator output type.
28
27
  """
29
- # Yield targets
30
- for target in self.targets:
31
- yield Target(name=target, _source=self.config.name, _type='target', _context=self.context)
32
-
33
- # Run workflows
28
+ scan_opts = self.config.options
29
+ self.print_item = False
34
30
  for name, workflow_opts in self.config.workflows.items():
35
31
 
36
32
  # Extract opts and expand targets from previous workflow results
37
- targets, workflow_opts = run_extractors(self.results, workflow_opts or {}, self.targets)
38
- if not targets:
39
- console.log(f'No targets were specified for workflow {name}. Skipping.')
40
- continue
41
-
42
- # Workflow opts
43
- run_opts = self.run_opts.copy()
44
- fmt_opts = {
45
- 'json': run_opts.get('json', False),
46
- 'print_item': False,
47
- 'print_start': True,
48
- 'print_run_summary': True,
49
- 'print_progress': self.sync
50
- }
51
- run_opts.update(fmt_opts)
33
+ targets, workflow_opts = run_extractors(self.results, workflow_opts or {}, self.inputs)
52
34
 
53
35
  # Run workflow
36
+ run_opts = self.run_opts.copy()
37
+ opts = merge_opts(scan_opts, workflow_opts, run_opts)
54
38
  workflow = Workflow(
55
39
  TemplateLoader(name=f'workflows/{name}'),
56
40
  targets,
57
41
  results=[],
58
- run_opts=run_opts,
42
+ run_opts=opts,
59
43
  hooks=self._hooks,
60
44
  context=self.context.copy())
61
45
 
secator/runners/task.py CHANGED
@@ -1,14 +1,15 @@
1
- from secator.definitions import DEBUG
2
- from secator.output_types import Target
3
1
  from secator.config import CONFIG
4
2
  from secator.runners import Runner
5
3
  from secator.utils import discover_tasks
4
+ from secator.celery_utils import CeleryData
5
+ from secator.output_types import Info
6
6
 
7
7
 
8
8
  class Task(Runner):
9
9
  default_exporters = CONFIG.tasks.exporters
10
10
  enable_hooks = False
11
11
 
12
+ @classmethod
12
13
  def delay(cls, *args, **kwargs):
13
14
  from secator.celery import run_task
14
15
  return run_task.apply_async(kwargs={'args': args, 'kwargs': kwargs}, queue='celery')
@@ -16,11 +17,8 @@ class Task(Runner):
16
17
  def yielder(self):
17
18
  """Run task.
18
19
 
19
- Args:
20
- sync (bool): Run in sync mode (main thread). If False, run in Celery worker in distributed mode.
21
-
22
- Returns:
23
- list: List of results.
20
+ Yields:
21
+ secator.output_types.OutputType: Secator output type.
24
22
  """
25
23
  # Get task class
26
24
  task_cls = Task.get_task_class(self.config.name)
@@ -28,26 +26,10 @@ class Task(Runner):
28
26
  # Run opts
29
27
  run_opts = self.run_opts.copy()
30
28
  run_opts.pop('output', None)
31
- dry_run = run_opts.get('show', False)
32
- if dry_run:
33
- self.print_item_count = False
34
-
35
- # Fmt opts
36
- fmt_opts = {
37
- 'json': run_opts.get('json', False),
38
- 'print_cmd': True,
39
- 'print_cmd_prefix': not self.sync,
40
- 'print_input_file': DEBUG > 0,
41
- 'print_item': True,
42
- 'print_item_count': not self.sync and not dry_run,
43
- 'print_line': True
44
- # 'print_line': self.sync and not self.output_quiet,
45
- }
46
- # self.print_item = not self.sync # enable print_item for base Task only if running remote
47
- run_opts.update(fmt_opts)
48
29
 
49
30
  # Set task output types
50
31
  self.output_types = task_cls.output_types
32
+ self.enable_duplicate_check = False
51
33
 
52
34
  # Get hooks
53
35
  hooks = {task_cls: self.hooks}
@@ -56,23 +38,25 @@ class Task(Runner):
56
38
 
57
39
  # Run task
58
40
  if self.sync:
59
- task = task_cls(self.targets, **run_opts)
60
- if dry_run: # don't run
61
- return
41
+ self.print_item = False
42
+ result = task_cls.si(self.inputs, **run_opts)
43
+ results = result.apply().get()
62
44
  else:
63
- self.celery_result = task_cls.delay(self.targets, **run_opts)
64
- task = self.process_live_tasks(
45
+ self.celery_result = task_cls.delay(self.inputs, **run_opts)
46
+ self.add_subtask(self.celery_result.id, self.config.name, self.config.description or '')
47
+ yield Info(
48
+ message=f'Celery task created: {self.celery_result.id}',
49
+ task_id=self.celery_result.id
50
+ )
51
+ results = CeleryData.iter_results(
65
52
  self.celery_result,
66
- description=False,
67
- results_only=True,
68
- print_remote_status=self.print_remote_status)
53
+ ids_map=self.celery_ids_map,
54
+ description=True,
55
+ print_remote_info=False,
56
+ print_remote_title=f'[bold gold3]{self.__class__.__name__.capitalize()}[/] [bold magenta]{self.name}[/] results')
69
57
 
70
58
  # Yield task results
71
- yield from task
72
-
73
- # Yield targets
74
- for target in self.targets:
75
- yield Target(name=target, _source=self.config.name, _type='target', _context=self.context)
59
+ yield from results
76
60
 
77
61
  @staticmethod
78
62
  def get_task_class(name):
@@ -88,21 +72,3 @@ class Task(Runner):
88
72
  if task_cls.__name__ == name:
89
73
  return task_cls
90
74
  raise ValueError(f'Task {name} not found. Aborting.')
91
-
92
- @staticmethod
93
- def get_tasks_from_conf(config):
94
- """Get task names from config. Ignore hierarchy and keywords.
95
-
96
- TODO: Add hierarchy tree / add make flow diagrams.
97
- """
98
- tasks = []
99
- for name, opts in config.items():
100
- if name == '_group':
101
- tasks.extend(Task.get_tasks_from_conf(opts))
102
- elif name == '_chain':
103
- tasks.extend(Task.get_tasks_from_conf(opts))
104
- else:
105
- if '/' in name:
106
- name = name.split('/')[0]
107
- tasks.append(name)
108
- return tasks
@@ -1,9 +1,11 @@
1
- from secator.definitions import DEBUG
2
- from secator.output_types import Target
1
+ import uuid
2
+
3
3
  from secator.config import CONFIG
4
4
  from secator.runners._base import Runner
5
5
  from secator.runners.task import Task
6
6
  from secator.utils import merge_opts
7
+ from secator.celery_utils import CeleryData
8
+ from secator.output_types import Info
7
9
 
8
10
 
9
11
  class Workflow(Runner):
@@ -18,44 +20,39 @@ class Workflow(Runner):
18
20
  def yielder(self):
19
21
  """Run workflow.
20
22
 
21
- Args:
22
- sync (bool): Run in sync mode (main thread). If False, run in Celery worker in distributed mode.
23
-
24
- Returns:
25
- list: List of results.
23
+ Yields:
24
+ secator.output_types.OutputType: Secator output type.
26
25
  """
27
- # Yield targets
28
- for target in self.targets:
29
- yield Target(name=target, _source=self.config.name, _type='target', _context=self.context)
30
-
31
26
  # Task opts
32
- task_run_opts = self.run_opts.copy()
33
- task_fmt_opts = {
34
- 'json': task_run_opts.get('json', False),
35
- 'print_cmd': True,
36
- 'print_cmd_prefix': not self.sync,
37
- 'print_description': self.sync,
38
- 'print_input_file': DEBUG > 0,
39
- 'print_item': True,
40
- 'print_item_count': True,
41
- 'print_line': not self.sync,
42
- 'print_progress': self.sync,
43
- }
44
-
45
- # Construct run opts
46
- task_run_opts['hooks'] = self._hooks.get(Task, {})
47
- task_run_opts.update(task_fmt_opts)
27
+ run_opts = self.run_opts.copy()
28
+ run_opts['hooks'] = self._hooks.get(Task, {})
48
29
 
49
30
  # Build Celery workflow
50
- workflow = self.build_celery_workflow(run_opts=task_run_opts, results=self.results)
31
+ workflow = self.build_celery_workflow(
32
+ run_opts=run_opts,
33
+ results=self.results
34
+ )
35
+ self.celery_ids = list(self.celery_ids_map.keys())
51
36
 
52
37
  # Run Celery workflow and get results
53
38
  if self.sync:
39
+ self.print_item = False
54
40
  results = workflow.apply().get()
55
41
  else:
56
42
  result = workflow()
43
+ self.celery_ids.append(str(result.id))
57
44
  self.celery_result = result
58
- results = self.process_live_tasks(result, results_only=True, print_remote_status=self.print_remote_status)
45
+ yield Info(
46
+ message=f'Celery task created: {self.celery_result.id}',
47
+ task_id=self.celery_result.id
48
+ )
49
+ results = CeleryData.iter_results(
50
+ self.celery_result,
51
+ ids_map=self.celery_ids_map,
52
+ description=True,
53
+ print_remote_info=self.print_remote_info,
54
+ print_remote_title=f'[bold gold3]{self.__class__.__name__.capitalize()}[/] [bold magenta]{self.name}[/] results'
55
+ )
59
56
 
60
57
  # Get workflow results
61
58
  yield from results
@@ -64,44 +61,44 @@ class Workflow(Runner):
64
61
  """"Build Celery workflow.
65
62
 
66
63
  Returns:
67
- celery.chain: Celery task chain.
64
+ tuple(celery.chain, List[str]): Celery task chain, Celery task ids.
68
65
  """
69
66
  from celery import chain
70
67
  from secator.celery import forward_results
71
68
  sigs = self.get_tasks(
72
69
  self.config.tasks.toDict(),
73
- self.targets,
70
+ self.inputs,
74
71
  self.config.options,
75
72
  run_opts)
76
73
  sigs = [forward_results.si(results).set(queue='io')] + sigs + [forward_results.s().set(queue='io')]
77
74
  workflow = chain(*sigs)
78
75
  return workflow
79
76
 
80
- def get_tasks(self, obj, targets, workflow_opts, run_opts):
77
+ def get_tasks(self, config, inputs, workflow_opts, run_opts):
81
78
  """Get tasks recursively as Celery chains / chords.
82
79
 
83
80
  Args:
84
- obj (secator.config.TemplateLoader): Config.
85
- targets (list): List of targets.
81
+ config (dict): Tasks config dict.
82
+ inputs (list): Inputs.
86
83
  workflow_opts (dict): Workflow options.
87
84
  run_opts (dict): Run options.
88
85
  sync (bool): Synchronous mode (chain of tasks, no chords).
89
86
 
90
87
  Returns:
91
- list: List of signatures.
88
+ tuple (List[celery.Signature], List[str]): Celery signatures, Celery task ids.
92
89
  """
93
90
  from celery import chain, chord
94
91
  from secator.celery import forward_results
95
92
  sigs = []
96
- for task_name, task_opts in obj.items():
93
+ for task_name, task_opts in config.items():
97
94
  # Task opts can be None
98
95
  task_opts = task_opts or {}
99
96
 
100
97
  # If it's a group, process the sublevel tasks as a Celery chord.
101
- if task_name == '_group':
98
+ if task_name.startswith('_group'):
102
99
  tasks = self.get_tasks(
103
100
  task_opts,
104
- targets,
101
+ inputs,
105
102
  workflow_opts,
106
103
  run_opts
107
104
  )
@@ -109,7 +106,7 @@ class Workflow(Runner):
109
106
  elif task_name == '_chain':
110
107
  tasks = self.get_tasks(
111
108
  task_opts,
112
- targets,
109
+ inputs,
113
110
  workflow_opts,
114
111
  run_opts
115
112
  )
@@ -125,9 +122,13 @@ class Workflow(Runner):
125
122
  opts['hooks'] = {task: self._hooks.get(Task, {})}
126
123
  opts['context'] = self.context.copy()
127
124
  opts['name'] = task_name
125
+ opts['has_parent'] = True
126
+ opts['skip_if_no_inputs'] = True
128
127
 
129
128
  # Create task signature
130
- sig = task.s(targets, **opts).set(queue=task.profile)
129
+ task_id = str(uuid.uuid4())
130
+ sig = task.s(inputs, **opts).set(queue=task.profile, task_id=task_id)
131
+ self.add_subtask(task_id, task_name, task_opts.get('description', ''))
131
132
  self.output_types.extend(task.output_types)
132
133
  sigs.append(sig)
133
134
  return sigs
@@ -0,0 +1,28 @@
1
+ from secator.cli import ALL_SCANS
2
+
3
+
4
+ def generate_class(config):
5
+ from secator.runners import Workflow
6
+
7
+ class workflow(Workflow):
8
+ def __init__(self, inputs=[], **run_opts):
9
+ hooks = run_opts.pop('hooks', {})
10
+ results = run_opts.pop('results', [])
11
+ context = run_opts.pop('context', {})
12
+ super().__init__(
13
+ config=config,
14
+ inputs=inputs,
15
+ results=results,
16
+ run_opts=run_opts,
17
+ hooks=hooks,
18
+ context=context)
19
+ return workflow, config.name
20
+
21
+
22
+ DYNAMIC_SCANS = {}
23
+ for workflow in ALL_SCANS:
24
+ cls, name = generate_class(workflow)
25
+ DYNAMIC_SCANS[name] = cls
26
+
27
+ globals().update(DYNAMIC_SCANS)
28
+ __all__ = list(DYNAMIC_SCANS)
@@ -1,4 +1,6 @@
1
+ from datetime import date, datetime
1
2
  import json
3
+ from pathlib import PosixPath
2
4
  from secator.output_types import OUTPUT_TYPES
3
5
 
4
6
 
@@ -6,6 +8,10 @@ class DataclassEncoder(json.JSONEncoder):
6
8
  def default(self, obj):
7
9
  if hasattr(obj, 'toDict'):
8
10
  return obj.toDict()
11
+ elif isinstance(obj, PosixPath):
12
+ return str(obj)
13
+ elif isinstance(obj, (datetime, date)):
14
+ return obj.isoformat()
9
15
  else:
10
16
  return json.JSONEncoder.default(self, obj)
11
17
 
@@ -1,15 +1,20 @@
1
- import yaml
1
+ import json
2
2
 
3
3
 
4
4
  class JSONSerializer:
5
5
 
6
+ def __init__(self, strict=False):
7
+ self.strict = strict
8
+
6
9
  def run(self, line):
7
10
  start_index = line.find('{')
8
11
  end_index = line.rfind('}')
9
12
  if start_index == -1 or end_index == -1:
10
- return None
13
+ return
14
+ if start_index != 0 and self.strict:
15
+ return
11
16
  try:
12
17
  json_obj = line[start_index:end_index+1]
13
- return yaml.safe_load(json_obj)
14
- except yaml.YAMLError:
15
- return None
18
+ yield json.loads(json_obj)
19
+ except json.decoder.JSONDecodeError:
20
+ return
@@ -3,15 +3,23 @@ import re
3
3
 
4
4
  class RegexSerializer:
5
5
 
6
- def __init__(self, regex, fields=[]):
6
+ def __init__(self, regex, fields=[], findall=False):
7
7
  self.regex = re.compile(regex)
8
8
  self.fields = fields
9
+ self.findall = findall
9
10
 
10
11
  def run(self, line):
11
- match = self.regex.match(line)
12
+ if self.findall:
13
+ match = self.regex.findall(line)
14
+ yield from match
15
+ return
12
16
  output = {}
17
+ match = self.regex.match(line)
13
18
  if not match:
14
- return None
19
+ return
20
+ if not self.fields:
21
+ yield match.group(0)
22
+ return
15
23
  for field in self.fields:
16
24
  output[field] = match.group(field)
17
- return output
25
+ yield output
@@ -6,7 +6,7 @@ from bs4 import BeautifulSoup
6
6
  from cpe import CPE
7
7
 
8
8
  from secator.definitions import (CIDR_RANGE, CVSS_SCORE, DELAY, DEPTH, DESCRIPTION, FILTER_CODES,
9
- FILTER_REGEX, FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT, HEADER, HOST, ID,
9
+ FILTER_REGEX, FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT, HEADER, HOST, ID, IP,
10
10
  MATCH_CODES, MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, METHOD, NAME, PATH, PROVIDER, PROXY,
11
11
  RATE_LIMIT, REFERENCES, RETRIES, SEVERITY, TAGS, THREADS, TIMEOUT, URL, USER_AGENT,
12
12
  USERNAME, WORDLIST)
@@ -44,7 +44,7 @@ OPTS_HTTP = [
44
44
  ]
45
45
 
46
46
  OPTS_HTTP_CRAWLERS = OPTS_HTTP + [
47
- DEPTH, MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, FILTER_REGEX, FILTER_CODES, FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT,
47
+ DEPTH, MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, FILTER_REGEX, FILTER_CODES, FILTER_SIZE, FILTER_WORDS,
48
48
  MATCH_CODES
49
49
  ]
50
50
 
@@ -106,7 +106,7 @@ class ReconIp(Recon):
106
106
 
107
107
 
108
108
  class ReconPort(Recon):
109
- input_type = HOST
109
+ input_type = IP
110
110
  output_types = [Port]
111
111
 
112
112
 
@@ -203,6 +203,9 @@ class Vuln(Command):
203
203
  resp = requests.get(f'https://cve.circl.lu/api/cve/{cve_id}', timeout=5)
204
204
  resp.raise_for_status()
205
205
  cve_info = resp.json()
206
+ if not cve_info:
207
+ debug(f'Empty response from https://cve.circl.lu/api/cve/{cve_id}.', sub='cve')
208
+ return None
206
209
  except requests.RequestException as e:
207
210
  debug(f'Failed remote query for {cve_id} ({str(e)}).', sub='cve')
208
211
  return None