secator 0.22.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (150) hide show
  1. secator/.gitignore +162 -0
  2. secator/__init__.py +0 -0
  3. secator/celery.py +453 -0
  4. secator/celery_signals.py +138 -0
  5. secator/celery_utils.py +320 -0
  6. secator/cli.py +2035 -0
  7. secator/cli_helper.py +395 -0
  8. secator/click.py +87 -0
  9. secator/config.py +670 -0
  10. secator/configs/__init__.py +0 -0
  11. secator/configs/profiles/__init__.py +0 -0
  12. secator/configs/profiles/aggressive.yaml +8 -0
  13. secator/configs/profiles/all_ports.yaml +7 -0
  14. secator/configs/profiles/full.yaml +31 -0
  15. secator/configs/profiles/http_headless.yaml +7 -0
  16. secator/configs/profiles/http_record.yaml +8 -0
  17. secator/configs/profiles/insane.yaml +8 -0
  18. secator/configs/profiles/paranoid.yaml +8 -0
  19. secator/configs/profiles/passive.yaml +11 -0
  20. secator/configs/profiles/polite.yaml +8 -0
  21. secator/configs/profiles/sneaky.yaml +8 -0
  22. secator/configs/profiles/tor.yaml +5 -0
  23. secator/configs/scans/__init__.py +0 -0
  24. secator/configs/scans/domain.yaml +31 -0
  25. secator/configs/scans/host.yaml +23 -0
  26. secator/configs/scans/network.yaml +30 -0
  27. secator/configs/scans/subdomain.yaml +27 -0
  28. secator/configs/scans/url.yaml +19 -0
  29. secator/configs/workflows/__init__.py +0 -0
  30. secator/configs/workflows/cidr_recon.yaml +48 -0
  31. secator/configs/workflows/code_scan.yaml +29 -0
  32. secator/configs/workflows/domain_recon.yaml +46 -0
  33. secator/configs/workflows/host_recon.yaml +95 -0
  34. secator/configs/workflows/subdomain_recon.yaml +120 -0
  35. secator/configs/workflows/url_bypass.yaml +15 -0
  36. secator/configs/workflows/url_crawl.yaml +98 -0
  37. secator/configs/workflows/url_dirsearch.yaml +62 -0
  38. secator/configs/workflows/url_fuzz.yaml +68 -0
  39. secator/configs/workflows/url_params_fuzz.yaml +66 -0
  40. secator/configs/workflows/url_secrets_hunt.yaml +23 -0
  41. secator/configs/workflows/url_vuln.yaml +91 -0
  42. secator/configs/workflows/user_hunt.yaml +29 -0
  43. secator/configs/workflows/wordpress.yaml +38 -0
  44. secator/cve.py +718 -0
  45. secator/decorators.py +7 -0
  46. secator/definitions.py +168 -0
  47. secator/exporters/__init__.py +14 -0
  48. secator/exporters/_base.py +3 -0
  49. secator/exporters/console.py +10 -0
  50. secator/exporters/csv.py +37 -0
  51. secator/exporters/gdrive.py +123 -0
  52. secator/exporters/json.py +16 -0
  53. secator/exporters/table.py +36 -0
  54. secator/exporters/txt.py +28 -0
  55. secator/hooks/__init__.py +0 -0
  56. secator/hooks/gcs.py +80 -0
  57. secator/hooks/mongodb.py +281 -0
  58. secator/installer.py +694 -0
  59. secator/loader.py +128 -0
  60. secator/output_types/__init__.py +49 -0
  61. secator/output_types/_base.py +108 -0
  62. secator/output_types/certificate.py +78 -0
  63. secator/output_types/domain.py +50 -0
  64. secator/output_types/error.py +42 -0
  65. secator/output_types/exploit.py +58 -0
  66. secator/output_types/info.py +24 -0
  67. secator/output_types/ip.py +47 -0
  68. secator/output_types/port.py +55 -0
  69. secator/output_types/progress.py +36 -0
  70. secator/output_types/record.py +36 -0
  71. secator/output_types/stat.py +41 -0
  72. secator/output_types/state.py +29 -0
  73. secator/output_types/subdomain.py +45 -0
  74. secator/output_types/tag.py +69 -0
  75. secator/output_types/target.py +38 -0
  76. secator/output_types/url.py +112 -0
  77. secator/output_types/user_account.py +41 -0
  78. secator/output_types/vulnerability.py +101 -0
  79. secator/output_types/warning.py +30 -0
  80. secator/report.py +140 -0
  81. secator/rich.py +130 -0
  82. secator/runners/__init__.py +14 -0
  83. secator/runners/_base.py +1240 -0
  84. secator/runners/_helpers.py +218 -0
  85. secator/runners/celery.py +18 -0
  86. secator/runners/command.py +1178 -0
  87. secator/runners/python.py +126 -0
  88. secator/runners/scan.py +87 -0
  89. secator/runners/task.py +81 -0
  90. secator/runners/workflow.py +168 -0
  91. secator/scans/__init__.py +29 -0
  92. secator/serializers/__init__.py +8 -0
  93. secator/serializers/dataclass.py +39 -0
  94. secator/serializers/json.py +45 -0
  95. secator/serializers/regex.py +25 -0
  96. secator/tasks/__init__.py +8 -0
  97. secator/tasks/_categories.py +487 -0
  98. secator/tasks/arjun.py +113 -0
  99. secator/tasks/arp.py +53 -0
  100. secator/tasks/arpscan.py +70 -0
  101. secator/tasks/bbot.py +372 -0
  102. secator/tasks/bup.py +118 -0
  103. secator/tasks/cariddi.py +193 -0
  104. secator/tasks/dalfox.py +87 -0
  105. secator/tasks/dirsearch.py +84 -0
  106. secator/tasks/dnsx.py +186 -0
  107. secator/tasks/feroxbuster.py +93 -0
  108. secator/tasks/ffuf.py +135 -0
  109. secator/tasks/fping.py +85 -0
  110. secator/tasks/gau.py +102 -0
  111. secator/tasks/getasn.py +60 -0
  112. secator/tasks/gf.py +36 -0
  113. secator/tasks/gitleaks.py +96 -0
  114. secator/tasks/gospider.py +84 -0
  115. secator/tasks/grype.py +109 -0
  116. secator/tasks/h8mail.py +75 -0
  117. secator/tasks/httpx.py +167 -0
  118. secator/tasks/jswhois.py +36 -0
  119. secator/tasks/katana.py +203 -0
  120. secator/tasks/maigret.py +87 -0
  121. secator/tasks/mapcidr.py +42 -0
  122. secator/tasks/msfconsole.py +179 -0
  123. secator/tasks/naabu.py +85 -0
  124. secator/tasks/nmap.py +487 -0
  125. secator/tasks/nuclei.py +151 -0
  126. secator/tasks/search_vulns.py +225 -0
  127. secator/tasks/searchsploit.py +109 -0
  128. secator/tasks/sshaudit.py +299 -0
  129. secator/tasks/subfinder.py +48 -0
  130. secator/tasks/testssl.py +283 -0
  131. secator/tasks/trivy.py +130 -0
  132. secator/tasks/trufflehog.py +240 -0
  133. secator/tasks/urlfinder.py +100 -0
  134. secator/tasks/wafw00f.py +106 -0
  135. secator/tasks/whois.py +34 -0
  136. secator/tasks/wpprobe.py +116 -0
  137. secator/tasks/wpscan.py +202 -0
  138. secator/tasks/x8.py +94 -0
  139. secator/tasks/xurlfind3r.py +83 -0
  140. secator/template.py +294 -0
  141. secator/thread.py +24 -0
  142. secator/tree.py +196 -0
  143. secator/utils.py +922 -0
  144. secator/utils_test.py +297 -0
  145. secator/workflows/__init__.py +29 -0
  146. secator-0.22.0.dist-info/METADATA +447 -0
  147. secator-0.22.0.dist-info/RECORD +150 -0
  148. secator-0.22.0.dist-info/WHEEL +4 -0
  149. secator-0.22.0.dist-info/entry_points.txt +2 -0
  150. secator-0.22.0.dist-info/licenses/LICENSE +60 -0
@@ -0,0 +1,126 @@
1
+ """Python runner for executing custom Python code."""
2
+ import logging
3
+
4
+ from secator.config import CONFIG
5
+ from secator.runners import Runner
6
+ from secator.template import TemplateLoader
7
+
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class PythonRunner(Runner):
	"""Base class for Python-based tasks.

	This runner allows creating tasks that execute custom Python code without
	requiring external command-line tools. Tasks should inherit from this class
	and override the yielder() method.

	Example:
		>>> from secator.decorators import task
		>>> from secator.definitions import HOST
		>>> from secator.output_types import Tag, Url
		>>> from secator.runners import PythonRunner
		>>>
		>>> @task()
		>>> class mytask(PythonRunner):
		...     input_types = [HOST]
		...     output_types = [Tag, Url]
		...     opts = {'option1': {'type': str, 'help': 'An option'}}
		...
		...     def yielder(self):
		...         for target in self.inputs:
		...             yield Url(url=f"http://{target}")
		...             yield Tag(name="scanned", match=target)
	"""
	default_exporters = CONFIG.tasks.exporters
	tags = []
	opts = {}
	profile = 'io'

	def needs_chunking(self, sync):
		"""Python runners never split their inputs into chunks."""
		return False

	def __init__(self, inputs=None, **run_opts):
		"""Initialize PythonRunner.

		Args:
			inputs (list | None): List of inputs to pass to the task. None means
				an empty list. (A None sentinel replaces the previous `inputs=[]`
				default, which shared one mutable list across all instances.)
			**run_opts: Additional runner options.
		"""
		# Fix for mutable default argument: never share a list between instances.
		inputs = inputs if inputs is not None else []

		# Build runner config on-the-fly
		config = TemplateLoader(input={
			'name': self.__class__.__name__,
			'type': 'task',
			'input_types': self.input_types,
			'description': run_opts.get('description', None)
		})

		# Extract run opts
		hooks = run_opts.pop('hooks', {})
		caller = run_opts.get('caller', None)
		results = run_opts.pop('results', [])
		context = run_opts.pop('context', {})
		node_id = context.get('node_id', None)
		node_name = context.get('node_name', None)
		if node_id:
			config.node_id = node_id
		if node_name:
			config.node_name = node_name
		self.skip_if_no_inputs = run_opts.pop('skip_if_no_inputs', False)
		self.enable_validators = run_opts.pop('enable_validators', True)

		# Prepare validators
		input_validators = []
		if not self.skip_if_no_inputs:
			input_validators.append(self._validate_input_nonempty)
		if not caller:
			input_validators.append(self._validate_chunked_input)
		validators = {'validate_input': input_validators}

		# Call super().__init__
		super().__init__(
			config=config,
			inputs=inputs,
			results=results,
			run_opts=run_opts,
			hooks=hooks,
			validators=validators,
			context=context)

	# NOTE(review): the validator docstrings below appear to double as the
	# user-facing validation error messages — kept byte-identical on purpose.
	@staticmethod
	def _validate_input_nonempty(self, inputs):
		"""Input is empty."""
		if self.default_inputs is not None:
			return True
		if not inputs or len(inputs) == 0:
			return False
		return True

	@staticmethod
	def _validate_chunked_input(self, inputs):
		"""Command does not support multiple inputs in non-worker mode. Consider running with a remote worker instead."""
		if len(inputs) > 1:
			return False
		return True

	def yielder(self):
		"""Execute the Python task and yield its results.

		This method should be overridden by subclasses to implement
		the actual task logic.

		Yields:
			OutputType: Results from the Python task.
		"""
		raise NotImplementedError("Subclasses must implement yielder() method")

	@classmethod
	def delay(cls, *args, **kwargs):
		"""Submit task to Celery for async execution on the queue named by cls.profile."""
		from secator.celery import run_command
		kwargs['sync'] = False
		return run_command.apply_async(
			kwargs={'args': args, 'kwargs': kwargs},
			queue=cls.profile if not callable(cls.profile) else cls.profile(kwargs)
		)
@@ -0,0 +1,87 @@
1
+ import logging
2
+ from dotmap import DotMap
3
+
4
+ from secator.config import CONFIG
5
+ from secator.output_types.info import Info
6
+ from secator.runners._base import Runner
7
+ from secator.runners.workflow import Workflow
8
+ from secator.utils import merge_opts
9
+
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
+
14
class Scan(Runner):

	# Default exporters for scan runs, taken from the global config.
	default_exporters = CONFIG.scans.exporters

	@classmethod
	def delay(cls, *args, **kwargs):
		"""Submit this scan for asynchronous execution via Celery."""
		from secator.celery import run_scan
		return run_scan.delay(args=args, kwargs=kwargs)

	def build_celery_workflow(self):
		"""Build Celery workflow for scan execution.

		Converts each workflow listed in the scan config into a chained Celery
		sub-workflow, skipping workflows whose 'if' condition evaluates falsy.

		Returns:
			celery.Signature: Celery task signature.
		"""
		from celery import chain
		from secator.celery import mark_runner_started, mark_runner_completed
		from secator.template import TemplateLoader

		scan_opts = self.config.options

		# Set hooks and reports
		self.enable_hooks = False  # Celery will handle hooks
		self.enable_reports = True  # Workflow will handle reports
		self.print_item = not self.sync

		# Build chain of workflows
		sigs = []
		sig = None
		for name, workflow_opts in self.config.workflows.items():
			# Per-workflow run options: no polling, flagged as child of this scan.
			run_opts = self.run_opts.copy()
			run_opts.pop('profiles', None)
			run_opts['no_poll'] = True
			run_opts['caller'] = 'Scan'
			run_opts['has_parent'] = True
			run_opts['enable_reports'] = False
			run_opts['print_profiles'] = False
			opts = merge_opts(scan_opts, workflow_opts, run_opts)
			# Strip any '/alias' suffix before resolving the workflow template.
			name = name.split('/')[0]
			config = TemplateLoader(name=f'workflow/{name}')
			if not config:
				raise ValueError(f'Workflow {name} not found')

			# Skip workflow if condition is not met
			# NOTE(review): conditions come from scan YAML configs; eval runs with
			# __builtins__ stripped but still assumes trusted config input.
			condition = workflow_opts.pop('if', None) if workflow_opts else None
			local_ns = {'opts': DotMap(opts)}
			if condition and not eval(condition, {"__builtins__": {}}, local_ns):
				self.add_result(Info(message=f'Skipped workflow {name} because condition is not met: {condition}'))
				continue

			# Build workflow
			workflow = Workflow(
				config,
				self.inputs,
				results=self.results,
				run_opts=opts,
				hooks=self._hooks,
				context=self.context.copy()
			)
			celery_workflow = workflow.build_celery_workflow(chain_previous_results=True)
			# Register the workflow's subtasks on the scan so progress can be tracked.
			for task_id, task_info in workflow.celery_ids_map.items():
				self.add_subtask(task_id, task_info['name'], task_info['descr'])
			sigs.append(celery_workflow)

			# Absorb results produced while building the workflow (e.g. skip infos),
			# without re-printing or re-running hooks.
			for result in workflow.results:
				self.add_result(result, print=False, hooks=False)

		# Wrap the workflow chain between start/complete lifecycle markers;
		# returns None when every workflow was skipped.
		if sigs:
			sig = chain(
				mark_runner_started.si([], self).set(queue='results'),
				*sigs,
				mark_runner_completed.s(self).set(queue='results'),
			)
		return sig
@@ -0,0 +1,81 @@
1
+ from secator.config import CONFIG
2
+ from secator.runners import Runner
3
+ from secator.loader import discover_tasks
4
+ from celery import chain
5
+
6
+
7
class Task(Runner):

	# Default exporters for standalone task runs, taken from the global config.
	default_exporters = CONFIG.tasks.exporters

	@classmethod
	def delay(cls, *args, **kwargs):
		"""Submit this task for asynchronous execution on the default 'celery' queue."""
		from secator.celery import run_task
		return run_task.apply_async(kwargs={'args': args, 'kwargs': kwargs}, queue='celery')

	def build_celery_workflow(self):
		"""Build Celery workflow for task execution.

		Returns:
			celery.Signature: Celery task signature (a single-element chain).
		"""
		from secator.celery import run_command

		# Get task class
		task_cls = Task.get_task_class(self.config.name)

		# Run opts: drop options handled at this level before forwarding.
		opts = self.run_opts.copy()
		opts.pop('output', None)
		opts.pop('profiles', None)
		opts.pop('no_poll', False)

		# Set output types
		self.output_types = task_cls.output_types

		# Set hooks and reports
		self.enable_hooks = False  # Celery will handle hooks
		self.enable_reports = True  # Task will handle reports

		# Get hooks
		hooks = self._hooks.get(Task, {})
		opts['hooks'] = hooks
		opts['context'] = self.context.copy()
		opts['reports_folder'] = str(self.reports_folder)

		# Task class will handle those
		opts['enable_reports'] = False
		opts['enable_profiles'] = False
		opts['enable_duplicate_check'] = False
		opts['print_start'] = False
		opts['print_end'] = False
		opts['print_target'] = False
		opts['has_parent'] = False
		opts['skip_if_no_inputs'] = False
		opts['caller'] = 'Task'

		# Create task signature; profile may be a callable deriving the queue from opts.
		profile = task_cls.profile(opts) if callable(task_cls.profile) else task_cls.profile
		sig = run_command.si(self.results, self.config.name, self.inputs, opts).set(queue=profile)
		# freeze() assigns the task id up front so it can be registered before dispatch.
		task_id = sig.freeze().task_id
		self.add_subtask(task_id, self.config.name, self.description)
		return chain(sig)

	@staticmethod
	def get_task_class(name):
		"""Get task class from a name.

		Args:
			name (str): Task name; any '/suffix' part is stripped before lookup.

		Returns:
			type: The matching task class.

		Raises:
			ValueError: If no discovered task matches the name.
		"""
		if '/' in name:
			name = name.split('/')[0]
		tasks_classes = discover_tasks()
		for task_cls in tasks_classes:
			if task_cls.__name__ == name:
				return task_cls
		raise ValueError(f'Task {name} not found. Aborting.')
@@ -0,0 +1,168 @@
1
+ from dotmap import DotMap
2
+
3
+ from secator.config import CONFIG
4
+ from secator.output_types import Info
5
+ from secator.runners._base import Runner
6
+ from secator.runners.task import Task
7
+ from secator.tree import build_runner_tree, walk_runner_tree
8
+ from secator.utils import merge_opts
9
+
10
+
11
class Workflow(Runner):

	# Default exporters for workflow runs, taken from the global config.
	default_exporters = CONFIG.workflows.exporters

	@classmethod
	def delay(cls, *args, **kwargs):
		"""Submit this workflow for asynchronous execution via Celery."""
		from secator.celery import run_workflow
		return run_workflow.delay(args=args, kwargs=kwargs)

	@classmethod
	def s(cls, *args, **kwargs):
		"""Return a Celery signature wrapping this workflow (not yet sent)."""
		from secator.celery import run_workflow
		return run_workflow.s(args=args, kwargs=kwargs)

	def build_celery_workflow(self, chain_previous_results=False):
		"""Build Celery workflow for workflow execution.

		Walks the runner tree built from the workflow config and converts every
		task / group / chain node into the corresponding Celery signature, then
		wraps the result between start/complete lifecycle markers.

		Args:
			chain_previous_results (bool): Chain previous results.

		Returns:
			celery.Signature: Celery task signature.
		"""
		from celery import chain
		from secator.celery import mark_runner_started, mark_runner_completed, forward_results

		# Prepare run options
		opts = self.run_opts.copy()
		opts.pop('output', None)
		opts.pop('no_poll', False)
		opts.pop('print_profiles', False)

		# Set hooks and reports
		self.enable_hooks = False  # Celery will handle hooks
		self.enable_reports = True  # Workflow will handle reports
		self.print_item = not self.sync

		# Get hooks
		hooks = self._hooks.get(Task, {})
		opts['hooks'] = hooks
		opts['context'] = self.context.copy()
		opts['reports_folder'] = str(self.reports_folder)
		opts['enable_reports'] = False  # Workflow will handle reports
		opts['enable_duplicate_check'] = False  # Workflow will handle duplicate check
		opts['has_parent'] = True
		opts['skip_if_no_inputs'] = True
		opts['caller'] = 'Workflow'

		# Remove workflow config prefix from opts
		# (e.g. '<workflow>_timeout' becomes 'timeout').
		for k, v in opts.copy().items():
			if k.startswith(self.config.name + '_'):
				opts[k.replace(self.config.name + '_', '')] = v

		# Remove dynamic opts from parent runner
		opts = {k: v for k, v in opts.items() if k not in self.dynamic_opts}

		# Forward workflow opts to first task if needed
		forwarded_opts = {}
		if chain_previous_results:
			forwarded_opts = self.dynamic_opts

		# Build workflow tree
		tree = build_runner_tree(self.config)
		current_id = tree.root_nodes[0].id
		ix = 0  # running index of built task signatures (shared via closure below)
		sigs = []

		def process_task(node, force=False, parent_ix=None):
			# Convert one tree node into a Celery signature.
			# Group children are only processed when force=True (by the group
			# branch itself), so they are not also appended at top level.
			from celery import chain, group
			from secator.utils import debug
			nonlocal ix
			sig = None

			if node.id is None:
				return

			if node.type == 'task':
				if node.parent.type == 'group' and not force:
					return

				# Skip task if condition is not met
				# NOTE(review): conditions come from workflow YAML configs; eval runs
				# with __builtins__ stripped but still assumes trusted config input.
				condition = node.opts.pop('if', None)
				local_ns = {'opts': DotMap(opts)}
				if condition:
					# debug(f'{node.id} evaluating {condition} with opts {opts}', sub=self.config.name)
					result = eval(condition, {"__builtins__": {}}, local_ns)
					if not result:
						debug(f'{node.id} skipped task because condition is not met: {condition}', sub=self.config.name)
						self.add_result(Info(message=f'Skipped task [bold gold3]{node.name}[/] because condition is not met: [bold green]{condition}[/]'))  # noqa: E501
						return

				# Get task class
				task = Task.get_task_class(node.name)

				# Merge task options (order of priority with overrides)
				task_opts = merge_opts(self.config.default_options.toDict(), node.opts, opts)
				# Only the first task (or first member of a leading group) receives
				# the forwarded dynamic opts when chaining previous results.
				if (ix == 0 or parent_ix == 0) and forwarded_opts:
					task_opts.update(forwarded_opts)

				# Create task signature
				task_opts['name'] = node.name
				task_opts['context'] = self.context.copy()
				task_opts['context']['node_id'] = node.id
				task_opts['context']['ancestor_id'] = None if (ix == 0 or parent_ix == 0) else current_id
				task_opts['aliases'] = [node.id, node.name]
				if task.__name__ != node.name:
					task_opts['aliases'].append(task.__name__)
				profile = task.profile(task_opts) if callable(task.profile) else task.profile
				sig = task.s(self.inputs, **task_opts).set(queue=profile)
				# freeze() assigns the task id up front so it can be registered before dispatch.
				task_id = sig.freeze().task_id
				debug(f'{node.id} sig built ix: {ix}, parent_ix: {parent_ix}', sub=self.config.name)
				# debug(f'{node.id} opts', obj=task_opts, sub=f'workflow.{self.config.name}')
				debug(f'{node.id} ancestor id: {task_opts.get("context", {}).get("ancestor_id")}', sub=self.config.name)
				self.add_subtask(task_id, node.name, task_opts.get('description', ''))
				self.output_types.extend(task.output_types)
				ix += 1

			elif node.type == 'group' and node.children:
				parent_ix = ix
				tasks = [sig for sig in [process_task(child, force=True, parent_ix=parent_ix) for child in node.children] if sig]
				debug(f'{node.id} group built with {len(tasks)} tasks', sub=self.config.name)
				if len(tasks) == 1:
					debug(f'{node.id} downgraded group to task', sub=self.config.name)
					sig = tasks[0]
				elif len(tasks) > 1:
					sig = group(*tasks)
					last_sig = sigs[-1] if sigs else None
					if sig and isinstance(last_sig, group):  # cannot chain 2 groups without bridge task
						debug(f'{node.id} previous is group, adding bridge task forward_results', sub=self.config.name)
						sigs.append(forward_results.s())
				else:
					debug(f'{node.id} group built with 0 tasks', sub=self.config.name)
				ix += 1

			elif node.type == 'chain' and node.children:
				tasks = [sig for sig in [process_task(child, force=True, parent_ix=ix) for child in node.children] if sig]
				sig = chain(*tasks) if tasks else None
				debug(f'{node.id} chain built with {len(tasks)} tasks', sub=self.config.name)
				ix += 1

			# Group members are collected by their group; everything else is appended here.
			if sig and node.parent.type != 'group':
				debug(f'{node.id} added to workflow', sub=self.config.name)
				sigs.append(sig)

			return sig

		walk_runner_tree(tree, process_task)

		# Build workflow chain with lifecycle management
		# .si (immutable) ignores prior results; .s forwards them when chaining.
		start_sig = mark_runner_started.si([], self, enable_hooks=True).set(queue='results')
		if chain_previous_results:
			start_sig = mark_runner_started.s(self, enable_hooks=True).set(queue='results')
		sig = chain(
			start_sig,
			*sigs,
			mark_runner_completed.s(self, enable_hooks=True).set(queue='results'),
		)
		return sig
@@ -0,0 +1,29 @@
1
+ from secator.loader import get_configs_by_type
2
+ from secator.runners import Scan
3
+
4
+
5
class DynamicScan(Scan):
	"""Callable wrapper that turns a scan config into a ready-to-run Scan.

	One instance is created per discovered scan config (see DYNAMIC_SCANS below)
	and exported at module level, so a scan can be obtained by name and invoked
	with its targets.

	NOTE(review): __init__ deliberately does NOT call super().__init__();
	full Runner initialization is deferred to __call__, when targets and
	run options are known.
	"""
	def __init__(self, config):
		# Only stash the config here; the Scan itself is initialized lazily.
		self.config = config

	def __call__(self, targets, **kwargs):
		"""Initialize the underlying Scan with targets and options, return self.

		Args:
			targets: Inputs to scan.
			**kwargs: hooks / results / context are extracted; everything else
				is passed through as run_opts.
		"""
		hooks = kwargs.pop('hooks', {})
		results = kwargs.pop('results', [])
		context = kwargs.pop('context', {})
		super().__init__(
			config=self.config,
			inputs=targets,
			results=results,
			hooks=hooks,
			context=context,
			run_opts=kwargs)
		return self
21
+
22
+
23
# Instantiate one DynamicScan per discovered scan template and expose each
# instance under its scan name, so scans are importable by name from this module.
DYNAMIC_SCANS = {scan.name: DynamicScan(scan) for scan in get_configs_by_type('scan')}

globals().update(DYNAMIC_SCANS)
__all__ = list(DYNAMIC_SCANS)
@@ -0,0 +1,8 @@
1
+ __all__ = [
2
+ 'JSONSerializer',
3
+ 'RegexSerializer',
4
+ 'DataclassEncoder',
5
+ ]
6
+ from secator.serializers.json import JSONSerializer
7
+ from secator.serializers.regex import RegexSerializer
8
+ from secator.serializers.dataclass import DataclassEncoder
@@ -0,0 +1,39 @@
1
+ from datetime import date, datetime
2
+ import json
3
+ from pathlib import PosixPath
4
+ from secator.output_types import OUTPUT_TYPES
5
+
6
+
7
class DataclassEncoder(json.JSONEncoder):
	"""JSON encoder for secator objects.

	Serializes objects exposing toDict() (secator output types), filesystem
	paths, and date/datetime values; defers to the stock encoder otherwise.
	"""

	def default(self, obj):
		# Output types expose toDict(); serialize them as plain dicts.
		if hasattr(obj, 'toDict'):
			return obj.toDict()
		# Paths become their string form.
		if isinstance(obj, PosixPath):
			return str(obj)
		# Dates and datetimes become ISO-8601 strings.
		if isinstance(obj, (datetime, date)):
			return obj.isoformat()
		# Anything else: let the base encoder raise the usual TypeError.
		return super().default(obj)
17
+
18
+
19
def get_output_cls(type):
	"""Return the output type class whose name matches *type*.

	Args:
		type (str): Output type name (e.g. 'url'). The parameter name shadows the
			builtin but is kept for backward compatibility with keyword callers.

	Returns:
		type | None: The matching class from OUTPUT_TYPES, or None if not found.
	"""
	# next() with a default is lazy and avoids building a throwaway list
	# just to index [0] and catch IndexError.
	return next((cls for cls in OUTPUT_TYPES if cls.get_name() == type), None)
24
+
25
+
26
def dataclass_decoder(obj):
	"""json object_hook that revives tagged dicts into their output type class.

	Dicts carrying a '_type' key whose name resolves to a known output class
	are loaded through that class; everything else passes through unchanged.
	"""
	output_cls = get_output_cls(obj['_type']) if '_type' in obj else None
	return output_cls.load(obj) if output_cls else obj
32
+
33
+
34
def dumps_dataclass(obj, indent=None):
	"""Serialize obj to a JSON string via DataclassEncoder (handles output types, paths, dates)."""
	return json.dumps(obj, cls=DataclassEncoder, indent=indent)
36
+
37
+
38
def loads_dataclass(obj):
	"""Deserialize a JSON string, reviving '_type'-tagged dicts via dataclass_decoder."""
	return json.loads(obj, object_hook=dataclass_decoder)
@@ -0,0 +1,45 @@
1
+ import json
2
+
3
+
4
class JSONSerializer:
	"""Extract JSON payloads embedded in a line of tool output.

	Args:
		strict (bool): If True, only accept payloads that start at column 0.
		list (bool): If True, look for a JSON array of objects ('[{...}]')
			instead of a single object. Parameter name shadows the builtin but
			is part of the public interface.
	"""

	def __init__(self, strict=False, list=False):
		self.strict = strict
		self.list = list

	def run(self, line):
		"""Return a generator over JSON objects parsed from *line*."""
		loader = self._load_list if self.list else self._load_single
		return loader(line)

	def _load_single(self, line):
		# Locate the outermost braces; nothing to do when either is missing.
		start, end = line.find('{'), line.rfind('}')
		if start == -1 or end == -1:
			return
		# Strict mode rejects payloads preceded by other text.
		if self.strict and start != 0:
			return
		try:
			yield json.loads(line[start:end + 1])
		except json.decoder.JSONDecodeError:
			return

	def _load_list(self, line):
		# Locate the outermost '[{' ... '}]' span.
		start, end = line.find('[{'), line.rfind('}]')
		if start == -1 or end == -1:
			return
		if self.strict and start != 0:
			return
		try:
			parsed = json.loads(line[start:end + 2])
		except json.decoder.JSONDecodeError:
			return
		# A successfully parsed array yields item by item; anything else whole.
		if isinstance(parsed, list):
			yield from parsed
		else:
			yield parsed
@@ -0,0 +1,25 @@
1
+ import re
2
+
3
+
4
class RegexSerializer:
	"""Parse lines of tool output with a regular expression.

	Args:
		regex (str): Pattern, compiled once at init.
		fields (list[str] | None): Named groups to extract into a dict. When
			empty/None, the full match (group 0) is yielded instead. (A None
			sentinel replaces the previous mutable default `fields=[]`.)
		findall (bool): If True, yield every non-overlapping match in the line
			instead of anchoring a single match at the start.
	"""

	def __init__(self, regex, fields=None, findall=False):
		self.regex = re.compile(regex)
		# Avoid the shared-mutable-default pitfall; empty stays falsy as before.
		self.fields = fields or []
		self.findall = findall

	def run(self, line):
		"""Yield matches from *line*: strings in findall/no-fields mode, dicts otherwise."""
		if self.findall:
			yield from self.regex.findall(line)
			return
		match = self.regex.match(line)
		if not match:
			return
		if not self.fields:
			yield match.group(0)
			return
		# One dict mapping each requested named group to its captured text.
		yield {field: match.group(field) for field in self.fields}
@@ -0,0 +1,8 @@
1
"""Expose every discovered task class as a top-level name of secator.tasks."""
import importlib

from secator.loader import discover_tasks

TASKS = discover_tasks()
__all__ = [
	cls.__name__
	for cls in TASKS
]
# Bind each task class into this module's namespace, mirroring the previous
# `exec(f'from .{name} import {name}')` behavior. importlib + an explicit
# globals() assignment is auditable and greppable, and avoids exec() on a
# dynamically built string.
for cls in TASKS:
	_module = importlib.import_module(f'.{cls.__name__}', __package__)
	globals()[cls.__name__] = getattr(_module, cls.__name__)