secator 0.15.1__py3-none-any.whl → 0.16.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of secator might be problematic. Click here for more details.

Files changed (106)
  1. secator/celery.py +40 -24
  2. secator/celery_signals.py +71 -68
  3. secator/celery_utils.py +43 -27
  4. secator/cli.py +520 -280
  5. secator/cli_helper.py +394 -0
  6. secator/click.py +87 -0
  7. secator/config.py +67 -39
  8. secator/configs/profiles/http_headless.yaml +6 -0
  9. secator/configs/profiles/http_record.yaml +6 -0
  10. secator/configs/profiles/tor.yaml +1 -1
  11. secator/configs/scans/domain.yaml +4 -2
  12. secator/configs/scans/host.yaml +1 -1
  13. secator/configs/scans/network.yaml +1 -4
  14. secator/configs/scans/subdomain.yaml +13 -1
  15. secator/configs/scans/url.yaml +1 -2
  16. secator/configs/workflows/cidr_recon.yaml +6 -4
  17. secator/configs/workflows/code_scan.yaml +1 -1
  18. secator/configs/workflows/host_recon.yaml +29 -3
  19. secator/configs/workflows/subdomain_recon.yaml +67 -16
  20. secator/configs/workflows/url_crawl.yaml +44 -15
  21. secator/configs/workflows/url_dirsearch.yaml +4 -4
  22. secator/configs/workflows/url_fuzz.yaml +25 -17
  23. secator/configs/workflows/url_params_fuzz.yaml +7 -0
  24. secator/configs/workflows/url_vuln.yaml +33 -8
  25. secator/configs/workflows/user_hunt.yaml +2 -1
  26. secator/configs/workflows/wordpress.yaml +5 -3
  27. secator/cve.py +718 -0
  28. secator/decorators.py +0 -454
  29. secator/definitions.py +49 -30
  30. secator/exporters/_base.py +2 -2
  31. secator/exporters/console.py +2 -2
  32. secator/exporters/table.py +4 -3
  33. secator/exporters/txt.py +1 -1
  34. secator/hooks/mongodb.py +2 -4
  35. secator/installer.py +77 -49
  36. secator/loader.py +116 -0
  37. secator/output_types/_base.py +3 -0
  38. secator/output_types/certificate.py +63 -63
  39. secator/output_types/error.py +4 -5
  40. secator/output_types/info.py +2 -2
  41. secator/output_types/ip.py +3 -1
  42. secator/output_types/progress.py +5 -9
  43. secator/output_types/state.py +17 -17
  44. secator/output_types/tag.py +3 -0
  45. secator/output_types/target.py +10 -2
  46. secator/output_types/url.py +19 -7
  47. secator/output_types/vulnerability.py +11 -7
  48. secator/output_types/warning.py +2 -2
  49. secator/report.py +27 -15
  50. secator/rich.py +18 -10
  51. secator/runners/_base.py +446 -233
  52. secator/runners/_helpers.py +133 -24
  53. secator/runners/command.py +182 -102
  54. secator/runners/scan.py +33 -5
  55. secator/runners/task.py +13 -7
  56. secator/runners/workflow.py +105 -72
  57. secator/scans/__init__.py +2 -2
  58. secator/serializers/dataclass.py +20 -20
  59. secator/tasks/__init__.py +4 -4
  60. secator/tasks/_categories.py +39 -27
  61. secator/tasks/arjun.py +9 -5
  62. secator/tasks/bbot.py +53 -21
  63. secator/tasks/bup.py +19 -5
  64. secator/tasks/cariddi.py +24 -3
  65. secator/tasks/dalfox.py +26 -7
  66. secator/tasks/dirsearch.py +10 -4
  67. secator/tasks/dnsx.py +70 -25
  68. secator/tasks/feroxbuster.py +11 -3
  69. secator/tasks/ffuf.py +42 -6
  70. secator/tasks/fping.py +20 -8
  71. secator/tasks/gau.py +3 -1
  72. secator/tasks/gf.py +3 -3
  73. secator/tasks/gitleaks.py +2 -2
  74. secator/tasks/gospider.py +7 -1
  75. secator/tasks/grype.py +5 -4
  76. secator/tasks/h8mail.py +2 -1
  77. secator/tasks/httpx.py +18 -5
  78. secator/tasks/katana.py +35 -15
  79. secator/tasks/maigret.py +4 -4
  80. secator/tasks/mapcidr.py +3 -3
  81. secator/tasks/msfconsole.py +4 -4
  82. secator/tasks/naabu.py +2 -2
  83. secator/tasks/nmap.py +12 -14
  84. secator/tasks/nuclei.py +3 -3
  85. secator/tasks/searchsploit.py +4 -5
  86. secator/tasks/subfinder.py +2 -2
  87. secator/tasks/testssl.py +264 -263
  88. secator/tasks/trivy.py +5 -5
  89. secator/tasks/wafw00f.py +21 -3
  90. secator/tasks/wpprobe.py +90 -83
  91. secator/tasks/wpscan.py +6 -5
  92. secator/template.py +218 -104
  93. secator/thread.py +15 -15
  94. secator/tree.py +196 -0
  95. secator/utils.py +131 -123
  96. secator/utils_test.py +60 -19
  97. secator/workflows/__init__.py +2 -2
  98. {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/METADATA +36 -36
  99. secator-0.16.0.dist-info/RECORD +132 -0
  100. secator/configs/profiles/default.yaml +0 -8
  101. secator/configs/workflows/url_nuclei.yaml +0 -11
  102. secator/tasks/dnsxbrute.py +0 -42
  103. secator-0.15.1.dist-info/RECORD +0 -128
  104. {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/WHEEL +0 -0
  105. {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/entry_points.txt +0 -0
  106. {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/licenses/LICENSE +0 -0
secator/template.py CHANGED
@@ -1,30 +1,28 @@
1
- import glob
1
+ import yaml
2
2
 
3
3
  from collections import OrderedDict
4
- from pathlib import Path
5
-
6
- import yaml
7
4
  from dotmap import DotMap
5
+ from pathlib import Path
8
6
 
9
- from secator.config import CONFIG, CONFIGS_FOLDER
10
- from secator.rich import console
11
- from secator.utils import convert_functions_to_strings, debug
12
7
  from secator.output_types import Error
13
-
14
- TEMPLATES = []
8
+ from secator.rich import console
15
9
 
16
10
 
17
11
  class TemplateLoader(DotMap):
18
12
 
19
13
  def __init__(self, input={}, name=None, **kwargs):
20
14
  if name:
21
- if '/' not in name:
15
+ split = name.split('/')
16
+ if len(split) != 2:
22
17
  console.print(Error(message=f'Cannot load {name}: you should specify a type for the template when loading by name (e.g. workflow/<workflow_name>)')) # noqa: E501
23
18
  return
24
- _type, name = name.split('/')
25
- config = next((p for p in TEMPLATES if p['type'] == _type and p['name'] == name in str(p)), None)
19
+ _type, _name = tuple(split)
20
+ if _type.endswith('s'):
21
+ _type = _type[:-1]
22
+ from secator.loader import find_templates
23
+ config = next((p for p in find_templates() if p['type'] == _type and p['name'] == _name), None)
26
24
  if not config:
27
- console.print(Error(message=f'Template {name} not found in loaded templates'))
25
+ console.print(Error(message=f'Template {_type}/{_name} not found in loaded templates'))
28
26
  config = {}
29
27
  elif isinstance(input, dict):
30
28
  config = input
@@ -35,9 +33,6 @@ class TemplateLoader(DotMap):
35
33
  config = self._load(input)
36
34
  super().__init__(config, **kwargs)
37
35
 
38
- def add_to_templates(self):
39
- TEMPLATES.append(self)
40
-
41
36
  def _load_from_path(self, path):
42
37
  if not path.exists():
43
38
  console.print(Error(message=f'Config path {path} does not exists'))
@@ -48,16 +43,6 @@ class TemplateLoader(DotMap):
48
43
  def _load(self, input):
49
44
  return yaml.load(input, Loader=yaml.Loader)
50
45
 
51
- @property
52
- def supported_opts(self):
53
- """Property to access supported options easily."""
54
- return self._collect_supported_opts()
55
-
56
- @property
57
- def flat_tasks(self):
58
- """Property to access tasks easily."""
59
- return self._extract_tasks()
60
-
61
46
  def print(self):
62
47
  """Print config as highlighted yaml."""
63
48
  config = self.toDict()
@@ -69,81 +54,210 @@ class TemplateLoader(DotMap):
69
54
  yaml_highlight = Syntax(yaml_str, 'yaml', line_numbers=True)
70
55
  console.print(yaml_highlight)
71
56
 
72
- def _collect_supported_opts(self):
73
- """Collect supported options from the tasks extracted from the config."""
74
- tasks = self._extract_tasks()
75
- opts = {}
76
- for _, task_info in tasks.items():
77
- task_class = task_info['class']
78
- if task_class:
79
- task_opts = task_class.get_supported_opts()
80
- for name, conf in task_opts.items():
81
- if name not in opts or not opts[name].get('supported', False):
82
- opts[name] = convert_functions_to_strings(conf)
83
- return opts
84
-
85
- def _extract_tasks(self):
86
- """Extract tasks from any workflow or scan config.
87
-
88
- Returns:
89
- dict: A dict of task full name to task configuration containing the keyts keys ['name', 'class', 'opts']).
90
- """
91
- from secator.runners import Task
92
- tasks = OrderedDict()
93
-
94
- def parse_config(config, prefix=''):
95
- for key, value in config.items():
96
- if key.startswith('_group'):
97
- parse_config(value, prefix)
98
- elif value:
99
- task_name = f'{prefix}/{key}' if prefix else key
100
- name = key.split('/')[0]
101
- if task_name not in tasks:
102
- tasks[task_name] = {'name': name, 'class': Task.get_task_class(name), 'opts': {}}
103
- tasks[task_name]['opts'] = value.toDict()
104
-
105
- if not self.type:
106
- return tasks
107
-
108
- elif self.type == 'task':
109
- tasks[self.name] = {'name': self.name, 'class': Task.get_task_class(self.name)}
110
-
111
- elif self.type == 'scan':
112
- # For each workflow in the scan, load it and incorporate it with a unique prefix
113
- for wf_name, _ in self.workflows.items():
114
- name = wf_name.split('/')[0]
115
- config = TemplateLoader(name=f'workflow/{name}')
116
- wf_tasks = config.flat_tasks
117
- # Prefix tasks from this workflow with its name to prevent collision
118
- for task_key, task_val in wf_tasks.items():
119
- unique_task_key = f"{wf_name}/{task_key}" # Append workflow name to task key
120
- tasks[unique_task_key] = task_val
121
-
122
- elif self.type == 'workflow':
123
- # Normal parsing of a workflow
124
- parse_config(self.tasks)
125
-
126
- return dict(tasks)
127
-
128
-
129
- def find_templates():
130
- results = []
131
- dirs = [CONFIGS_FOLDER]
132
- if CONFIG.dirs.templates:
133
- dirs.append(CONFIG.dirs.templates)
134
- paths = []
135
- for dir in dirs:
136
- config_paths = [
137
- Path(path)
138
- for path in glob.glob(str(dir).rstrip('/') + '/**/*.y*ml', recursive=True)
139
- ]
140
- debug(f'Found {len(config_paths)} templates in {dir}', sub='template')
141
- paths.extend(config_paths)
142
- for path in paths:
143
- config = TemplateLoader(input=path)
144
- debug(f'Loaded template from {path}', sub='template')
145
- results.append(config)
146
- return results
147
-
148
-
149
- TEMPLATES = find_templates()
57
+
58
+ def get_short_id(id_str, config_name):
59
+ """Remove config name prefix from ID string if present.
60
+
61
+ Args:
62
+ id_str: The ID string to process
63
+ config_name: The config name prefix to remove
64
+
65
+ Returns:
66
+ str: ID string with prefix removed, or original string if no prefix found
67
+ """
68
+ if id_str.startswith(config_name):
69
+ return id_str.replace(config_name + '.', '')
70
+ return id_str
71
+
72
+
73
+ def get_config_options(config, exec_opts=None, output_opts=None, type_mapping=None):
74
+ """Extract and normalize command-line options from configuration.
75
+
76
+ Args:
77
+ config: Configuration object (task, workflow, or scan)
78
+ exec_opts: Execution options dictionary (optional)
79
+ output_opts: Output options dictionary (optional)
80
+ type_mapping: Type mapping for option types (optional)
81
+
82
+ Returns:
83
+ OrderedDict: Normalized options with metadata
84
+ """
85
+ from secator.tree import build_runner_tree, walk_runner_tree, get_flat_node_list
86
+ from secator.utils import debug
87
+ from secator.runners.task import Task
88
+
89
+ # Task config created on-the-fly
90
+ if config.type == 'task':
91
+ config = TemplateLoader({
92
+ 'name': config.name,
93
+ 'type': 'workflow',
94
+ 'tasks': {config.name: {}}
95
+ })
96
+
97
+ # Get main info
98
+ tree = build_runner_tree(config)
99
+ nodes = get_flat_node_list(tree)
100
+ exec_opts = exec_opts or {}
101
+ output_opts = output_opts or {}
102
+ type_mapping = type_mapping or {}
103
+ all_opts = OrderedDict({})
104
+
105
+ # Log current config and tree
106
+ debug(f'[magenta]{config.name}[/]', sub=f'cli.{config.name}')
107
+ debug(f'{tree.render_tree()}', sub=f'cli.{config.name}')
108
+
109
+ # Process global execution options
110
+ for opt in exec_opts:
111
+ opt_conf = exec_opts[opt].copy()
112
+ opt_conf['prefix'] = 'Execution'
113
+ all_opts[opt] = opt_conf
114
+
115
+ # Process global output options
116
+ for opt in output_opts:
117
+ opt_conf = output_opts[opt].copy()
118
+ opt_conf['prefix'] = 'Output'
119
+ all_opts[opt] = opt_conf
120
+
121
+ # Process config options
122
+ # a.k.a:
123
+ # - default YAML config options, defined in default_options: key in the runner YAML config
124
+ # - new options defined in options: key in the runner YAML config
125
+ config_opts_defaults = config.default_options.toDict()
126
+ config_opts = config.options.toDict()
127
+ for k, v in config_opts.items():
128
+ all_opts[k] = v
129
+ all_opts[k]['prefix'] = f'{config.type}'
130
+
131
+ def find_same_opts(node, nodes, opt_name, check_class_opts=False):
132
+ """Find options with the same name that are defined in other nodes of the same type."""
133
+ same_opts = []
134
+ for _ in nodes:
135
+ if _.id == node.id or _.type != node.type:
136
+ continue
137
+ node_task = None
138
+ if check_class_opts:
139
+ node_task = Task.get_task_class(_.name)
140
+ if opt_name not in node_task.opts:
141
+ continue
142
+ opts_value = node_task.opts[opt_name]
143
+ else:
144
+ if opt_name not in _.opts:
145
+ continue
146
+ opts_value = _.opts[opt_name]
147
+ name_str = 'nodes' if not check_class_opts else 'tasks'
148
+ debug(f'[bold]{config.name}[/] -> [bold blue]{node.id}[/] -> [bold green]{opt_name}[/] found in other {name_str} [bold blue]{_.id}[/]', sub=f'cli.{config.name}.same', verbose=True) # noqa: E501
149
+ same_opts.append({
150
+ 'id': _.id,
151
+ 'task_name': node_task.__name__ if node_task else None,
152
+ 'name': _.name,
153
+ 'value': opts_value,
154
+ })
155
+ if same_opts:
156
+ other_tasks = ", ".join([f'[bold yellow]{_["id"]}[/]' for _ in same_opts])
157
+ debug(f'[bold]{config.name}[/] -> [bold blue]{node.id}[/] -> [bold green]{opt_name}[/] found in {len(same_opts)} other {name_str}: {other_tasks}', sub=f'cli.{config.name}.same', verbose=True) # noqa: E501
158
+ return same_opts
159
+
160
+ def process_node(node):
161
+ debug(f'[bold]{config.name}[/] -> [bold blue]{node.id}[/] ({node.type})', sub=f'cli.{config.name}')
162
+
163
+ if node.type not in ['task', 'workflow']:
164
+ return
165
+
166
+ # Process workflow options
167
+ # a.k.a the new options defined in options: key in the workflow YAML config;
168
+ if node.type == 'workflow':
169
+ for k, v in node.opts.items():
170
+ same_opts = find_same_opts(node, nodes, k)
171
+ conf = v.copy()
172
+ opt_name = k
173
+ conf['prefix'] = f'{node.type.capitalize()} {node.name}'
174
+ if len(same_opts) > 0: # opt name conflict, change opt name
175
+ opt_name = f'{node.name}.{k}'
176
+ debug(f'[bold]{config.name}[/] -> [bold blue]{node.id}[/] -> [bold green]{k}[/] renamed to [bold green]{opt_name}[/] [dim red](duplicated)[/]', sub=f'cli.{config.name}') # noqa: E501
177
+ all_opts[opt_name] = conf
178
+ return
179
+
180
+ # Process task options
181
+ # a.k.a task options defined in their respective task classes
182
+ cls = Task.get_task_class(node.name)
183
+ task_opts = cls.opts.copy()
184
+ task_opts_meta = cls.meta_opts.copy()
185
+ task_opts_all = {**task_opts, **task_opts_meta}
186
+ node_opts = node.opts or {}
187
+ ancestor_opts_defaults = node.ancestor.default_opts or {}
188
+ node_id_str = get_short_id(node.id, config.name)
189
+
190
+ for k, v in task_opts_all.items():
191
+ conf = v.copy()
192
+ conf['prefix'] = f'Task {node.name}'
193
+ default_from_config = node_opts.get(k) or ancestor_opts_defaults.get(k) or config_opts_defaults.get(k)
194
+ opt_name = k
195
+ same_opts = find_same_opts(node, nodes, k)
196
+
197
+ # Found a default in YAML config, either in task options, or workflow options, or config options
198
+ if default_from_config:
199
+ conf['required'] = False
200
+ conf['default'] = default_from_config
201
+ conf['default_from'] = node_id_str
202
+ if node_opts.get(k):
203
+ conf['default_from'] = node_id_str
204
+ conf['prefix'] = 'Config'
205
+ elif ancestor_opts_defaults.get(k):
206
+ conf['default_from'] = get_short_id(node.ancestor.id, config.name)
207
+ conf['prefix'] = f'{node.ancestor.type.capitalize()} {node.ancestor.name}'
208
+ elif config_opts_defaults.get(k):
209
+ conf['default_from'] = config.name
210
+ conf['prefix'] = 'Config'
211
+ mapped_value = cls.opt_value_map.get(opt_name)
212
+ if mapped_value:
213
+ if callable(mapped_value):
214
+ default_from_config = mapped_value(default_from_config)
215
+ else:
216
+ default_from_config = mapped_value
217
+ conf['default'] = default_from_config
218
+ if len(same_opts) > 0: # change opt name to avoid conflict
219
+ conf['prefix'] = 'Config'
220
+ opt_name = f'{conf["default_from"]}.{k}'
221
+ debug(f'[bold]{config.name}[/] -> [bold blue]{node.id}[/] -> [bold green]{k}[/] renamed to [bold green]{opt_name}[/] [dim red](default set in config)[/]', sub=f'cli.{config.name}') # noqa: E501
222
+
223
+ # Standard meta options like rate_limit, delay, proxy, etc...
224
+ elif k in task_opts_meta:
225
+ conf['prefix'] = 'Meta'
226
+ debug(f'[bold]{config.name}[/] -> [bold blue]{node.id}[/] -> [bold green]{k}[/] changed prefix to [bold cyan]Meta[/]', sub=f'cli.{config.name}') # noqa: E501
227
+
228
+ # Task-specific options
229
+ elif k in task_opts:
230
+ same_opts = find_same_opts(node, nodes, k, check_class_opts=True)
231
+ if len(same_opts) > 0:
232
+ applies_to = set([node.name] + [_['name'] for _ in same_opts])
233
+ conf['applies_to'] = applies_to
234
+ conf['prefix'] = 'Shared task'
235
+ debug(f'[bold]{config.name}[/] -> [bold blue]{node.id}[/] -> [bold green]{k}[/] changed prefix to [bold cyan]Common[/] [dim red](duplicated {len(same_opts)} times)[/]', sub=f'cli.{config.name}') # noqa: E501
236
+ else:
237
+ raise ValueError(f'Unknown option {k} for task {node.id}')
238
+ all_opts[opt_name] = conf
239
+
240
+ walk_runner_tree(tree, process_node)
241
+
242
+ # Normalize all options
243
+ debug('[bold yellow3]All opts processed. Showing defaults:[/]', sub=f'cli.{config.name}')
244
+ normalized_opts = OrderedDict({})
245
+ for k, v in all_opts.items():
246
+ v['reverse'] = False
247
+ v['show_default'] = True
248
+ default_from = v.get('default_from')
249
+ default = v.get('default', False)
250
+ if isinstance(default, bool) and default is True:
251
+ v['reverse'] = True
252
+ if type_mapping and 'type' in v:
253
+ v['type'] = type_mapping.get(v['type'], str)
254
+ short = v.get('short')
255
+ k = k.replace('.', '-').replace('_', '-').replace('/', '-')
256
+ from_str = default_from.replace('.', '-').replace('_', '-').replace('/', '-') if default_from else None
257
+ if not default_from or from_str not in k:
258
+ v['short'] = short if short else None
259
+ else:
260
+ v['short'] = f'{from_str}-{short}' if short else None
261
+ debug(f'\t[bold]{k}[/] -> [bold green]{v.get("default", "N/A")}[/] [dim red](default from {v.get("default_from", "N/A")})[/]', sub=f'cli.{config.name}') # noqa: E501
262
+ normalized_opts[k] = v
263
+ return normalized_opts
secator/thread.py CHANGED
@@ -4,21 +4,21 @@ from secator.output_types import Error
4
4
 
5
5
 
6
6
  class Thread(threading.Thread):
7
- """A thread that returns errors in their join() method as secator.output_types.Error."""
7
+ """A thread that returns errors in their join() method as secator.output_types.Error."""
8
8
 
9
- def __init__(self, *args, **kwargs):
10
- super().__init__(*args, **kwargs)
11
- self.error = None
9
+ def __init__(self, *args, **kwargs):
10
+ super().__init__(*args, **kwargs)
11
+ self.error = None
12
12
 
13
- def run(self):
14
- try:
15
- if hasattr(self, '_target'):
16
- self._target(*self._args, **self._kwargs)
17
- except Exception as e:
18
- self.error = Error.from_exception(e)
13
+ def run(self):
14
+ try:
15
+ if hasattr(self, '_target'):
16
+ self._target(*self._args, **self._kwargs)
17
+ except Exception as e:
18
+ self.error = Error.from_exception(e)
19
19
 
20
- def join(self, *args, **kwargs):
21
- super().join(*args, **kwargs)
22
- if self.error:
23
- return self.error
24
- return None
20
+ def join(self, *args, **kwargs):
21
+ super().join(*args, **kwargs)
22
+ if self.error:
23
+ return self.error
24
+ return None
secator/tree.py ADDED
@@ -0,0 +1,196 @@
1
+ from typing import List, Optional, Union
2
+ from secator.template import TemplateLoader
3
+ from dotmap import DotMap
4
+
5
+
6
+ DEFAULT_RENDER_OPTS = {
7
+ 'group': lambda x: f"[dim]group {x.name.split('/')[-1] if '/' in x.name else ''}[/]",
8
+ 'task': lambda x: f"[bold gold3]:wrench: {x.name}[/]",
9
+ 'workflow': lambda x: f"[bold dark_orange3]:gear: {x.name}[/]",
10
+ 'scan': lambda x: f"[bold red]:magnifying_glass_tilted_left: {x.name}[/]",
11
+ 'condition': lambda x: f"[dim cyan]# if {x}[/]" if x else ''
12
+ }
13
+
14
+
15
+ class TaskNode:
16
+ """Represents a node in the workflow/scan task tree."""
17
+ def __init__(self, name: str, type_: str, id: str, opts: Optional[dict] = None, default_opts: Optional[dict] = None, condition: Optional[str] = None, description: Optional[str] = None, parent=None, ancestor=None): # noqa: E501
18
+ self.name = name
19
+ self.type = type_
20
+ self.id = id
21
+ self.opts = opts or {}
22
+ self.default_opts = default_opts or {}
23
+ self.description = description
24
+ self.condition = condition
25
+ self.children: List[TaskNode] = []
26
+ self.parent = parent
27
+ self.ancestor = ancestor
28
+
29
+ def add_child(self, child: 'TaskNode') -> None:
30
+ """Add a child node to this node."""
31
+ self.children.append(child)
32
+
33
+ def remove(self):
34
+ """Remove this node from its parent."""
35
+ if self.parent:
36
+ self.parent.children.remove(self)
37
+
38
+ def __str__(self) -> str:
39
+ """String representation with condition if present."""
40
+ if self.condition:
41
+ return f"{self.name} # if {self.condition}"
42
+ return self.name
43
+
44
+
45
+ class RunnerTree:
46
+ """Represents a tree of workflow/scan tasks."""
47
+ def __init__(self, name: str, type_: str, render_opts: Optional[dict] = DEFAULT_RENDER_OPTS):
48
+ self.name = name
49
+ self.type = type_
50
+ self.root_nodes: List[TaskNode] = []
51
+ self.render_opts = render_opts
52
+
53
+ def add_root_node(self, node: TaskNode) -> None:
54
+ """Add a root-level node to the tree."""
55
+ self.root_nodes.append(node)
56
+
57
+ def render_tree(self) -> str:
58
+ """Render the tree as a console-friendly string."""
59
+ lines = []
60
+ for node in self.root_nodes:
61
+ node_str = self.render_opts.get(node.type, lambda x: str(x))(node)
62
+ condition_str = self.render_opts.get('condition', lambda x: str(x) if x else '')(node.condition)
63
+ if condition_str:
64
+ node_str = f"{node_str} {condition_str}"
65
+ lines.append(node_str)
66
+ self._render_children(node, "", lines)
67
+ return "\n".join(lines)
68
+
69
+ def _render_children(self, node: TaskNode, prefix: str, lines: List[str]) -> None:
70
+ """Helper method to recursively render child nodes."""
71
+ children_count = len(node.children)
72
+ for i, child in enumerate(node.children):
73
+ is_last = i == children_count - 1
74
+ branch = "└─ " if is_last else "├─ "
75
+ child_str = self.render_opts.get(child.type, lambda x: str(x))
76
+ condition_str = self.render_opts.get('condition', lambda x: str(x) if x else '')(child.condition)
77
+ render_str = f"{prefix}{branch}{child_str(child)}"
78
+ if child.description:
79
+ render_str += f" - [dim]{child.description}[/]"
80
+ if condition_str:
81
+ render_str += f" {condition_str}"
82
+ lines.append(render_str)
83
+ if child.children:
84
+ new_prefix = prefix + (" " if is_last else "│ ")
85
+ self._render_children(child, new_prefix, lines)
86
+
87
+ def get_subtree(self, node: TaskNode) -> 'RunnerTree':
88
+ """Get the subtree of this node."""
89
+ subtree = RunnerTree(node.name, node.type)
90
+ for child in node.children:
91
+ subtree.add_root_node(child)
92
+ return subtree
93
+
94
+
95
+ def build_runner_tree(config: DotMap, condition: Optional[str] = None, parent: Optional[TaskNode] = None, ancestor: Optional[TaskNode] = None) -> Union[RunnerTree, str]: # noqa: E501
96
+ """
97
+ Build a tree representation from a runner config.
98
+
99
+ Args:
100
+ config (DotMap): The runner config.
101
+
102
+ Returns:
103
+ A RunnerTree object or an error message string
104
+ """
105
+ tree = RunnerTree(config.name, config.type)
106
+
107
+ if config.type == 'workflow':
108
+ root_node = TaskNode(config.name, 'workflow', config.name, opts=config.options, default_opts=config.default_options, condition=condition, parent=parent, ancestor=ancestor) # noqa: E501
109
+ tree.add_root_node(root_node)
110
+
111
+ # Add tasks to the tree
112
+ for task_name, task_details in config.tasks.items():
113
+ id = f'{config.name}.{task_name}'
114
+ if task_name.startswith('_group'):
115
+ group_node = TaskNode(task_name, 'group', id, parent=root_node, ancestor=root_node)
116
+ root_node.add_child(group_node)
117
+ for subtask_name, subtask_details in task_details.items():
118
+ subtask_details = subtask_details or {}
119
+ id = f'{config.name}.{subtask_name}'
120
+ condition = subtask_details.get('if')
121
+ description = subtask_details.get('description')
122
+ subtask_node = TaskNode(subtask_name, 'task', id, opts=subtask_details, condition=condition, description=description, parent=group_node, ancestor=root_node) # noqa: E501
123
+ group_node.add_child(subtask_node)
124
+ else:
125
+ condition = task_details.get('if') if task_details else None
126
+ description = task_details.get('description') if task_details else None
127
+ task_node = TaskNode(task_name, 'task', id, opts=task_details, condition=condition, description=description, parent=root_node, ancestor=root_node) # noqa: E501
128
+ root_node.add_child(task_node)
129
+
130
+ elif config.type == 'scan':
131
+ id = f'{config.name}'
132
+ root_node = TaskNode(config.name, 'scan', id, opts=config.options, parent=parent)
133
+ tree.add_root_node(root_node)
134
+
135
+ # Add workflows to the tree
136
+ for workflow_name, workflow_details in config.workflows.items():
137
+ id = f'{config.name}.{workflow_name}'
138
+ condition = workflow_details.get('if') if isinstance(workflow_details, dict) else None
139
+ split_name = workflow_name.split('/')
140
+ wf_name = split_name[0]
141
+ wf_config = TemplateLoader(name=f'workflow/{wf_name}')
142
+ wf_config.name = workflow_name
143
+ wf_tree = build_runner_tree(wf_config, condition, parent=root_node, ancestor=root_node)
144
+ if isinstance(wf_tree, RunnerTree):
145
+ for wf_root_node in wf_tree.root_nodes:
146
+ root_node.add_child(wf_root_node)
147
+
148
+ elif config.type == 'task':
149
+ root_node = TaskNode(config.name, 'task', config.name, opts={}, parent=parent, ancestor=ancestor)
150
+ tree.add_root_node(root_node)
151
+
152
+ return tree
153
+
154
+
155
+ def walk_runner_tree(tree: RunnerTree, visit_func):
156
+ """
157
+ Walk the RunnerTree and visit each node.
158
+
159
+ Args:
160
+ tree (RunnerTree): The RunnerTree to walk.
161
+ visit_func (function): A function to call on each node.
162
+ """
163
+ for root_node in tree.root_nodes:
164
+ _walk_node(root_node, visit_func)
165
+
166
+
167
+ def _walk_node(node: TaskNode, visit_func):
168
+ """
169
+ Recursively walk the node and its children.
170
+
171
+ Args:
172
+ node (TaskNode): The node to walk.
173
+ visit_func (function): A function to call on each node.
174
+ """
175
+ visit_func(node)
176
+ for child in node.children:
177
+ _walk_node(child, visit_func)
178
+
179
+
180
+ def get_flat_node_list(tree: RunnerTree) -> List[TaskNode]:
181
+ """
182
+ Get the flat list of all nodes in the RunnerTree.
183
+
184
+ Args:
185
+ tree (RunnerTree): The RunnerTree to traverse.
186
+
187
+ Returns:
188
+ List[TaskNode]: The list of all nodes in the tree.
189
+ """
190
+ nodes = []
191
+
192
+ def collect_node(node: TaskNode):
193
+ nodes.append(node)
194
+
195
+ walk_runner_tree(tree, collect_node)
196
+ return nodes