secator 0.22.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (150):
  1. secator/.gitignore +162 -0
  2. secator/__init__.py +0 -0
  3. secator/celery.py +453 -0
  4. secator/celery_signals.py +138 -0
  5. secator/celery_utils.py +320 -0
  6. secator/cli.py +2035 -0
  7. secator/cli_helper.py +395 -0
  8. secator/click.py +87 -0
  9. secator/config.py +670 -0
  10. secator/configs/__init__.py +0 -0
  11. secator/configs/profiles/__init__.py +0 -0
  12. secator/configs/profiles/aggressive.yaml +8 -0
  13. secator/configs/profiles/all_ports.yaml +7 -0
  14. secator/configs/profiles/full.yaml +31 -0
  15. secator/configs/profiles/http_headless.yaml +7 -0
  16. secator/configs/profiles/http_record.yaml +8 -0
  17. secator/configs/profiles/insane.yaml +8 -0
  18. secator/configs/profiles/paranoid.yaml +8 -0
  19. secator/configs/profiles/passive.yaml +11 -0
  20. secator/configs/profiles/polite.yaml +8 -0
  21. secator/configs/profiles/sneaky.yaml +8 -0
  22. secator/configs/profiles/tor.yaml +5 -0
  23. secator/configs/scans/__init__.py +0 -0
  24. secator/configs/scans/domain.yaml +31 -0
  25. secator/configs/scans/host.yaml +23 -0
  26. secator/configs/scans/network.yaml +30 -0
  27. secator/configs/scans/subdomain.yaml +27 -0
  28. secator/configs/scans/url.yaml +19 -0
  29. secator/configs/workflows/__init__.py +0 -0
  30. secator/configs/workflows/cidr_recon.yaml +48 -0
  31. secator/configs/workflows/code_scan.yaml +29 -0
  32. secator/configs/workflows/domain_recon.yaml +46 -0
  33. secator/configs/workflows/host_recon.yaml +95 -0
  34. secator/configs/workflows/subdomain_recon.yaml +120 -0
  35. secator/configs/workflows/url_bypass.yaml +15 -0
  36. secator/configs/workflows/url_crawl.yaml +98 -0
  37. secator/configs/workflows/url_dirsearch.yaml +62 -0
  38. secator/configs/workflows/url_fuzz.yaml +68 -0
  39. secator/configs/workflows/url_params_fuzz.yaml +66 -0
  40. secator/configs/workflows/url_secrets_hunt.yaml +23 -0
  41. secator/configs/workflows/url_vuln.yaml +91 -0
  42. secator/configs/workflows/user_hunt.yaml +29 -0
  43. secator/configs/workflows/wordpress.yaml +38 -0
  44. secator/cve.py +718 -0
  45. secator/decorators.py +7 -0
  46. secator/definitions.py +168 -0
  47. secator/exporters/__init__.py +14 -0
  48. secator/exporters/_base.py +3 -0
  49. secator/exporters/console.py +10 -0
  50. secator/exporters/csv.py +37 -0
  51. secator/exporters/gdrive.py +123 -0
  52. secator/exporters/json.py +16 -0
  53. secator/exporters/table.py +36 -0
  54. secator/exporters/txt.py +28 -0
  55. secator/hooks/__init__.py +0 -0
  56. secator/hooks/gcs.py +80 -0
  57. secator/hooks/mongodb.py +281 -0
  58. secator/installer.py +694 -0
  59. secator/loader.py +128 -0
  60. secator/output_types/__init__.py +49 -0
  61. secator/output_types/_base.py +108 -0
  62. secator/output_types/certificate.py +78 -0
  63. secator/output_types/domain.py +50 -0
  64. secator/output_types/error.py +42 -0
  65. secator/output_types/exploit.py +58 -0
  66. secator/output_types/info.py +24 -0
  67. secator/output_types/ip.py +47 -0
  68. secator/output_types/port.py +55 -0
  69. secator/output_types/progress.py +36 -0
  70. secator/output_types/record.py +36 -0
  71. secator/output_types/stat.py +41 -0
  72. secator/output_types/state.py +29 -0
  73. secator/output_types/subdomain.py +45 -0
  74. secator/output_types/tag.py +69 -0
  75. secator/output_types/target.py +38 -0
  76. secator/output_types/url.py +112 -0
  77. secator/output_types/user_account.py +41 -0
  78. secator/output_types/vulnerability.py +101 -0
  79. secator/output_types/warning.py +30 -0
  80. secator/report.py +140 -0
  81. secator/rich.py +130 -0
  82. secator/runners/__init__.py +14 -0
  83. secator/runners/_base.py +1240 -0
  84. secator/runners/_helpers.py +218 -0
  85. secator/runners/celery.py +18 -0
  86. secator/runners/command.py +1178 -0
  87. secator/runners/python.py +126 -0
  88. secator/runners/scan.py +87 -0
  89. secator/runners/task.py +81 -0
  90. secator/runners/workflow.py +168 -0
  91. secator/scans/__init__.py +29 -0
  92. secator/serializers/__init__.py +8 -0
  93. secator/serializers/dataclass.py +39 -0
  94. secator/serializers/json.py +45 -0
  95. secator/serializers/regex.py +25 -0
  96. secator/tasks/__init__.py +8 -0
  97. secator/tasks/_categories.py +487 -0
  98. secator/tasks/arjun.py +113 -0
  99. secator/tasks/arp.py +53 -0
  100. secator/tasks/arpscan.py +70 -0
  101. secator/tasks/bbot.py +372 -0
  102. secator/tasks/bup.py +118 -0
  103. secator/tasks/cariddi.py +193 -0
  104. secator/tasks/dalfox.py +87 -0
  105. secator/tasks/dirsearch.py +84 -0
  106. secator/tasks/dnsx.py +186 -0
  107. secator/tasks/feroxbuster.py +93 -0
  108. secator/tasks/ffuf.py +135 -0
  109. secator/tasks/fping.py +85 -0
  110. secator/tasks/gau.py +102 -0
  111. secator/tasks/getasn.py +60 -0
  112. secator/tasks/gf.py +36 -0
  113. secator/tasks/gitleaks.py +96 -0
  114. secator/tasks/gospider.py +84 -0
  115. secator/tasks/grype.py +109 -0
  116. secator/tasks/h8mail.py +75 -0
  117. secator/tasks/httpx.py +167 -0
  118. secator/tasks/jswhois.py +36 -0
  119. secator/tasks/katana.py +203 -0
  120. secator/tasks/maigret.py +87 -0
  121. secator/tasks/mapcidr.py +42 -0
  122. secator/tasks/msfconsole.py +179 -0
  123. secator/tasks/naabu.py +85 -0
  124. secator/tasks/nmap.py +487 -0
  125. secator/tasks/nuclei.py +151 -0
  126. secator/tasks/search_vulns.py +225 -0
  127. secator/tasks/searchsploit.py +109 -0
  128. secator/tasks/sshaudit.py +299 -0
  129. secator/tasks/subfinder.py +48 -0
  130. secator/tasks/testssl.py +283 -0
  131. secator/tasks/trivy.py +130 -0
  132. secator/tasks/trufflehog.py +240 -0
  133. secator/tasks/urlfinder.py +100 -0
  134. secator/tasks/wafw00f.py +106 -0
  135. secator/tasks/whois.py +34 -0
  136. secator/tasks/wpprobe.py +116 -0
  137. secator/tasks/wpscan.py +202 -0
  138. secator/tasks/x8.py +94 -0
  139. secator/tasks/xurlfind3r.py +83 -0
  140. secator/template.py +294 -0
  141. secator/thread.py +24 -0
  142. secator/tree.py +196 -0
  143. secator/utils.py +922 -0
  144. secator/utils_test.py +297 -0
  145. secator/workflows/__init__.py +29 -0
  146. secator-0.22.0.dist-info/METADATA +447 -0
  147. secator-0.22.0.dist-info/RECORD +150 -0
  148. secator-0.22.0.dist-info/WHEEL +4 -0
  149. secator-0.22.0.dist-info/entry_points.txt +2 -0
  150. secator-0.22.0.dist-info/licenses/LICENSE +60 -0
@@ -0,0 +1,83 @@
1
+ import validators
2
+ from collections import defaultdict
3
+ from urllib.parse import urlparse, urlunparse, parse_qs
4
+
5
+ from secator.definitions import HOST, URL, DELAY, DEPTH, FILTER_CODES, FILTER_REGEX, FILTER_SIZE, FILTER_WORDS, MATCH_CODES, MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, FOLLOW_REDIRECT, METHOD, PROXY, RATE_LIMIT, RETRIES, THREADS, TIMEOUT, USER_AGENT, HEADER, OPT_NOT_SUPPORTED # noqa: E501
6
+ from secator.output_types import Url
7
+ from secator.decorators import task
8
+ from secator.serializers import JSONSerializer
9
+ from secator.tasks._categories import HttpCrawler
10
+
11
+ MAX_PARAM_OCCURRENCES = 10
12
+
13
+
14
@task()
class xurlfind3r(HttpCrawler):
    """Discover URLs for a given domain in a simple, passive and efficient way"""
    cmd = 'xurlfind3r'
    tags = ['url', 'recon']
    input_types = [HOST, URL]
    output_types = [Url]
    item_loaders = [JSONSerializer()]
    json_flag = '--jsonl'
    file_flag = '-l'
    input_flag = '-d'
    version_flag = 'version'
    opts = {
        'sources': {'type': str, 'help': 'Sources to use (comma-delimited)', 'required': False},
        'sources_to_exclude': {'type': str, 'help': 'Sources to exclude (comma-delimited)', 'required': False},
        'include_subdomains': {'is_flag': True, 'help': 'Include subdomains', 'required': False, 'default': False},
        # Use the module-level constant instead of duplicating the literal 10,
        # so the default stays in sync with MAX_PARAM_OCCURRENCES.
        'max_param_occurrences': {'type': int, 'help': 'Max occurrences for the same parameter in the same URL before discarding next results', 'required': False, 'default': MAX_PARAM_OCCURRENCES, 'internal': True},  # noqa: E501
    }
    opt_key_map = {
        HEADER: OPT_NOT_SUPPORTED,
        DELAY: OPT_NOT_SUPPORTED,
        DEPTH: OPT_NOT_SUPPORTED,
        FILTER_CODES: OPT_NOT_SUPPORTED,
        FILTER_REGEX: OPT_NOT_SUPPORTED,
        FILTER_SIZE: OPT_NOT_SUPPORTED,
        FILTER_WORDS: OPT_NOT_SUPPORTED,
        MATCH_CODES: OPT_NOT_SUPPORTED,
        MATCH_REGEX: OPT_NOT_SUPPORTED,
        MATCH_SIZE: OPT_NOT_SUPPORTED,
        MATCH_WORDS: OPT_NOT_SUPPORTED,
        FOLLOW_REDIRECT: OPT_NOT_SUPPORTED,
        METHOD: OPT_NOT_SUPPORTED,
        PROXY: OPT_NOT_SUPPORTED,
        RATE_LIMIT: OPT_NOT_SUPPORTED,
        RETRIES: OPT_NOT_SUPPORTED,
        THREADS: OPT_NOT_SUPPORTED,
        TIMEOUT: OPT_NOT_SUPPORTED,
        USER_AGENT: OPT_NOT_SUPPORTED,
    }
    install_version = '1.3.0'
    install_cmd = 'go install -v github.com/hueristiq/xurlfind3r/cmd/xurlfind3r@[install_version]'
    github_handle = 'hueristiq/xurlfind3r'
    proxychains = False
    proxy_socks5 = True
    proxy_http = True
    profile = 'io'

    @staticmethod
    def before_init(self):
        """Rewrite full-URL inputs to their netloc, since xurlfind3r takes domains (-d)."""
        # Renamed loop variable from `input` (shadowed the builtin).
        for idx, target in enumerate(self.inputs):
            if validators.url(target):
                self.inputs[idx] = urlparse(target).netloc

    @staticmethod
    def on_init(self):
        """Set up per-run state used to deduplicate noisy parameterized URLs."""
        self.max_param_occurrences = self.get_opt_value('max_param_occurrences')
        # seen_params[base_url][param] -> number of URLs seen with this base URL + param name
        self.seen_params = defaultdict(lambda: defaultdict(int))

    @staticmethod
    def on_json_loaded(self, item):
        """Yield a Url per JSON item, discarding URLs whose query params already
        occurred more than max_param_occurrences times for the same base URL."""
        url = item['url']
        parsed_url = urlparse(url)
        base_url = urlunparse(parsed_url._replace(query="", fragment=""))  # Remove query & fragment
        query_params = parse_qs(parsed_url.query)
        current_params = set(query_params.keys())
        for param in current_params:
            self.seen_params[base_url][param] += 1
            if self.seen_params[base_url][param] > int(self.max_param_occurrences):
                return
        yield Url(url=item['url'], host=parsed_url.hostname, extra_data={'source': item['source']})
secator/template.py ADDED
@@ -0,0 +1,294 @@
1
+ import yaml
2
+
3
+ from collections import OrderedDict
4
+ from dotmap import DotMap
5
+ from pathlib import Path
6
+
7
+ from secator.output_types import Error
8
+ from secator.rich import console
9
+
10
+
11
class TemplateLoader(DotMap):
    """DotMap-based loader for secator YAML templates.

    Accepts a dict, a YAML string, a filesystem path (str or Path), or a
    template name of the form '<type>/<name>' (e.g. 'workflow/host_recon').
    """

    def __init__(self, input=None, name=None, **kwargs):
        # `input=None` avoids the mutable-default-argument pitfall ({} shared across calls).
        input = input if input is not None else {}
        config = {}
        if name:
            split = name.split('/')
            if len(split) != 2:
                console.print(Error(message=f'Cannot load {name}: you should specify a type for the template when loading by name (e.g. workflow/<workflow_name>)'))  # noqa: E501
            else:
                _type, _name = tuple(split)
                if _type.endswith('s'):
                    _type = _type[:-1]
                from secator.loader import find_templates
                config = next((p for p in find_templates() if p['type'] == _type and p['name'] == _name), None)
                if not config:
                    console.print(Error(message=f'Template {_type}/{_name} not found in loaded templates'))
                    config = {}
        elif isinstance(input, dict):
            config = input
        elif isinstance(input, Path) or Path(input).exists():
            # Coerce to Path: previously a str path reaching _load_from_path
            # crashed on path.exists() (str has no .exists()).
            config = self._load_from_path(Path(input)) or {}
            config['_path'] = str(input)
        elif isinstance(input, str):
            config = self._load(input)
        # Always initialize the DotMap (previously error branches returned before
        # super().__init__, leaving a broken, uninitialized instance).
        super().__init__(config, **kwargs)

    def _load_from_path(self, path):
        """Load YAML from a filesystem path.

        Returns an empty dict (after printing an Error) if the path is missing,
        so callers can safely subscript the result.
        """
        path = Path(path)
        if not path.exists():
            console.print(Error(message=f'Config path {path} does not exist'))
            return {}
        with path.open('r') as f:
            return self._load(f.read())

    def _load(self, input):
        """Parse a YAML string into a Python object.

        NOTE(review): yaml.Loader allows arbitrary Python object construction;
        acceptable for local trusted templates, but do not feed it untrusted input.
        """
        return yaml.load(input, Loader=yaml.Loader)

    def print(self):
        """Print config as highlighted yaml."""
        config = self.toDict()
        _path = config.pop('_path', None)  # don't render the internal path key as YAML
        if _path:
            console.print(f'[italic green]{_path}[/]\n')
        yaml_str = yaml.dump(config, indent=4, sort_keys=False)
        from rich.syntax import Syntax
        yaml_highlight = Syntax(yaml_str, 'yaml', line_numbers=True)
        console.print(yaml_highlight)
56
+
57
+
58
def get_short_id(id_str, config_name):
    """Remove config name prefix from ID string if present.

    Args:
        id_str: The ID string to process
        config_name: The config name prefix to remove

    Returns:
        str: ID string with prefix removed, or original string if no prefix found
    """
    # Strip only a true leading '<config_name>.' prefix. The previous
    # implementation used str.replace, which removed EVERY occurrence of
    # 'config_name.' anywhere in the string (e.g. 'a.b.a.c' -> 'b.c' for
    # config_name='a'), and startswith(config_name) also matched partial
    # word prefixes like 'hostile' for config_name 'host'.
    prefix = config_name + '.'
    if id_str.startswith(prefix):
        return id_str[len(prefix):]
    return id_str
71
+
72
+
73
def get_config_options(config, exec_opts=None, output_opts=None, type_mapping=None):
    """Extract and normalize command-line options from configuration.

    Walks the runner tree built from `config`, merging global execution/output
    options, YAML-defined config options, workflow options and per-task class
    options into a single flat OrderedDict. Option names that collide across
    nodes are renamed (prefixed) to disambiguate them.

    Args:
        config: Configuration object (task, workflow, or scan)
        exec_opts: Execution options dictionary (optional)
        output_opts: Output options dictionary (optional)
        type_mapping: Type mapping for option types (optional)

    Returns:
        OrderedDict: Normalized options with metadata
    """
    # Local imports to avoid circular imports between template/tree/runners modules.
    from secator.tree import build_runner_tree, walk_runner_tree, get_flat_node_list
    from secator.utils import debug
    from secator.runners.task import Task

    # Task config created on-the-fly: wrap a bare task in a one-task workflow
    # so the rest of the function only deals with workflow/scan trees.
    if config.type == 'task':
        config = TemplateLoader({
            'name': config.name,
            'type': 'workflow',
            'tasks': {config.name: {}}
        })

    # Get main info
    tree = build_runner_tree(config)
    nodes = get_flat_node_list(tree)
    exec_opts = exec_opts or {}
    output_opts = output_opts or {}
    type_mapping = type_mapping or {}
    all_opts = OrderedDict({})

    # Log current config and tree
    debug(f'[magenta]{config.name}[/]', sub=f'cli.{config.name}')
    debug(f'{tree.render_tree()}', sub=f'cli.{config.name}')

    # Process global execution options
    for opt in exec_opts:
        opt_conf = exec_opts[opt].copy()
        opt_conf['prefix'] = 'Execution'
        all_opts[opt] = opt_conf

    # Process global output options
    for opt in output_opts:
        opt_conf = output_opts[opt].copy()
        opt_conf['prefix'] = 'Output'
        all_opts[opt] = opt_conf

    # Process config options
    # a.k.a:
    # - default YAML config options, defined in default_options: key in the runner YAML config
    # - new options defined in options: key in the runner YAML config
    config_opts_defaults = config.default_options.toDict()
    config_opts = config.options.toDict()
    for k, v in config_opts.items():
        all_opts[k] = v
        all_opts[k]['prefix'] = f'{config.type}'

    def find_same_opts(node, nodes, opt_name, check_class_opts=False):
        """Find options with the same name that are defined in other nodes of the same type."""
        same_opts = []
        for _ in nodes:
            # Skip the node itself and nodes of a different type.
            if _.id == node.id or _.type != node.type:
                continue
            node_task = None
            if check_class_opts:
                # Compare against options declared on the task class itself.
                node_task = Task.get_task_class(_.name)
                if opt_name not in node_task.opts:
                    continue
                opts_value = node_task.opts[opt_name]
            else:
                # Compare against options set on the node in the YAML config.
                if opt_name not in _.opts:
                    continue
                opts_value = _.opts[opt_name]
            name_str = 'nodes' if not check_class_opts else 'tasks'
            debug(f'[bold]{config.name}[/] -> [bold blue]{node.id}[/] -> [bold green]{opt_name}[/] found in other {name_str} [bold blue]{_.id}[/]', sub=f'cli.{config.name}.same', verbose=True)  # noqa: E501
            same_opts.append({
                'id': _.id,
                'task_name': node_task.__name__ if node_task else None,
                'name': _.name,
                'value': opts_value,
            })
        if same_opts:
            other_tasks = ", ".join([f'[bold yellow]{_["id"]}[/]' for _ in same_opts])
            debug(f'[bold]{config.name}[/] -> [bold blue]{node.id}[/] -> [bold green]{opt_name}[/] found in {len(same_opts)} other {name_str}: {other_tasks}', sub=f'cli.{config.name}.same', verbose=True)  # noqa: E501
        return same_opts

    def process_node(node):
        """Merge the options contributed by a single tree node into all_opts."""
        debug(f'[bold]{config.name}[/] -> [bold blue]{node.id}[/] ({node.type})', sub=f'cli.{config.name}')

        # Group/scan nodes carry no options of their own.
        if node.type not in ['task', 'workflow']:
            return

        # Process workflow options
        # a.k.a the new options defined in options: key in the workflow YAML config;
        if node.type == 'workflow':
            for k, v in node.opts.items():
                same_opts = find_same_opts(node, nodes, k)
                conf = v.copy()
                opt_name = k
                conf['prefix'] = f'{node.type.capitalize()} {node.name}'
                if len(same_opts) > 0:  # opt name conflict, change opt name
                    opt_name = f'{node.name}.{k}'
                    debug(f'[bold]{config.name}[/] -> [bold blue]{node.id}[/] -> [bold green]{k}[/] renamed to [bold green]{opt_name}[/] [dim red](duplicated)[/]', sub=f'cli.{config.name}')  # noqa: E501
                all_opts[opt_name] = conf
            return

        # Process task options
        # a.k.a task options defined in their respective task classes
        cls = Task.get_task_class(node.name)
        task_opts = cls.opts.copy()
        task_opts_meta = getattr(cls, 'meta_opts', {}).copy()
        task_opts_all = {**task_opts, **task_opts_meta}
        node_opts = node.opts or {}
        ancestor_opts_defaults = node.ancestor.default_opts or {}
        node_id_str = get_short_id(node.id, config.name)

        for k, v in task_opts_all.items():
            conf = v.copy()
            conf['prefix'] = f'Task {node.name}'
            # Use explicit None checks to properly handle boolean False values
            # Precedence: node-level opts > ancestor default_opts > config defaults.
            default_from_config = next(
                (item for item in [node_opts.get(k), ancestor_opts_defaults.get(k), config_opts_defaults.get(k)]
                 if item is not None),
                None
            )
            opt_name = k
            same_opts = find_same_opts(node, nodes, k)

            # Found a default in YAML config, either in task options, or workflow options, or config options
            if default_from_config is not None:
                conf['required'] = False
                conf['default'] = default_from_config
                conf['default_from'] = node_id_str
                if node_opts.get(k) is not None:
                    conf['default_from'] = node_id_str
                    conf['prefix'] = 'Config'
                elif ancestor_opts_defaults.get(k) is not None:
                    conf['default_from'] = get_short_id(node.ancestor.id, config.name)
                    conf['prefix'] = f'{node.ancestor.type.capitalize()} {node.ancestor.name}'
                elif config_opts_defaults.get(k) is not None:
                    conf['default_from'] = config.name
                    conf['prefix'] = 'Config'
                # Map the YAML value through the task's opt_value_map (callable or constant).
                mapped_value = cls.opt_value_map.get(opt_name)
                if mapped_value:
                    if callable(mapped_value):
                        default_from_config = mapped_value(default_from_config)
                    else:
                        default_from_config = mapped_value
                    conf['default'] = default_from_config
                # Check for same opts in both config and class definitions to determine if we need to rename
                same_opts_class = find_same_opts(node, nodes, k, check_class_opts=True)
                if len(same_opts) > 0 or len(same_opts_class) > 0 or k in task_opts_meta:  # change opt name to avoid conflict
                    conf['prefix'] = 'Config'
                    opt_name = f'{conf["default_from"]}.{k}'
                    debug(f'[bold]{config.name}[/] -> [bold blue]{node.id}[/] -> [bold green]{k}[/] renamed to [bold green]{opt_name}[/] [dim red](default set in config)[/]', sub=f'cli.{config.name}')  # noqa: E501

            # Standard meta options like rate_limit, delay, proxy, etc...
            elif k in task_opts_meta:
                conf['prefix'] = 'Meta'
                debug(f'[bold]{config.name}[/] -> [bold blue]{node.id}[/] -> [bold green]{k}[/] changed prefix to [bold cyan]Meta[/]', sub=f'cli.{config.name}')  # noqa: E501

            # Task-specific options
            elif k in task_opts:
                same_opts = find_same_opts(node, nodes, k, check_class_opts=True)
                if len(same_opts) > 0:
                    # Check if any node has this option explicitly set in config
                    # If so, skip adding shared version as those nodes will have their own prefixed versions
                    same_opt_ids = [so['id'] for so in same_opts]
                    relevant_nodes = [node] + [n for n in nodes if n.id in same_opt_ids]
                    # debug(f'relevant nodes: {[n.name for n in relevant_nodes]}', sub=f'cli.{config.name}')
                    has_config_override = False
                    for node_to_check in relevant_nodes:
                        if hasattr(node_to_check.opts, 'get'):
                            if node_to_check.opts.get(k) is not None:
                                has_config_override = True
                                # debug(f'has config override: {has_config_override}: {node_to_check.opts.get(k)}', sub=f'cli.{config.name}')
                                break
                        elif k in node_to_check.opts:
                            has_config_override = True
                            break

                    if not has_config_override:
                        # Expose one shared option applying to every task that declares it.
                        applies_to = set([node.name] + [_['name'] for _ in same_opts])
                        conf['applies_to'] = applies_to
                        conf['prefix'] = 'Shared task'
                        debug(f'[bold]{config.name}[/] -> [bold blue]{node.id}[/] -> [bold green]{k}[/] changed prefix to [bold cyan]Common[/] [dim red](duplicated {len(same_opts)} times)[/]', sub=f'cli.{config.name}')  # noqa: E501
                    else:
                        # Skip this option as it will be handled by the config override logic
                        debug(f'[bold]{config.name}[/] -> [bold blue]{node.id}[/] -> [bold green]{k}[/] skipped [dim red](has config override)[/]', sub=f'cli.{config.name}')  # noqa: E501
                        opt_name = f'{node.name}-{k}'
                        conf['applies_to'] = set([node.name])
                        conf['prefix'] = 'Config'
                        # continue
            else:
                raise ValueError(f'Unknown option {k} for task {node.id}')
            all_opts[opt_name] = conf

    walk_runner_tree(tree, process_node)

    # Normalize all options
    debug('[bold yellow3]All opts processed. Showing defaults:[/]', sub=f'cli.{config.name}')
    normalized_opts = OrderedDict({})
    for k, v in all_opts.items():
        v['reverse'] = False
        v['show_default'] = True
        default_from = v.get('default_from')
        default = v.get('default', False)
        # Boolean opts defaulting to True get a reversed (--no-xxx style) flag.
        if isinstance(default, bool) and default is True:
            v['reverse'] = True
        if type_mapping and 'type' in v:
            v['type'] = type_mapping.get(v['type'], str)
        short = v.get('short')
        # CLI-friendly name: dots/underscores/slashes become dashes.
        k = k.replace('.', '-').replace('_', '-').replace('/', '-')
        from_str = default_from.replace('.', '-').replace('_', '-').replace('/', '-') if default_from else None
        if not default_from or from_str not in k:
            v['short'] = short if short else None
        else:
            # Prefix the short flag the same way the long name was prefixed.
            v['short'] = f'{from_str}-{short}' if short else None
        debug(f'\t[bold]{k}[/] -> [bold green]{v.get("default", "N/A")}[/] [dim red](default from {v.get("default_from", "N/A")})[/]', sub=f'cli.{config.name}')  # noqa: E501
        normalized_opts[k] = v
    return normalized_opts
secator/thread.py ADDED
@@ -0,0 +1,24 @@
1
+ import threading
2
+
3
+ from secator.output_types import Error
4
+
5
+
6
class Thread(threading.Thread):
    """A thread that returns errors in their join() method as secator.output_types.Error."""

    def __init__(self, *args, **kwargs):
        """Initialize the thread and the slot holding a captured error."""
        super().__init__(*args, **kwargs)
        self.error = None

    def run(self):
        """Run the thread target, capturing any exception as an Error.

        Matches threading.Thread.run() semantics: a None target is a no-op
        (the previous hasattr() check would call None and record a bogus
        TypeError), and references to target/args/kwargs are released when
        the thread finishes.
        """
        try:
            if self._target is not None:
                self._target(*self._args, **self._kwargs)
        except Exception as e:
            self.error = Error.from_exception(e)
        finally:
            # Mirror the stdlib: drop references so the target and its
            # arguments can be garbage-collected after the thread exits.
            del self._target, self._args, self._kwargs

    def join(self, *args, **kwargs):
        """Wait for the thread and return the captured Error, or None on success."""
        super().join(*args, **kwargs)
        if self.error:
            return self.error
        return None
secator/tree.py ADDED
@@ -0,0 +1,196 @@
1
+ from typing import List, Optional, Union
2
+ from secator.template import TemplateLoader
3
+ from dotmap import DotMap
4
+
5
+
6
# Default rich-markup renderers for each runner-tree node type.
# Keys match TaskNode.type values ('group', 'task', 'workflow', 'scan');
# 'condition' renders an optional `if` clause, or '' when there is none.
DEFAULT_RENDER_OPTS = {
    'group': lambda x: f"[dim]group {x.name.split('/')[-1] if '/' in x.name else ''}[/]",
    'task': lambda x: f"[bold gold3]:wrench: {x.name}[/]",
    'workflow': lambda x: f"[bold dark_orange3]:gear: {x.name}[/]",
    'scan': lambda x: f"[bold red]:magnifying_glass_tilted_left: {x.name}[/]",
    'condition': lambda x: f"[dim cyan]# if {x}[/]" if x else ''
}
13
+
14
+
15
class TaskNode:
    """A single node (task, group, workflow or scan) in a runner tree."""

    def __init__(self, name: str, type_: str, id: str, opts: Optional[dict] = None, default_opts: Optional[dict] = None, condition: Optional[str] = None, description: Optional[str] = None, parent=None, ancestor=None):  # noqa: E501
        self.name = name
        self.type = type_
        self.id = id
        # Fall back to fresh empty dicts so callers may pass None.
        self.opts = opts if opts else {}
        self.default_opts = default_opts if default_opts else {}
        self.description = description
        self.condition = condition
        # Tree links: direct parent, nearest workflow/scan ancestor, ordered children.
        self.children: List[TaskNode] = []
        self.parent = parent
        self.ancestor = ancestor

    def add_child(self, child: 'TaskNode') -> None:
        """Append a child node to this node."""
        self.children.append(child)

    def remove(self):
        """Detach this node from its parent's children (no-op when parentless)."""
        if not self.parent:
            return
        self.parent.children.remove(self)

    def __str__(self) -> str:
        """Node name, suffixed with its 'if' condition when one is set."""
        return f"{self.name} # if {self.condition}" if self.condition else self.name
43
+
44
+
45
class RunnerTree:
    """Represents a tree of workflow/scan tasks."""
    def __init__(self, name: str, type_: str, render_opts: Optional[dict] = DEFAULT_RENDER_OPTS):
        # NOTE(review): DEFAULT_RENDER_OPTS is a shared module-level dict used as a
        # default argument — render_opts must be treated as read-only by callers.
        self.name = name
        self.type = type_
        self.root_nodes: List[TaskNode] = []
        self.render_opts = render_opts

    def add_root_node(self, node: TaskNode) -> None:
        """Add a root-level node to the tree."""
        self.root_nodes.append(node)

    def render_tree(self) -> str:
        """Render the tree as a console-friendly string.

        Returns:
            str: One line per node, with rich markup and box-drawing branch glyphs.
        """
        lines = []
        for node in self.root_nodes:
            # Pick the type-specific renderer, falling back to plain str().
            node_str = self.render_opts.get(node.type, lambda x: str(x))(node)
            condition_str = self.render_opts.get('condition', lambda x: str(x) if x else '')(node.condition)
            if condition_str:
                node_str = f"{node_str} {condition_str}"
            lines.append(node_str)
            self._render_children(node, "", lines)
        return "\n".join(lines)

    def _render_children(self, node: TaskNode, prefix: str, lines: List[str]) -> None:
        """Helper method to recursively render child nodes.

        Args:
            node: Node whose children to render.
            prefix: Accumulated indentation/guide string for this depth.
            lines: Output list, appended to in place.
        """
        children_count = len(node.children)
        for i, child in enumerate(node.children):
            is_last = i == children_count - 1
            branch = "└─ " if is_last else "├─ "
            child_str = self.render_opts.get(child.type, lambda x: str(x))
            condition_str = self.render_opts.get('condition', lambda x: str(x) if x else '')(child.condition)
            render_str = f"{prefix}{branch}{child_str(child)}"
            if child.description:
                render_str += f" - [dim]{child.description}[/]"
            if condition_str:
                render_str += f" {condition_str}"
            lines.append(render_str)
            if child.children:
                # Keep the vertical guide only while more siblings follow.
                new_prefix = prefix + (" " if is_last else "│ ")
                self._render_children(child, new_prefix, lines)

    def get_subtree(self, node: TaskNode) -> 'RunnerTree':
        """Get the subtree of this node.

        NOTE(review): child nodes are shared with the original tree (not copied),
        so mutating them affects both trees.
        """
        subtree = RunnerTree(node.name, node.type)
        for child in node.children:
            subtree.add_root_node(child)
        return subtree
93
+
94
+
95
def build_runner_tree(config: DotMap, condition: Optional[str] = None, parent: Optional[TaskNode] = None, ancestor: Optional[TaskNode] = None) -> Union[RunnerTree, str]:  # noqa: E501
    """
    Build a tree representation from a runner config.

    Args:
        config (DotMap): The runner config.
        condition (str, optional): 'if' condition attached to the root node (used when
            a workflow is nested inside a scan).
        parent (TaskNode, optional): Parent node when building a nested subtree.
        ancestor (TaskNode, optional): Nearest workflow/scan ancestor node.

    Returns:
        RunnerTree: the built tree (the declared 'error message string' return is
        not produced by the current implementation).
    """
    tree = RunnerTree(config.name, config.type)

    if config.type == 'workflow':
        root_node = TaskNode(config.name, 'workflow', config.name, opts=config.options, default_opts=config.default_options, condition=condition, parent=parent, ancestor=ancestor)  # noqa: E501
        tree.add_root_node(root_node)

        # Add tasks to the tree
        # NOTE(review): `id` shadows the builtin, and `condition` is reused as a loop
        # variable, shadowing the function parameter after the first iteration.
        for task_name, task_details in config.tasks.items():
            id = f'{config.name}.{task_name}'
            # Keys starting with '_group' hold parallel sub-tasks one level down.
            if task_name.startswith('_group'):
                group_node = TaskNode(task_name, 'group', id, parent=root_node, ancestor=root_node)
                root_node.add_child(group_node)
                for subtask_name, subtask_details in task_details.items():
                    subtask_details = subtask_details or {}
                    id = f'{config.name}.{subtask_name}'
                    condition = subtask_details.get('if')
                    description = subtask_details.get('description')
                    subtask_node = TaskNode(subtask_name, 'task', id, opts=subtask_details, condition=condition, description=description, parent=group_node, ancestor=root_node)  # noqa: E501
                    group_node.add_child(subtask_node)
            else:
                condition = task_details.get('if') if task_details else None
                description = task_details.get('description') if task_details else None
                task_node = TaskNode(task_name, 'task', id, opts=task_details, condition=condition, description=description, parent=root_node, ancestor=root_node)  # noqa: E501
                root_node.add_child(task_node)

    elif config.type == 'scan':
        id = f'{config.name}'
        root_node = TaskNode(config.name, 'scan', id, opts=config.options, parent=parent)
        tree.add_root_node(root_node)

        # Add workflows to the tree
        for workflow_name, workflow_details in config.workflows.items():
            id = f'{config.name}.{workflow_name}'
            condition = workflow_details.get('if') if isinstance(workflow_details, dict) else None
            # Workflow keys may carry a '/<alias>' suffix; load by the base name
            # but keep the full key as the node name.
            split_name = workflow_name.split('/')
            wf_name = split_name[0]
            wf_config = TemplateLoader(name=f'workflow/{wf_name}')
            wf_config.name = workflow_name
            # Recursively build the nested workflow subtree and graft its roots here.
            wf_tree = build_runner_tree(wf_config, condition, parent=root_node, ancestor=root_node)
            if isinstance(wf_tree, RunnerTree):
                for wf_root_node in wf_tree.root_nodes:
                    root_node.add_child(wf_root_node)

    elif config.type == 'task':
        # A bare task becomes a single-node tree.
        root_node = TaskNode(config.name, 'task', config.name, opts={}, parent=parent, ancestor=ancestor)
        tree.add_root_node(root_node)

    return tree
153
+
154
+
155
def walk_runner_tree(tree: RunnerTree, visit_func):
    """
    Walk the RunnerTree and visit each node in preorder (node before its children).

    Args:
        tree (RunnerTree): The RunnerTree to walk.
        visit_func (function): A function to call on each node.
    """
    # Iterative preorder traversal over an explicit worklist; visit order is
    # identical to the recursive version (roots in order, then descendants).
    stack = list(reversed(tree.root_nodes))
    while stack:
        node = stack.pop()
        visit_func(node)
        stack.extend(reversed(node.children))
165
+
166
+
167
def _walk_node(node: TaskNode, visit_func):
    """
    Visit `node` and all of its descendants in preorder.

    Args:
        node (TaskNode): The node to walk.
        visit_func (function): A function to call on each node.
    """
    # Worklist-based preorder: pop the front, then queue children ahead of
    # any remaining siblings — same visit order as the recursive form.
    pending = [node]
    while pending:
        current = pending.pop(0)
        visit_func(current)
        pending = current.children + pending
178
+
179
+
180
def get_flat_node_list(tree: RunnerTree) -> List[TaskNode]:
    """
    Get the flat list of all nodes in the RunnerTree.

    Args:
        tree (RunnerTree): The RunnerTree to traverse.

    Returns:
        List[TaskNode]: The list of all nodes in the tree, in traversal order.
    """
    collected: List[TaskNode] = []
    # The bound append method serves directly as the visitor callback.
    walk_runner_tree(tree, collected.append)
    return collected