secator 0.22.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- secator/.gitignore +162 -0
- secator/__init__.py +0 -0
- secator/celery.py +453 -0
- secator/celery_signals.py +138 -0
- secator/celery_utils.py +320 -0
- secator/cli.py +2035 -0
- secator/cli_helper.py +395 -0
- secator/click.py +87 -0
- secator/config.py +670 -0
- secator/configs/__init__.py +0 -0
- secator/configs/profiles/__init__.py +0 -0
- secator/configs/profiles/aggressive.yaml +8 -0
- secator/configs/profiles/all_ports.yaml +7 -0
- secator/configs/profiles/full.yaml +31 -0
- secator/configs/profiles/http_headless.yaml +7 -0
- secator/configs/profiles/http_record.yaml +8 -0
- secator/configs/profiles/insane.yaml +8 -0
- secator/configs/profiles/paranoid.yaml +8 -0
- secator/configs/profiles/passive.yaml +11 -0
- secator/configs/profiles/polite.yaml +8 -0
- secator/configs/profiles/sneaky.yaml +8 -0
- secator/configs/profiles/tor.yaml +5 -0
- secator/configs/scans/__init__.py +0 -0
- secator/configs/scans/domain.yaml +31 -0
- secator/configs/scans/host.yaml +23 -0
- secator/configs/scans/network.yaml +30 -0
- secator/configs/scans/subdomain.yaml +27 -0
- secator/configs/scans/url.yaml +19 -0
- secator/configs/workflows/__init__.py +0 -0
- secator/configs/workflows/cidr_recon.yaml +48 -0
- secator/configs/workflows/code_scan.yaml +29 -0
- secator/configs/workflows/domain_recon.yaml +46 -0
- secator/configs/workflows/host_recon.yaml +95 -0
- secator/configs/workflows/subdomain_recon.yaml +120 -0
- secator/configs/workflows/url_bypass.yaml +15 -0
- secator/configs/workflows/url_crawl.yaml +98 -0
- secator/configs/workflows/url_dirsearch.yaml +62 -0
- secator/configs/workflows/url_fuzz.yaml +68 -0
- secator/configs/workflows/url_params_fuzz.yaml +66 -0
- secator/configs/workflows/url_secrets_hunt.yaml +23 -0
- secator/configs/workflows/url_vuln.yaml +91 -0
- secator/configs/workflows/user_hunt.yaml +29 -0
- secator/configs/workflows/wordpress.yaml +38 -0
- secator/cve.py +718 -0
- secator/decorators.py +7 -0
- secator/definitions.py +168 -0
- secator/exporters/__init__.py +14 -0
- secator/exporters/_base.py +3 -0
- secator/exporters/console.py +10 -0
- secator/exporters/csv.py +37 -0
- secator/exporters/gdrive.py +123 -0
- secator/exporters/json.py +16 -0
- secator/exporters/table.py +36 -0
- secator/exporters/txt.py +28 -0
- secator/hooks/__init__.py +0 -0
- secator/hooks/gcs.py +80 -0
- secator/hooks/mongodb.py +281 -0
- secator/installer.py +694 -0
- secator/loader.py +128 -0
- secator/output_types/__init__.py +49 -0
- secator/output_types/_base.py +108 -0
- secator/output_types/certificate.py +78 -0
- secator/output_types/domain.py +50 -0
- secator/output_types/error.py +42 -0
- secator/output_types/exploit.py +58 -0
- secator/output_types/info.py +24 -0
- secator/output_types/ip.py +47 -0
- secator/output_types/port.py +55 -0
- secator/output_types/progress.py +36 -0
- secator/output_types/record.py +36 -0
- secator/output_types/stat.py +41 -0
- secator/output_types/state.py +29 -0
- secator/output_types/subdomain.py +45 -0
- secator/output_types/tag.py +69 -0
- secator/output_types/target.py +38 -0
- secator/output_types/url.py +112 -0
- secator/output_types/user_account.py +41 -0
- secator/output_types/vulnerability.py +101 -0
- secator/output_types/warning.py +30 -0
- secator/report.py +140 -0
- secator/rich.py +130 -0
- secator/runners/__init__.py +14 -0
- secator/runners/_base.py +1240 -0
- secator/runners/_helpers.py +218 -0
- secator/runners/celery.py +18 -0
- secator/runners/command.py +1178 -0
- secator/runners/python.py +126 -0
- secator/runners/scan.py +87 -0
- secator/runners/task.py +81 -0
- secator/runners/workflow.py +168 -0
- secator/scans/__init__.py +29 -0
- secator/serializers/__init__.py +8 -0
- secator/serializers/dataclass.py +39 -0
- secator/serializers/json.py +45 -0
- secator/serializers/regex.py +25 -0
- secator/tasks/__init__.py +8 -0
- secator/tasks/_categories.py +487 -0
- secator/tasks/arjun.py +113 -0
- secator/tasks/arp.py +53 -0
- secator/tasks/arpscan.py +70 -0
- secator/tasks/bbot.py +372 -0
- secator/tasks/bup.py +118 -0
- secator/tasks/cariddi.py +193 -0
- secator/tasks/dalfox.py +87 -0
- secator/tasks/dirsearch.py +84 -0
- secator/tasks/dnsx.py +186 -0
- secator/tasks/feroxbuster.py +93 -0
- secator/tasks/ffuf.py +135 -0
- secator/tasks/fping.py +85 -0
- secator/tasks/gau.py +102 -0
- secator/tasks/getasn.py +60 -0
- secator/tasks/gf.py +36 -0
- secator/tasks/gitleaks.py +96 -0
- secator/tasks/gospider.py +84 -0
- secator/tasks/grype.py +109 -0
- secator/tasks/h8mail.py +75 -0
- secator/tasks/httpx.py +167 -0
- secator/tasks/jswhois.py +36 -0
- secator/tasks/katana.py +203 -0
- secator/tasks/maigret.py +87 -0
- secator/tasks/mapcidr.py +42 -0
- secator/tasks/msfconsole.py +179 -0
- secator/tasks/naabu.py +85 -0
- secator/tasks/nmap.py +487 -0
- secator/tasks/nuclei.py +151 -0
- secator/tasks/search_vulns.py +225 -0
- secator/tasks/searchsploit.py +109 -0
- secator/tasks/sshaudit.py +299 -0
- secator/tasks/subfinder.py +48 -0
- secator/tasks/testssl.py +283 -0
- secator/tasks/trivy.py +130 -0
- secator/tasks/trufflehog.py +240 -0
- secator/tasks/urlfinder.py +100 -0
- secator/tasks/wafw00f.py +106 -0
- secator/tasks/whois.py +34 -0
- secator/tasks/wpprobe.py +116 -0
- secator/tasks/wpscan.py +202 -0
- secator/tasks/x8.py +94 -0
- secator/tasks/xurlfind3r.py +83 -0
- secator/template.py +294 -0
- secator/thread.py +24 -0
- secator/tree.py +196 -0
- secator/utils.py +922 -0
- secator/utils_test.py +297 -0
- secator/workflows/__init__.py +29 -0
- secator-0.22.0.dist-info/METADATA +447 -0
- secator-0.22.0.dist-info/RECORD +150 -0
- secator-0.22.0.dist-info/WHEEL +4 -0
- secator-0.22.0.dist-info/entry_points.txt +2 -0
- secator-0.22.0.dist-info/licenses/LICENSE +60 -0
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
import os
|
|
2
|
+
|
|
3
|
+
from secator.output_types import Error
|
|
4
|
+
from secator.utils import deduplicate, debug
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def run_extractors(results, opts, inputs=None, ctx=None, dry_run=False):
	"""Run extractors and merge extracted values with option dict.

	Args:
		results (list): List of results.
		opts (dict): Options.
		inputs (list): Original inputs.
		ctx (dict): Context.
		dry_run (bool): Dry run.

	Returns:
		tuple: inputs, options, errors.
	"""
	inputs = inputs if inputs is not None else []
	ctx = ctx if ctx is not None else {}

	# Extractor options are flagged with a trailing underscore in the opts dict
	extractors = {name: defs for name, defs in opts.items() if name.endswith('_')}

	if dry_run:
		# Dry run: render placeholder strings instead of resolving extractors
		target_defs = {}
		other_defs = {}
		for name, defs in extractors.items():
			if name.rstrip('_') == 'targets':
				target_defs[name] = defs
			else:
				other_defs[name] = defs
		if target_defs:
			placeholders = [fmt_extractor(d) for defs in target_defs.values() for d in defs]
			inputs = [" && ".join(placeholders)]
		if other_defs:
			opts.update({
				name.rstrip('_'): [" && ".join(fmt_extractor(d) for d in defs)]
				for name, defs in other_defs.items()
			})
		return inputs, opts, []

	errors = []
	resolved_inputs = []
	has_target_extractor = False
	resolved_opts = {}

	for raw_name, defs in extractors.items():
		name = raw_name.rstrip('_')
		ctx['key'] = name
		values, errs = extract_from_results(results, defs, ctx=ctx)
		errors.extend(errs)
		if name == 'targets':
			# 'targets' extractors replace the runner inputs instead of opts
			has_target_extractor = True
			resolved_inputs.extend(deduplicate(values))
		else:
			resolved = deduplicate(values)
			if resolved:
				resolved_opts[name] = resolved
				opts[name] = resolved

	if has_target_extractor:
		debug('computed_inputs', obj=resolved_inputs, sub='extractors')
		inputs = resolved_inputs
	if resolved_opts:
		debug('computed_opts', obj=resolved_opts, sub='extractors')
	return inputs, opts, errors
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def fmt_extractor(extractor):
	"""Format extractor.

	Args:
		extractor (dict / str): extractor definition.

	Returns:
		str: formatted extractor.
	"""
	parsed = parse_extractor(extractor)
	if not parsed:
		return '<DYNAMIC[INVALID_EXTRACTOR]>'
	etype, field, condition = parsed
	desc = f'{etype}.{field}'
	if condition:
		# Drop quote characters so the rendered condition stays readable
		stripped = condition.replace("'", '').replace('"', '')
		desc = f'{desc} if {stripped}'
	return f'<DYNAMIC({desc})>'
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def extract_from_results(results, extractors, ctx=None):
	"""Extract sub extractors from list of results dict.

	Args:
		results (list): List of dict.
		extractors (list): List of extractors to extract from.
		ctx (dict, optional): Context.

	Returns:
		tuple: List of extracted results (flat), list of errors.
	"""
	ctx = {} if ctx is None else ctx
	extracted = []
	errors = []
	key = ctx.get('key', 'unknown')
	ancestor_id = ctx.get('ancestor_id', None)

	# Accept a single extractor definition or a list of them
	if not isinstance(extractors, list):
		extractors = [extractors]

	for extractor in extractors:
		try:
			matches = process_extractor(results, extractor, ctx=ctx)
			msg = f'extracted [bold]{len(matches)}[/] / [bold]{len(results)}[/] for key [bold]{key}[/] with extractor [bold]{fmt_extractor(extractor)}[/]'  # noqa: E501
			if ancestor_id:
				msg = f'{msg} ([bold]ancestor_id[/]: {ancestor_id})'
			debug(msg, sub='extractors')
			extracted.extend(matches)
		except Exception as exc:
			# A failing extractor is reported as an Error result, not raised
			errors.append(Error.from_exception(exc))

	if key == 'targets':
		# Expose resolved targets to downstream extractors via the shared context
		ctx['targets'] = extracted
	return extracted, errors
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def parse_extractor(extractor):
	"""Parse extractor.

	Args:
		extractor (dict / str): extractor definition.

	Returns:
		tuple|None: type, field, condition or None if invalid.
	"""
	# Extractors come either as a dict or as a '<type>.<field>' string shortcut
	if isinstance(extractor, dict):
		return extractor['type'], extractor.get('field'), extractor.get('condition')
	parts = extractor.split('.')
	if len(parts) != 2:
		# String shortcuts must have exactly one dot separator
		return None
	etype, field = parts
	return etype, field, None
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def process_extractor(results, extractor, ctx=None):
	"""Process extractor.

	Filters `results` down to items matching the extractor's type (and optional
	condition / ancestor_id), then optionally formats each kept item through the
	extractor's field template.

	Args:
		results (list): List of results.
		extractor (dict / str): extractor definition.
		ctx (dict, optional): Context; may carry 'ancestor_id' and 'key', and is
			temporarily mutated with 'item' / type-named entries during condition
			evaluation.

	Returns:
		list: List of extracted results.
	"""
	if ctx is None:
		ctx = {}
	# debug('before extract', obj={'results_count': len(results), 'extractor': extractor, 'key': ctx.get('key')}, sub='extractor')  # noqa: E501
	ancestor_id = ctx.get('ancestor_id')
	key = ctx.get('key')

	# Parse extractor, it can be a dict or a string (shortcut)
	parsed_extractor = parse_extractor(extractor)
	if not parsed_extractor:
		# Unparseable extractor: pass results through unchanged
		return results
	_type, _field, _condition = parsed_extractor

	# Evaluate condition for each result
	if _condition:
		tmp_results = []
		if ancestor_id:
			# Narrow the condition to items produced under the given ancestor
			_condition = _condition + f' and item._context.get("ancestor_id") == "{str(ancestor_id)}"'
		for item in results:
			if item._type != _type:
				continue
			# Expose the current item to the condition under both 'item' and its type name
			ctx['item'] = item
			ctx[f'{_type}'] = item
			# Restricted eval: only len() available as a builtin; condition strings
			# come from workflow config templates, not end users — NOTE(review):
			# confirm config sources are trusted before widening this
			safe_globals = {'__builtins__': {'len': len}}
			eval_result = eval(_condition, safe_globals, ctx)
			if eval_result:
				tmp_results.append(item)
			# Clean the temporary bindings so they don't leak into later evals
			del ctx['item']
			del ctx[f'{_type}']
		# debug(f'kept {len(tmp_results)} / {len(results)} items after condition [bold]{_condition}[/bold]', sub='extractor')  # noqa: E501
		results = tmp_results
	else:
		# No condition: filter by output type (and ancestor_id when present)
		results = [item for item in results if item._type == _type]
		if ancestor_id:
			results = [item for item in results if item._context.get('ancestor_id') == ancestor_id]

	results_str = "\n".join([f'{repr(item)} [{str(item._context.get("ancestor_id", ""))}]' for item in results])
	debug(f'extracted results ([bold]ancestor_id[/]: {ancestor_id}, [bold]key[/]: {key}):\n{results_str}', sub='extractor')

	# Format field if needed
	if _field:
		# A field containing '{' and '}' is treated as a ready-made format template;
		# otherwise it is a bare attribute name wrapped into one
		already_formatted = '{' in _field and '}' in _field
		_field = '{' + _field + '}' if not already_formatted else _field
		results = [_field.format(**item.toDict()) for item in results]
	# debug('after extract', obj={'results_count': len(results), 'key': ctx.get('key')}, sub='extractor')
	return results
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def get_task_folder_id(path):
	"""Return the next numeric folder id under `path`.

	Scans `path` for subdirectories whose names are integers and returns the
	highest one plus one, or 0 when none exist (or `path` does not exist).

	Args:
		path (str): Directory to scan.

	Returns:
		int: Next available numeric folder id.
	"""
	if not os.path.exists(path):
		return 0
	ids = []
	for entry in os.scandir(path):
		if not entry.is_dir():
			continue
		try:
			ids.append(int(entry.name))
		except ValueError:
			# Ignore non-numeric folder names
			continue
	return max(ids) + 1 if ids else 0
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
from secator.celery_utils import CeleryData
|
|
2
|
+
from secator.runners import Runner
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class Celery(Runner):
	# Runner that drives execution through a Celery workflow instead of a
	# local process.

	def yielder(self):
		# Yields results from the Celery workflow. When no celery_result is
		# attached yet, the workflow is built first; in sync mode it is applied
		# eagerly and its results yielded directly.
		if not self.celery_result:
			result = self.build_celery_workflow()
			# presumably build_celery_workflow() sets self.celery_result as a
			# side effect, since iter_results below reads it — TODO confirm
			if self.sync:
				yield from result.apply().get()
		# NOTE(review): in sync mode this also runs after apply().get() —
		# confirm iter_results on self.celery_result is a no-op / safe there
		yield from CeleryData.iter_results(
			self.celery_result,
			ids_map=self.celery_ids_map,
			print_remote_info=False
		)

	def error_handler(self, e):
		# On error, revoke outstanding Celery tasks; the exception itself is
		# handled by the base Runner (e is unused here).
		self.stop_celery_tasks()
|