secator 0.22.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- secator/.gitignore +162 -0
- secator/__init__.py +0 -0
- secator/celery.py +453 -0
- secator/celery_signals.py +138 -0
- secator/celery_utils.py +320 -0
- secator/cli.py +2035 -0
- secator/cli_helper.py +395 -0
- secator/click.py +87 -0
- secator/config.py +670 -0
- secator/configs/__init__.py +0 -0
- secator/configs/profiles/__init__.py +0 -0
- secator/configs/profiles/aggressive.yaml +8 -0
- secator/configs/profiles/all_ports.yaml +7 -0
- secator/configs/profiles/full.yaml +31 -0
- secator/configs/profiles/http_headless.yaml +7 -0
- secator/configs/profiles/http_record.yaml +8 -0
- secator/configs/profiles/insane.yaml +8 -0
- secator/configs/profiles/paranoid.yaml +8 -0
- secator/configs/profiles/passive.yaml +11 -0
- secator/configs/profiles/polite.yaml +8 -0
- secator/configs/profiles/sneaky.yaml +8 -0
- secator/configs/profiles/tor.yaml +5 -0
- secator/configs/scans/__init__.py +0 -0
- secator/configs/scans/domain.yaml +31 -0
- secator/configs/scans/host.yaml +23 -0
- secator/configs/scans/network.yaml +30 -0
- secator/configs/scans/subdomain.yaml +27 -0
- secator/configs/scans/url.yaml +19 -0
- secator/configs/workflows/__init__.py +0 -0
- secator/configs/workflows/cidr_recon.yaml +48 -0
- secator/configs/workflows/code_scan.yaml +29 -0
- secator/configs/workflows/domain_recon.yaml +46 -0
- secator/configs/workflows/host_recon.yaml +95 -0
- secator/configs/workflows/subdomain_recon.yaml +120 -0
- secator/configs/workflows/url_bypass.yaml +15 -0
- secator/configs/workflows/url_crawl.yaml +98 -0
- secator/configs/workflows/url_dirsearch.yaml +62 -0
- secator/configs/workflows/url_fuzz.yaml +68 -0
- secator/configs/workflows/url_params_fuzz.yaml +66 -0
- secator/configs/workflows/url_secrets_hunt.yaml +23 -0
- secator/configs/workflows/url_vuln.yaml +91 -0
- secator/configs/workflows/user_hunt.yaml +29 -0
- secator/configs/workflows/wordpress.yaml +38 -0
- secator/cve.py +718 -0
- secator/decorators.py +7 -0
- secator/definitions.py +168 -0
- secator/exporters/__init__.py +14 -0
- secator/exporters/_base.py +3 -0
- secator/exporters/console.py +10 -0
- secator/exporters/csv.py +37 -0
- secator/exporters/gdrive.py +123 -0
- secator/exporters/json.py +16 -0
- secator/exporters/table.py +36 -0
- secator/exporters/txt.py +28 -0
- secator/hooks/__init__.py +0 -0
- secator/hooks/gcs.py +80 -0
- secator/hooks/mongodb.py +281 -0
- secator/installer.py +694 -0
- secator/loader.py +128 -0
- secator/output_types/__init__.py +49 -0
- secator/output_types/_base.py +108 -0
- secator/output_types/certificate.py +78 -0
- secator/output_types/domain.py +50 -0
- secator/output_types/error.py +42 -0
- secator/output_types/exploit.py +58 -0
- secator/output_types/info.py +24 -0
- secator/output_types/ip.py +47 -0
- secator/output_types/port.py +55 -0
- secator/output_types/progress.py +36 -0
- secator/output_types/record.py +36 -0
- secator/output_types/stat.py +41 -0
- secator/output_types/state.py +29 -0
- secator/output_types/subdomain.py +45 -0
- secator/output_types/tag.py +69 -0
- secator/output_types/target.py +38 -0
- secator/output_types/url.py +112 -0
- secator/output_types/user_account.py +41 -0
- secator/output_types/vulnerability.py +101 -0
- secator/output_types/warning.py +30 -0
- secator/report.py +140 -0
- secator/rich.py +130 -0
- secator/runners/__init__.py +14 -0
- secator/runners/_base.py +1240 -0
- secator/runners/_helpers.py +218 -0
- secator/runners/celery.py +18 -0
- secator/runners/command.py +1178 -0
- secator/runners/python.py +126 -0
- secator/runners/scan.py +87 -0
- secator/runners/task.py +81 -0
- secator/runners/workflow.py +168 -0
- secator/scans/__init__.py +29 -0
- secator/serializers/__init__.py +8 -0
- secator/serializers/dataclass.py +39 -0
- secator/serializers/json.py +45 -0
- secator/serializers/regex.py +25 -0
- secator/tasks/__init__.py +8 -0
- secator/tasks/_categories.py +487 -0
- secator/tasks/arjun.py +113 -0
- secator/tasks/arp.py +53 -0
- secator/tasks/arpscan.py +70 -0
- secator/tasks/bbot.py +372 -0
- secator/tasks/bup.py +118 -0
- secator/tasks/cariddi.py +193 -0
- secator/tasks/dalfox.py +87 -0
- secator/tasks/dirsearch.py +84 -0
- secator/tasks/dnsx.py +186 -0
- secator/tasks/feroxbuster.py +93 -0
- secator/tasks/ffuf.py +135 -0
- secator/tasks/fping.py +85 -0
- secator/tasks/gau.py +102 -0
- secator/tasks/getasn.py +60 -0
- secator/tasks/gf.py +36 -0
- secator/tasks/gitleaks.py +96 -0
- secator/tasks/gospider.py +84 -0
- secator/tasks/grype.py +109 -0
- secator/tasks/h8mail.py +75 -0
- secator/tasks/httpx.py +167 -0
- secator/tasks/jswhois.py +36 -0
- secator/tasks/katana.py +203 -0
- secator/tasks/maigret.py +87 -0
- secator/tasks/mapcidr.py +42 -0
- secator/tasks/msfconsole.py +179 -0
- secator/tasks/naabu.py +85 -0
- secator/tasks/nmap.py +487 -0
- secator/tasks/nuclei.py +151 -0
- secator/tasks/search_vulns.py +225 -0
- secator/tasks/searchsploit.py +109 -0
- secator/tasks/sshaudit.py +299 -0
- secator/tasks/subfinder.py +48 -0
- secator/tasks/testssl.py +283 -0
- secator/tasks/trivy.py +130 -0
- secator/tasks/trufflehog.py +240 -0
- secator/tasks/urlfinder.py +100 -0
- secator/tasks/wafw00f.py +106 -0
- secator/tasks/whois.py +34 -0
- secator/tasks/wpprobe.py +116 -0
- secator/tasks/wpscan.py +202 -0
- secator/tasks/x8.py +94 -0
- secator/tasks/xurlfind3r.py +83 -0
- secator/template.py +294 -0
- secator/thread.py +24 -0
- secator/tree.py +196 -0
- secator/utils.py +922 -0
- secator/utils_test.py +297 -0
- secator/workflows/__init__.py +29 -0
- secator-0.22.0.dist-info/METADATA +447 -0
- secator-0.22.0.dist-info/RECORD +150 -0
- secator-0.22.0.dist-info/WHEEL +4 -0
- secator-0.22.0.dist-info/entry_points.txt +2 -0
- secator-0.22.0.dist-info/licenses/LICENSE +60 -0
secator/config.py
ADDED
@@ -0,0 +1,670 @@
import os
from pathlib import Path
from subprocess import call, DEVNULL
from typing import Dict, List
from typing_extensions import Annotated, Self

import validators
import requests
import shutil
import yaml
from dotenv import find_dotenv, load_dotenv
from dotmap import DotMap
from pydantic import AfterValidator, BaseModel, model_validator, ValidationError

from secator.rich import console, console_stdout

load_dotenv(find_dotenv(usecwd=True), override=False)

Directory = Annotated[Path, AfterValidator(lambda v: v.expanduser())]
StrExpandHome = Annotated[str, AfterValidator(lambda v: v.replace('~', str(Path.home())))]

ROOT_FOLDER = Path(__file__).parent.parent
LIB_FOLDER = ROOT_FOLDER / 'secator'
CONFIGS_FOLDER = LIB_FOLDER / 'configs'
DATA_FOLDER = os.environ.get('SECATOR_DIRS_DATA') or str(Path.home() / '.secator')

USER_AGENTS = {
	'chrome_134.0_win10': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36',  # noqa: E501
	'chrome_134.0_macos': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36',  # noqa: E501
}

class StrictModel(BaseModel, extra='forbid'):
	pass


class Directories(StrictModel):
	bin: Directory = Path.home() / '.local' / 'bin'
	share: Directory = Path.home() / '.local' / 'share'
	data: Directory = Path(DATA_FOLDER)
	templates: Directory = ''
	reports: Directory = ''
	wordlists: Directory = ''
	cves: Directory = ''
	payloads: Directory = ''
	performance: Directory = ''
	revshells: Directory = ''
	celery: Directory = ''
	celery_data: Directory = ''
	celery_results: Directory = ''

	@model_validator(mode='after')
	def set_default_folders(self) -> Self:
		"""Set folders to be relative to the data folders if they are unspecified in config."""
		for folder in ['templates', 'reports', 'wordlists', 'cves', 'payloads', 'performance', 'revshells', 'celery', 'celery_data', 'celery_results']:  # noqa: E501
			rel_target = '/'.join(folder.split('_'))
			val = getattr(self, folder) or self.data / rel_target
			setattr(self, folder, val)
		return self


class Celery(StrictModel):
	broker_url: str = 'filesystem://'
	broker_pool_limit: int = 10
	broker_connection_timeout: float = 4.0
	broker_visibility_timeout: int = 3600
	broker_transport_options: str = ""
	override_default_logging: bool = True
	result_backend: StrExpandHome = ''
	result_backend_transport_options: str = ""
	result_expires: int = 86400  # 1 day
	task_acks_late: bool = False
	task_send_sent_event: bool = False
	task_reject_on_worker_lost: bool = False
	task_max_timeout: int = -1
	task_memory_limit_mb: int = -1
	worker_max_tasks_per_child: int = 20
	worker_prefetch_multiplier: int = 1
	worker_send_task_events: bool = False
	worker_kill_after_task: bool = False
	worker_kill_after_idle_seconds: int = -1
	worker_command_verbose: bool = False


class Cli(StrictModel):
	github_token: str = os.environ.get('GITHUB_TOKEN', '')
	record: bool = False
	stdin_timeout: int = 1000
	show_http_response_headers: bool = False
	show_command_output: bool = False
	exclude_http_response_headers: List[str] = ["connection", "content_type", "content_length", "date", "server"]


class Runners(StrictModel):
	input_chunk_size: int = 100
	progress_update_frequency: int = 20
	stat_update_frequency: int = 20
	backend_update_frequency: int = 5
	poll_frequency: int = 5
	skip_cve_search: bool = False
	skip_exploit_search: bool = False
	skip_cve_low_confidence: bool = False
	remove_duplicates: bool = False
	force_tty: bool = False


class Security(StrictModel):
	allow_local_file_access: bool = True
	auto_install_commands: bool = True
	force_source_install: bool = False

class HTTP(StrictModel):
	socks5_proxy: str = 'socks5://127.0.0.1:9050'
	http_proxy: str = 'https://127.0.0.1:9080'
	store_responses: bool = True
	response_max_size_bytes: int = 100000  # 100 KB
	proxychains_command: str = 'proxychains'
	freeproxy_timeout: int = 1
	default_header: str = 'User-Agent: ' + USER_AGENTS['chrome_134.0_win10']

class Tasks(StrictModel):
	exporters: List[str] = ['json', 'csv', 'txt']


class Workflows(StrictModel):
	exporters: List[str] = ['json', 'csv', 'txt']


class Scans(StrictModel):
	exporters: List[str] = ['json', 'csv', 'txt']


class Profiles(StrictModel):
	defaults: List[str] = []


class Drivers(StrictModel):
	defaults: List[str] = []


class Payloads(StrictModel):
	templates: Dict[str, str] = {
		'lse': 'https://github.com/diego-treitos/linux-smart-enumeration/releases/latest/download/lse.sh',
		'linpeas': 'https://github.com/carlospolop/PEASS-ng/releases/latest/download/linpeas.sh',
		'sudo_killer': 'https://github.com/TH3xACE/SUDO_KILLER/archive/refs/heads/V3.zip'
	}


class Wordlists(StrictModel):
	defaults: Dict[str, str] = {'http': 'bo0m_fuzz', 'dns': 'combined_subdomains', 'http_params': 'burp-parameter-names'}
	templates: Dict[str, str] = {
		'bo0m_fuzz': 'https://raw.githubusercontent.com/Bo0oM/fuzz.txt/master/fuzz.txt',
		'combined_subdomains': 'https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/combined_subdomains.txt',  # noqa: E501
		'directory_list_small': 'https://gist.githubusercontent.com/sl4v/c087e36164e74233514b/raw/c51a811c70bbdd87f4725521420cc30e7232b36d/directory-list-2.3-small.txt',  # noqa: E501
		'burp-parameter-names': 'https://raw.githubusercontent.com/danielmiessler/SecLists/refs/heads/master/Discovery/Web-Content/burp-parameter-names.txt',  # noqa: E501
	}
	lists: Dict[str, List[str]] = {}


class GoogleDriveAddon(StrictModel):
	enabled: bool = False
	drive_parent_folder_id: str = ''
	credentials_path: str = ''


class GoogleCloudStorageAddon(StrictModel):
	enabled: bool = False
	bucket_name: str = ''
	credentials_path: str = ''


class WorkerAddon(StrictModel):
	enabled: bool = False


class MongodbAddon(StrictModel):
	enabled: bool = False
	url: str = 'mongodb://localhost'
	update_frequency: int = 60
	max_pool_size: int = 10
	server_selection_timeout_ms: int = 5000


class Addons(StrictModel):
	gdrive: GoogleDriveAddon = GoogleDriveAddon()
	gcs: GoogleCloudStorageAddon = GoogleCloudStorageAddon()
	worker: WorkerAddon = WorkerAddon()
	mongodb: MongodbAddon = MongodbAddon()

class SecatorConfig(StrictModel):
	debug: str = ''
	dirs: Directories = Directories()
	celery: Celery = Celery()
	cli: Cli = Cli()
	runners: Runners = Runners()
	http: HTTP = HTTP()
	tasks: Tasks = Tasks()
	workflows: Workflows = Workflows()
	scans: Scans = Scans()
	payloads: Payloads = Payloads()
	wordlists: Wordlists = Wordlists()
	profiles: Profiles = Profiles()
	drivers: Drivers = Drivers()
	addons: Addons = Addons()
	security: Security = Security()
	offline_mode: bool = False

class Config(DotMap):
	"""Config class.

	Examples:
		>>> config = Config.parse()  # get default config.
		>>> config = Config.parse({'dirs': {'data': '/tmp/'}})  # get custom config (from dict).
		>>> config = Config.parse(path='/path/to/config.yml')  # get custom config (from YAML file).
		>>> config.print()  # print config without defaults.
		>>> config.print(partial=False)  # print full config.
		>>> config.set('addons.gdrive.enabled', False)  # set value in config.
		>>> config.save()  # save config back to disk.
	"""

	_error = False

	def get(self, key=None, print=True):
		"""Retrieve a value from the configuration using a dotted path.

		Args:
			key (str | None): Dotted key path.
			print (bool): Print the resulting value.

		Returns:
			Any: value at key.
		"""
		value = self
		if key:
			for part in key.split('.'):
				value = value[part]
				if value is None:
					console.print(f'[bold red]Key {key} does not exist.[/]')
					return None
		if print:
			if key:
				yaml_str = Config.dump(DotMap({key: value}), partial=False)
			else:
				yaml_str = Config.dump(self, partial=False)
			Config.print_yaml(yaml_str)
		return value

	def set(self, key, value, set_partial=True):
		"""Set a value in the configuration using a dotted path.

		Args:
			key (str | None): Dotted key path.
			value (Any): Value.
			set_partial (bool): Set in partial config.
		"""
		# Get existing value
		existing_value = self.get(key, print=False)

		# Convert dotted key path to the corresponding uppercase key used in _keymap
		map_key = key.upper().replace('.', '_')

		# Check if map key exists
		if map_key not in self._keymap:
			console.print(f'[bold red]Key "{key}" not found in config keymap[/].')
			return

		# Traverse to the second last key to handle the setting correctly
		target = self
		partial = self._partial
		for part in self._keymap[map_key][:-1]:
			target = target[part]
			partial = partial[part]

		# Set the value on the final part of the path
		final_key = self._keymap[map_key][-1]

		# Try to convert value to expected type
		try:
			if isinstance(existing_value, list):
				if isinstance(value, str):
					if value.startswith('[') and value.endswith(']'):
						value = value[1:-1]
					if ',' in value:
						value = [c.strip() for c in value.split(',')]
					elif value:
						value = [value]
					else:
						value = []
			elif isinstance(existing_value, dict):
				if isinstance(value, str):
					if value.startswith('{') and value.endswith('}'):
						import json
						value = json.loads(value)
			elif isinstance(existing_value, bool):
				if isinstance(value, str):
					value = value.lower() in ("true", "1", "t")
				elif isinstance(value, (int, float)):
					value = True if value == 1 else False
			elif isinstance(existing_value, int):
				value = int(value)
			elif isinstance(existing_value, float):
				value = float(value)
			elif isinstance(existing_value, Path):
				value = Path(value)
		except ValueError:
			pass
		finally:
			if set_partial:
				if value is None or value == target[final_key]:
					if final_key in partial:
						del partial[final_key]
					return
				else:
					partial[final_key] = value
			target[final_key] = value

	def unset(self, key, set_partial=True):
		"""Unset a value in the configuration using a dotted path.

		Args:
			key (str): Dotted key path.
			set_partial (bool): Set in partial config.
		"""
		self.set(key, None, set_partial=set_partial)

	def save(self, target_path: Path = None, partial=True):
		"""Save config as YAML on disk.

		Args:
			target_path (Path | None): If passed, saves the config to this path.
			partial (bool): Save partial config.
		"""
		if not target_path:
			if not self._path:
				return
			target_path = self._path
		with target_path.open('w') as f:
			f.write(Config.dump(self, partial=partial))
		self._path = target_path

	def print(self, partial=True):
		"""Print config.

		Args:
			partial (bool): Print partial config only.
		"""
		yaml_str = self.dump(self, partial=partial)
		yaml_str = f'# {self._path}\n\n{yaml_str}' if self._path and partial else yaml_str
		Config.print_yaml(yaml_str)

	@staticmethod
	def parse(data: dict = {}, path: Path = None, print_errors: bool = True):
		"""Parse config.

		Args:
			data (dict): Config data.
			path (Path | None): Path to YAML config.
			print_errors (bool): Print validation errors to console.

		Returns:
			Config: instance of Config object.
			None: if the config was not loaded properly or there are validation errors.
		"""
		# Load YAML file
		if path:
			data = Config.read_yaml(path)

		# Load data
		config = Config.load(SecatorConfig, data, print_errors=print_errors)
		valid = config is not None
		if not valid:
			return None

		# Set extras
		config.set_extras(data, path)

		# Override config values with environment variables
		config.apply_env_overrides(print_errors=print_errors)

		# Validate config
		config.validate(print_errors=print_errors)

		return config

	def validate(self, print_errors=True):
		"""Validate config."""
		return Config.load(
			SecatorConfig,
			data=self._partial.toDict(),
			print_errors=print_errors)

	def set_extras(self, original_data, original_path):
		"""Set extra useful values in config.

		Args:
			original_data (data): Original dict data.
			original_path (pathlib.Path): Original YAML path.
			valid (bool): Boolean indicating if config is valid or not.
		"""
		self._path = original_path
		self._partial = Config(original_data)
		self._keymap = Config.build_key_map(self)

		# HACK: set default result_backend if unset
		if not self.celery.result_backend:
			self.celery.result_backend = f'file://{self.dirs.celery_results}'

	@staticmethod
	def load(schema, data: dict = {}, print_errors=True):
		"""Validate a config using Pydantic.

		Args:
			schema (pydantic.Schema): Pydantic schema.
			data (dict): Input data.
			print_errors (bool): Print validation errors.

		Returns:
			Config|None: instance of Config object or None if invalid.
		"""
		try:
			return Config(schema(**data).model_dump())
		except ValidationError as e:
			if print_errors:
				error_str = str(e).replace('\n', '\n ')
				console.print(f'[bold red]:x: {error_str}')
		return None

	@staticmethod
	def read_yaml(yaml_path):
		"""Read YAML from path.

		Args:
			yaml_path (Path): path to yaml config.

		Returns:
			dict: Loaded data.
		"""
		if not yaml_path.exists():
			console.print(f'[bold red]Config not found: {yaml_path}.[/]')
			return {}
		try:
			with yaml_path.open('r') as f:
				data = yaml.load(f.read(), Loader=yaml.Loader)
			return data or {}
		except yaml.YAMLError as e:
			console.print(f'[bold red]:x: Error loading {yaml_path} {str(e)}')
			return {}

	@staticmethod
	def print_yaml(string):
		"""Print YAML string using rich.

		Args:
			string (str): YAML string.
		"""
		from rich.syntax import Syntax
		data = Syntax(string, 'yaml', theme='ansi-dark', padding=0, background_color='default')
		console_stdout.print(data)

	@staticmethod
	def dump(config, partial=True):
		"""Safe dump config as yaml:
		- `Path`, `PosixPath` and `WindowsPath` objects are translated to strings.
		- Home directory in paths is replaced with the tilde '~'.

		Returns:
			str: YAML dump.
		"""
		import yaml
		from pathlib import Path, PosixPath, WindowsPath

		# Get home dir
		home = str(Path.home())

		# Custom dumper to add line breaks between items and a path representer to translate paths to strings
		class LineBreakDumper(yaml.SafeDumper):
			def write_line_break(self, data=None):
				super().write_line_break(data)
				if len(self.indents) == 1:
					super().write_line_break()

		def posix_path_representer(dumper, data):
			path = str(data)
			if path.startswith(home):
				path = path.replace(home, '~')
			return dumper.represent_scalar('tag:yaml.org,2002:str', path)

		LineBreakDumper.add_representer(str, posix_path_representer)
		LineBreakDumper.add_representer(Path, posix_path_representer)
		LineBreakDumper.add_representer(PosixPath, posix_path_representer)
		LineBreakDumper.add_representer(WindowsPath, posix_path_representer)

		# Get data dict
		data = config.toDict()

		# HACK: Replace home dir in result_backend
		if isinstance(config, Config):
			data['celery']['result_backend'] = data['celery']['result_backend'].replace(home, '~')
			del data['_path']
			if partial:
				data = data['_partial']
			else:
				del data['_partial']

		data = {k: v for k, v in data.items() if not k.startswith('_')}
		return yaml.dump(data, Dumper=LineBreakDumper, sort_keys=False)

	@staticmethod
	def build_key_map(config, base_path=[]):
		key_map = {}
		for key, value in config.items():
			if key.startswith('_'):  # ignore
				continue
			current_path = base_path + [key]
			if isinstance(value, dict):
				key_map.update(Config.build_key_map(value, current_path))
			else:
				key_map['_'.join(current_path).upper()] = current_path
		return key_map

	def apply_env_overrides(self, print_errors=True):
		"""Override config values from environment variables."""
		prefix = "SECATOR_"
		for var in os.environ:
			if var.startswith(prefix):
				key = var[len(prefix):]  # remove prefix
				if key in self._keymap:
					path = '.'.join(k.lower() for k in self._keymap[key])
					value = os.environ[var]
					self.set(path, value, set_partial=False)
					if not self.validate(print_errors=False) and print_errors:
						console.print(f'[bold red]{var} (override failed)[/]')
				# elif print_errors:
				# console.print(f'[bold red]{var} (override failed: key not found)[/]')

def download_files(data: dict, target_folder: Path, offline_mode: bool, type: str):
	"""Download remote files to target folder, clone git repos, or symlink local files.

	Args:
		data (dict): Dict of name to url or local path prefixed with 'git+' for Git repos.
		target_folder (Path): Target folder for storing files or repos.
		type (str): Type of files to handle.
		offline_mode (bool): Offline mode.
	"""
	for name, url_or_path in data.items():
		target_path = download_file(url_or_path, target_folder, offline_mode, type, name=name)
		if target_path:
			data[name] = target_path

def download_file(url_or_path, target_folder: Path, offline_mode: bool, type: str, name: str = None):
	"""Download remote file to target folder, clone git repos, or symlink local files.

	Args:
		url_or_path (str): URL or local path, prefixed with 'git+' for Git repos.
		target_folder (Path): Target folder for storing files or repos.
		offline_mode (bool): Offline mode.
		type (str): Type of files to handle.
		name (str, Optional): Name of object.

	Returns:
		path (Path): Path to downloaded file / folder.
	"""
	from secator.output_types import Info, Error
	if url_or_path.startswith('git+'):
		# Clone Git repository
		git_url = url_or_path[4:]  # remove 'git+' prefix
		repo_name = git_url.split('/')[-1]
		if repo_name.endswith('.git'):
			repo_name = repo_name[:-4]
		target_path = target_folder / repo_name
		if not target_path.exists():
			console.print(repr(Info(message=f'[bold turquoise4]Cloning git {type} [bold magenta]{repo_name}[/] ...[/] ')), highlight=False, end='')  # noqa: E501
			if offline_mode:
				console.print('[bold orange1]skipped [dim][offline[/].[/]')
				return
			try:
				call(['git', 'clone', git_url, str(target_path)], stderr=DEVNULL, stdout=DEVNULL)
				console.print('[bold green]ok.[/]')
			except Exception as e:
				error = Error.from_exception(e)
				console.print(f'[bold red]failed ({str(e)}).[/]')
				console.print(error)
		return target_path.resolve()
	elif Path(url_or_path).exists():
		# Move local file to target folder
		local_path = Path(url_or_path)
		target_path = target_folder / local_path.name
		if not name:
			name = url_or_path.split('/')[-1]
		try:
			local_path.resolve().relative_to(CONFIG.dirs.data.resolve())
		except ValueError:
			if not CONFIG.security.allow_local_file_access:
				console.print(Error(message=f'File {local_path.resolve()} is not in {CONFIG.dirs.data} and security.allow_local_file_access is disabled.'))  # noqa: E501
				return None
		from secator.output_types import Info
		console.print(repr(Info(message=f'[bold turquoise4]Copying {type} [bold magenta]{name}[/] to {target_folder} ...[/] ')), highlight=False, end='')  # noqa: E501
		shutil.copyfile(local_path, target_folder / name)
		target_path = target_folder / local_path.name
		console.print('[bold green]ok.[/]')
		return target_path.resolve()
	elif validators.url(url_or_path):
		# Download file from URL
		ext = url_or_path.split('.')[-1]
		if not name:
			name = url_or_path.split('/')[-1]
		filename = f'{name}.{ext}' if not name.endswith(ext) else name
		target_path = target_folder / filename
		try:
			if offline_mode:
				return
			if target_path.exists():
				return target_path.resolve()
			console.print(repr(Info(message=f'[bold turquoise4]Downloading {type} [bold magenta]{filename}[/] ...[/] ')), highlight=False, end='')  # noqa: E501
			resp = requests.get(url_or_path, timeout=3)
			resp.raise_for_status()
			with open(target_path, 'wb') as f:
				f.write(resp.content)
			console.print('[bold green]ok.[/]')
		except requests.RequestException as e:
			console.print(f'[bold red]failed ({str(e)}).[/]')
			return
		return target_path.resolve()
	else:
		console.print(Error(message=f'Invalid {type} [bold magenta]{url_or_path}[/]: not a valid git repository, URL or local path.'))  # noqa: E501
		return None

# Load default_config
default_config = Config.parse(print_errors=False)

# Load user config
data_root = default_config.dirs.data
config_path = data_root / 'config.yml'
if not config_path.exists():
	if not data_root.exists():
		console.print(f'[bold turquoise4]Creating directory [bold magenta]{data_root}[/] ... [/]', end='')
		data_root.mkdir(parents=False)
		console.print('[bold green]ok.[/]')
	console.print(
		f'[bold turquoise4]Creating user conf [bold magenta]{config_path}[/]... [/]', end='')
	config_path.touch()
	console.print('[bold green]ok.[/]')
CONFIG = Config.parse(path=config_path)

# Fallback to default if invalid user config
if not CONFIG:
	console.print(f'[bold orange1]Invalid user config {config_path}. Falling back to default config.')
	CONFIG = default_config

# Create directories if they don't exist already
for name, dir in CONFIG.dirs.items():
	if not dir.exists():
		console.print(f'[bold turquoise4]Creating directory [bold magenta]{dir}[/] ... [/]', end='')
		dir.mkdir(parents=False)
		console.print('[bold green]ok.[/]')

# Download wordlists and payloads
# download_files(CONFIG.wordlists.templates, CONFIG.dirs.wordlists, CONFIG.offline_mode, 'wordlist')
# download_files(CONFIG.payloads.templates, CONFIG.dirs.payloads, CONFIG.offline_mode, 'payload')

# Print config
if 'config' in CONFIG.debug:
	CONFIG.print()
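As a quick orientation for readers of this diff, here is a small usage sketch of the Config.parse API defined above, mirroring the examples in the class docstring; the override values (the /tmp path and the MongoDB URL) are illustrative assumptions, not defaults shipped by the package.

from secator.config import Config

# Build a config from an in-memory dict; unknown keys are rejected because
# every schema class inherits StrictModel (extra='forbid').
config = Config.parse({
	'dirs': {'data': '/tmp/secator-data'},  # illustrative path
	'http': {'store_responses': False},
	'addons': {'mongodb': {'enabled': True, 'url': 'mongodb://localhost:27017'}},  # illustrative URL
})
config.print(partial=False)  # dump the fully-resolved config as YAML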
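The build_key_map / apply_env_overrides pair above also implies that any leaf setting can be overridden through an environment variable named SECATOR_ plus the dotted key path uppercased, with dots replaced by underscores. A sketch of that convention (the values below are placeholders):

import os

# Names follow the SECATOR_<DOTTED_PATH> convention used by Config.apply_env_overrides().
os.environ['SECATOR_HTTP_SOCKS5_PROXY'] = 'socks5://10.0.0.5:9050'
os.environ['SECATOR_CELERY_BROKER_URL'] = 'redis://localhost:6379/0'
os.environ['SECATOR_RUNNERS_REMOVE_DUPLICATES'] = 'true'  # coerced to bool via "true"/"1"/"t"

# CONFIG is built at import time via Config.parse(), which calls apply_env_overrides(),
# so the variables must be set before the first import of secator.config.
from secator.config import CONFIG
print(CONFIG.http.socks5_proxy, CONFIG.runners.remove_duplicates)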
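Finally, the get/set/save methods suggest how the persisted user config (created at dirs.data / config.yml by the module-level bootstrap) could be edited programmatically; a sketch under that assumption:

from secator.config import CONFIG

exporters = CONFIG.get('tasks.exporters', print=False)  # read a value without echoing YAML

# Comma-separated strings are coerced to lists because the existing value is a list.
CONFIG.set('tasks.exporters', 'json,csv')
CONFIG.save()   # writes only the non-default (partial) keys back to the user config file
CONFIG.print()  # prints the partial config, prefixed with its path as a YAML comment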