secator 0.3.6__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.

Potentially problematic release.



secator/config.py CHANGED
@@ -1,137 +1,570 @@
-import glob
 import os
 from pathlib import Path
+from subprocess import call, DEVNULL
+from typing import Dict, List
+from typing_extensions import Annotated, Self
 
+import requests
 import yaml
 from dotmap import DotMap
+from pydantic import AfterValidator, BaseModel, model_validator, ValidationError
 
-from secator.rich import console
-from secator.definitions import CONFIGS_FOLDER, EXTRA_CONFIGS_FOLDER
+from secator.rich import console, console_stdout
 
-CONFIGS_DIR_KEYS = ['workflow', 'scan', 'profile']
+Directory = Annotated[Path, AfterValidator(lambda v: v.expanduser())]
+StrExpandHome = Annotated[str, AfterValidator(lambda v: v.replace('~', str(Path.home())))]
 
+ROOT_FOLDER = Path(__file__).parent.parent
+LIB_FOLDER = ROOT_FOLDER / 'secator'
+CONFIGS_FOLDER = LIB_FOLDER / 'configs'
 
-def load_config(name):
-	"""Load a config by name.
 
-	Args:
-		name: Name of the config, for instances profiles/aggressive or workflows/domain_scan.
+class StrictModel(BaseModel, extra='forbid'):
+	pass
+
+
+class Directories(StrictModel):
+	bin: Directory = Path.home() / '.local' / 'bin'
+	data: Directory = Path.home() / '.secator'
+	templates: Directory = ''
+	reports: Directory = ''
+	wordlists: Directory = ''
+	cves: Directory = ''
+	payloads: Directory = ''
+	revshells: Directory = ''
+	celery: Directory = ''
+	celery_data: Directory = ''
+	celery_results: Directory = ''
+
+	@model_validator(mode='after')
+	def set_default_folders(self) -> Self:
+		"""Set folders to be relative to the data folders if they are unspecified in config."""
+		for folder in ['templates', 'reports', 'wordlists', 'cves', 'payloads', 'revshells', 'celery', 'celery_data', 'celery_results']: # noqa: E501
+			rel_target = '/'.join(folder.split('_'))
+			val = getattr(self, folder) or self.data / rel_target
+			setattr(self, folder, val)
+		return self
+
+
+class Debug(StrictModel):
+	level: int = 0
+	component: str = ''
+
+
+class Celery(StrictModel):
+	broker_url: str = 'filesystem://'
+	broker_pool_limit: int = 10
+	broker_connection_timeout: float = 4.0
+	broker_visibility_timeout: int = 3600
+	override_default_logging: bool = True
+	result_backend: StrExpandHome = ''
+
+
+class Cli(StrictModel):
+	github_token: str = ''
+	record: bool = False
+	stdin_timeout: int = 1000
+
+
+class Runners(StrictModel):
+	input_chunk_size: int = 1000
+	progress_update_frequency: int = 60
+	skip_cve_search: bool = False
+	skip_cve_low_confidence: bool = True
+
+
+class HTTP(StrictModel):
+	socks5_proxy: str = 'socks5://127.0.0.1:9050'
+	http_proxy: str = 'https://127.0.0.1:9080'
+	store_responses: bool = False
+	proxychains_command: str = 'proxychains'
+	freeproxy_timeout: int = 1
+
+
+class Tasks(StrictModel):
+	exporters: List[str] = ['json', 'csv']
+
+
+class Workflows(StrictModel):
+	exporters: List[str] = ['json', 'csv']
+
+
+class Scans(StrictModel):
+	exporters: List[str] = ['json', 'csv']
+
+
+class Payloads(StrictModel):
+	templates: Dict[str, str] = {
+		'lse': 'https://github.com/diego-treitos/linux-smart-enumeration/releases/latest/download/lse.sh',
+		'linpeas': 'https://github.com/carlospolop/PEASS-ng/releases/latest/download/linpeas.sh',
+		'sudo_killer': 'https://github.com/TH3xACE/SUDO_KILLER/archive/refs/heads/V3.zip'
+	}
+
+
+class Wordlists(StrictModel):
+	defaults: Dict[str, str] = {'http': 'bo0m_fuzz', 'dns': 'combined_subdomains'}
+	templates: Dict[str, str] = {
+		'bo0m_fuzz': 'https://raw.githubusercontent.com/Bo0oM/fuzz.txt/master/fuzz.txt',
+		'combined_subdomains': 'https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/combined_subdomains.txt' # noqa: E501
+	}
+	lists: Dict[str, List[str]] = {}
+
 
-	Returns:
-		dict: Loaded config.
+class GoogleAddon(StrictModel):
+	enabled: bool = False
+	drive_parent_folder_id: str = ''
+	credentials_path: str = ''
+
+
+class WorkerAddon(StrictModel):
+	enabled: bool = False
+
+
+class MongodbAddon(StrictModel):
+	enabled: bool = False
+	url: str = 'mongodb://localhost'
+	update_frequency: int = 60
+
+
+class Addons(StrictModel):
+	google: GoogleAddon = GoogleAddon()
+	worker: WorkerAddon = WorkerAddon()
+	mongodb: MongodbAddon = MongodbAddon()
+
+
+class SecatorConfig(StrictModel):
+	dirs: Directories = Directories()
+	debug: Debug = Debug()
+	celery: Celery = Celery()
+	cli: Cli = Cli()
+	runners: Runners = Runners()
+	http: HTTP = HTTP()
+	tasks: Tasks = Tasks()
+	workflows: Workflows = Workflows()
+	scans: Scans = Scans()
+	payloads: Payloads = Payloads()
+	wordlists: Wordlists = Wordlists()
+	addons: Addons = Addons()
+	offline_mode: bool = False
+
+
+class Config(DotMap):
+	"""Config class.
+
+	Examples:
+		>>> config = Config.parse() # get default config.
+		>>> config = Config.parse({'dirs': {'data': '/tmp/'}) # get custom config (from dict).
+		>>> config = Config.parse(path='/path/to/config.yml') # get custom config (from YAML file).
+		>>> config.print() # print config without defaults.
+		>>> config.print(partial=False) # print full config.
+		>>> config.set('addons.google.enabled', False) # set value in config.
+		>>> config.save() # save config back to disk.
 	"""
-	path = Path(CONFIGS_FOLDER) / f'{name}.yaml'
-	if not path.exists():
-		console.log(f'Config "{name}" could not be loaded.')
-		return
-	with path.open('r') as f:
-		return yaml.load(f.read(), Loader=yaml.Loader)
-
-
-def find_configs():
-	results = {'scan': [], 'workflow': [], 'profile': []}
-	dirs_type = [CONFIGS_FOLDER]
-	if EXTRA_CONFIGS_FOLDER:
-		dirs_type.append(EXTRA_CONFIGS_FOLDER)
-	paths = []
-	for dir in dirs_type:
-		dir_paths = [
-			os.path.abspath(path)
-			for path in glob.glob(dir.rstrip('/') + '/**/*.y*ml', recursive=True)
-		]
-		paths.extend(dir_paths)
-	for path in paths:
-		with open(path, 'r') as f:
-			try:
-				config = yaml.load(f.read(), yaml.Loader)
-				type = config.get('type')
-				if type:
-					results[type].append(path)
-			except yaml.YAMLError as exc:
-				console.log(f'Unable to load config at {path}')
-				console.log(str(exc))
-	return results
-
-
-class ConfigLoader(DotMap):
-
-	def __init__(self, input={}, name=None, **kwargs):
-		if name:
-			name = name.replace('-', '_') # so that workflows have a nice '-' in CLI
-			config = self._load_from_name(name)
-		elif isinstance(input, str):
-			config = self._load_from_file(input)
-		else:
-			config = input
-		super().__init__(config)
 
-	def _load_from_file(self, path):
-		if not os.path.exists(path):
-			console.log(f'Config path {path} does not exists', style='bold red')
+	_error = False
+
+	def get(self, key=None, print=True):
+		"""Retrieve a value from the configuration using a dotted path.
+
+		Args:
+			key (str | None): Dotted key path.
+			print (bool): Print the resulting value.
+
+		Returns:
+			Any: value at key.
+		"""
+		value = self
+		if key:
+			for part in key.split('.'):
+				value = value[part]
+		if value is None:
+			console.print(f'[bold red]Key {key} does not exist.[/]')
+			return None
+		if print:
+			if key:
+				yaml_str = Config.dump(DotMap({key: value}), partial=False)
+			else:
+				yaml_str = Config.dump(self, partial=False)
+			Config.print_yaml(yaml_str)
+		return value
+
+	def set(self, key, value, set_partial=True):
+		"""Set a value in the configuration using a dotted path.
+
+		Args:
+			key (str | None): Dotted key path.
+			value (Any): Value.
+			set_partial (bool): Set in partial config.
+		"""
+		# Get existing value
+		existing_value = self.get(key, print=False)
+
+		# Convert dotted key path to the corresponding uppercase key used in _keymap
+		map_key = key.upper().replace('.', '_')
+
+		# Check if map key exists
+		if map_key not in self._keymap:
+			console.print(f'[bold red]Key "{key}" not found in config keymap[/].')
 			return
-		if path and os.path.exists(path):
-			with open(path, 'r') as f:
-				return yaml.load(f.read(), Loader=yaml.Loader)
-
-	def _load_from_name(self, name):
-		return load_config(name)
-
-	@classmethod
-	def load_all(cls):
-		configs = find_configs()
-		return ConfigLoader({
-			key: [ConfigLoader(path) for path in configs[key]]
-			for key in CONFIGS_DIR_KEYS
-		})
-
-	def get_tasks_class(self):
-		from secator.runners import Task
-		tasks = []
-		for name, conf in self.tasks.items():
-			if name == '_group':
-				group_conf = ConfigLoader(input={'tasks': conf})
-				tasks.extend(group_conf.get_tasks_class())
+
+		# Traverse to the second last key to handle the setting correctly
+		target = self
+		partial = self._partial
+		for part in self._keymap[map_key][:-1]:
+			target = target[part]
+			partial = partial[part]
+
+		# Set the value on the final part of the path
+		final_key = self._keymap[map_key][-1]
+
+		# Try to convert value to expected type
+		try:
+			if isinstance(existing_value, list):
+				if isinstance(value, str):
+					if value.startswith('[') and value.endswith(']'):
+						value = value[1:-1]
+					if ',' in value:
+						value = [c.strip() for c in value.split(',')]
+					elif value:
+						value = [value]
+					else:
+						value = []
+			elif isinstance(existing_value, dict):
+				if isinstance(value, str):
+					if value.startswith('{') and value.endswith('}'):
+						import json
+						value = json.loads(value)
+			elif isinstance(existing_value, bool):
+				if isinstance(value, str):
+					value = value.lower() in ("true", "1", "t")
+				elif isinstance(value, (int, float)):
+					value = True if value == 1 else False
+			elif isinstance(existing_value, int):
+				value = int(value)
+			elif isinstance(existing_value, float):
+				value = float(value)
+			elif isinstance(existing_value, Path):
+				value = Path(value)
+		except ValueError as e:
+			from secator.utils import debug
+			debug(f'Could not cast value {value} to expected type {type(existing_value).__name__}: {str(e)}', sub='config')
+			pass
+		finally:
+			target[final_key] = value
+			if set_partial:
+				partial[final_key] = value
+
+	def save(self, target_path: Path = None, partial=True):
+		"""Save config as YAML on disk.
+
+		Args:
+			target_path (Path | None): If passed, saves the config to this path.
+			partial (bool): Save partial config.
+		"""
+		if not target_path:
+			if not self._path:
+				return
+			target_path = self._path
+		with target_path.open('w') as f:
+			f.write(Config.dump(self, partial=partial))
+		self._path = target_path
+
+	def print(self, partial=True):
+		"""Print config.
+
+		Args:
+			partial (bool): Print partial config only.
+		"""
+		yaml_str = self.dump(self, partial=partial)
+		yaml_str = f'# {self._path}\n\n{yaml_str}' if self._path and partial else yaml_str
+		Config.print_yaml(yaml_str)
+
+	@staticmethod
+	def parse(data: dict = {}, path: Path = None, print_errors: bool = True):
+		"""Parse config.
+
+		Args:
+			data (dict): Config data.
+			path (Path | None): Path to YAML config.
+			print_errors (bool): Print validation errors to console.
+
+		Returns:
+			Config: instance of Config object.
+			None: if the config was not loaded properly or there are validation errors.
+		"""
+		# Load YAML file
+		if path:
+			data = Config.read_yaml(path)
+
+		# Load data
+		config = Config.load(SecatorConfig, data, print_errors=print_errors)
+		valid = config is not None
+		if not valid:
+			return None
+
+		# Set extras
+		config.set_extras(data, path)
+
+		# Override config values with environment variables
+		config.apply_env_overrides(print_errors=print_errors)
+
+		# Validate config
+		config.validate(print_errors=print_errors)
+
+		return config
+
+	def validate(self, print_errors=True):
+		"""Validate config."""
+		return Config.load(
+			SecatorConfig,
+			data=self._partial.toDict(),
+			print_errors=print_errors)
+
+	def set_extras(self, original_data, original_path):
+		"""Set extra useful values in config.
+
+		Args:
+			original_data (data): Original dict data.
+			original_path (pathlib.Path): Original YAML path.
+			valid (bool): Boolean indicating if config is valid or not.
+		"""
+		self._path = original_path
+		self._partial = Config(original_data)
+		self._keymap = Config.build_key_map(self)
+
+		# HACK: set default result_backend if unset
+		if not self.celery.result_backend:
+			self.celery.result_backend = f'file://{self.dirs.celery_results}'
+
+	@staticmethod
+	def load(schema, data: dict = {}, print_errors=True):
+		"""Validate a config using Pydantic.
+
+		Args:
+			schema (pydantic.Schema): Pydantic schema.
+			data (dict): Input data.
+			print_errors (bool): Print validation errors.
+
+		Returns:
+			Config|None: instance of Config object or None if invalid.
+		"""
+		try:
+			return Config(schema(**data).model_dump())
+		except ValidationError as e:
+			if print_errors:
+				error_str = str(e).replace('\n', '\n ')
+				console.print(f'[bold red]:x: {error_str}')
+			return None
+
+	@staticmethod
+	def read_yaml(yaml_path):
+		"""Read YAML from path.
+
+		Args:
+			yaml_path (Path): path to yaml config.
+
+		Returns:
+			dict: Loaded data.
+		"""
+		if not yaml_path.exists():
+			console.print(f'[bold red]Config not found: {yaml_path}.[/]')
+			return {}
+		try:
+			with yaml_path.open('r') as f:
+				data = yaml.load(f.read(), Loader=yaml.Loader)
+			return data or {}
+		except yaml.YAMLError as e:
+			console.print(f'[bold red]:x: Error loading {yaml_path} {str(e)}')
+			return {}
+
+	@staticmethod
+	def print_yaml(string):
+		"""Print YAML string using rich.
+
+		Args:
+			string (str): YAML string.
+		"""
+		from rich.syntax import Syntax
+		data = Syntax(string, 'yaml', theme='ansi-dark', padding=0, background_color='default')
+		console_stdout.print(data)
+
+	@staticmethod
+	def dump(config, partial=True):
+		"""Safe dump config as yaml:
+		- `Path`, `PosixPath` and `WindowsPath` objects are translated to strings.
+		- Home directory in paths is replaced with the tilde '~'.
+
+		Returns:
+			str: YAML dump.
+		"""
+		import yaml
+		from pathlib import Path, PosixPath, WindowsPath
+
+		# Get home dir
+		home = str(Path.home())
+
+		# Custom dumper to add line breaks between items and a path representer to translate paths to strings
+		class LineBreakDumper(yaml.SafeDumper):
+			def write_line_break(self, data=None):
+				super().write_line_break(data)
+				if len(self.indents) == 1:
+					super().write_line_break()
+
+		def posix_path_representer(dumper, data):
+			path = str(data)
+			if path.startswith(home):
+				path = path.replace(home, '~')
+			return dumper.represent_scalar('tag:yaml.org,2002:str', path)
+
+		LineBreakDumper.add_representer(str, posix_path_representer)
+		LineBreakDumper.add_representer(Path, posix_path_representer)
+		LineBreakDumper.add_representer(PosixPath, posix_path_representer)
+		LineBreakDumper.add_representer(WindowsPath, posix_path_representer)
+
+		# Get data dict
+		data = config.toDict()
+
+		# HACK: Replace home dir in result_backend
+		if isinstance(config, Config):
+			data['celery']['result_backend'] = data['celery']['result_backend'].replace(home, '~')
+			del data['_path']
+			if partial:
+				data = data['_partial']
 			else:
-				tasks.append(Task.get_task_class(name))
-		return tasks
-
-	def get_workflows(self):
-		return [ConfigLoader(name=f'workflows/{name}') for name, _ in self.workflows.items()]
-
-	def get_workflow_supported_opts(self):
-		opts = {}
-		tasks = self.get_tasks_class()
-		for task_cls in tasks:
-			task_opts = task_cls.get_supported_opts()
-			for name, conf in task_opts.items():
-				supported = opts.get(name, {}).get('supported', False)
-				opts[name] = conf
-				opts[name]['supported'] = conf['supported'] or supported
-		return opts
-
-	def get_scan_supported_opts(self):
-		opts = {}
-		workflows = self.get_workflows()
-		for workflow in workflows:
-			workflow_opts = workflow.get_workflow_supported_opts()
-			for name, conf in workflow_opts.items():
-				supported = opts.get(name, {}).get('supported', False)
-				opts[name] = conf
-				opts[name]['supported'] = conf['supported'] or supported
-		return opts
-
-	@property
-	def supported_opts(self):
-		return self.get_supported_opts()
-
-	def get_supported_opts(self):
-		opts = {}
-		if self.type == 'workflow':
-			opts = self.get_workflow_supported_opts()
-		elif self.type == 'scan':
-			opts = self.get_scan_supported_opts()
-		elif self.type == 'task':
-			tasks = self.get_tasks_class()
-			if tasks:
-				opts = tasks[0].get_supported_opts()
-		return dict(sorted(opts.items()))
+				del data['_partial']
+
+		data = {k: v for k, v in data.items() if not k.startswith('_')}
+		return yaml.dump(data, Dumper=LineBreakDumper, sort_keys=False)
+
+	@staticmethod
+	def build_key_map(config, base_path=[]):
+		key_map = {}
+		for key, value in config.items():
+			if key.startswith('_'): # ignore
+				continue
+			current_path = base_path + [key]
+			if isinstance(value, dict):
+				key_map.update(Config.build_key_map(value, current_path))
+			else:
+				key_map['_'.join(current_path).upper()] = current_path
+		return key_map
+
+	def apply_env_overrides(self, print_errors=True):
+		"""Override config values from environment variables."""
+		prefix = "SECATOR_"
+		for var in os.environ:
+			if var.startswith(prefix):
+				key = var[len(prefix):] # remove prefix
+				if key in self._keymap:
+					path = '.'.join(k.lower() for k in self._keymap[key])
+					value = os.environ[var]
+					self.set(path, value, set_partial=False)
+					if not self.validate(print_errors=False) and print_errors:
+						console.print(f'[bold red]{var} (override failed)[/]')
+				elif print_errors:
+					console.print(f'[bold red]{var} (override failed: key not found)[/]')
+
+
+def download_files(data: dict, target_folder: Path, offline_mode: bool, type: str):
+	"""Download remote files to target folder, clone git repos, or symlink local files.
+
+	Args:
+		data (dict): Dict of name to url or local path prefixed with 'git+' for Git repos.
+		target_folder (Path): Target folder for storing files or repos.
+		type (str): Type of files to handle.
+		offline_mode (bool): Offline mode.
+	"""
+	for name, url_or_path in data.items():
+		if url_or_path.startswith('git+'):
+			# Clone Git repository
+			git_url = url_or_path[4:] # remove 'git+' prefix
+			repo_name = git_url.split('/')[-1]
+			if repo_name.endswith('.git'):
+				repo_name = repo_name[:-4]
+			target_path = target_folder / repo_name
+			if not target_path.exists():
+				console.print(f'[bold turquoise4]Cloning git {type} [bold magenta]{repo_name}[/] ...[/] ', end='')
+				if offline_mode:
+					console.print('[bold orange1]skipped [dim][offline[/].[/]')
+					continue
+				try:
+					call(['git', 'clone', git_url, str(target_path)], stderr=DEVNULL, stdout=DEVNULL)
+					console.print('[bold green]ok.[/]')
+				except Exception as e:
+					console.print(f'[bold red]failed ({str(e)}).[/]')
+			data[name] = target_path.resolve()
+		elif Path(url_or_path).exists():
+			# Create a symbolic link for a local file
+			local_path = Path(url_or_path)
+			target_path = target_folder / local_path.name
+			if not target_path.exists():
+				console.print(f'[bold turquoise4]Symlinking {type} [bold magenta]{name}[/] ...[/] ', end='')
+				try:
+					target_path.symlink_to(local_path)
+					console.print('[bold green]ok.[/]')
+				except Exception as e:
+					console.print(f'[bold red]failed ({str(e)}).[/]')
+			data[name] = target_path.resolve()
+		else:
+			# Download file from URL
+			ext = url_or_path.split('.')[-1]
+			filename = f'{name}.{ext}'
+			target_path = target_folder / filename
+			if not target_path.exists():
+				try:
+					console.print(f'[bold turquoise4]Downloading {type} [bold magenta]{filename}[/] ...[/] ', end='')
+					if offline_mode:
+						console.print('[bold orange1]skipped [dim](offline)[/].[/]')
+						continue
+					resp = requests.get(url_or_path, timeout=3)
+					resp.raise_for_status()
+					with open(target_path, 'wb') as f:
+						f.write(resp.content)
+					console.print('[bold green]ok.[/]')
+				except requests.RequestException as e:
+					console.print(f'[bold red]failed ({str(e)}).[/]')
+					continue
+			data[name] = target_path.resolve()
+
+
+# Load default_config
+default_config = Config.parse(print_errors=False)
+
+# Load user config
+data_root = default_config.dirs.data
+config_path = data_root / 'config.yml'
+if not config_path.exists():
+	if not data_root.exists():
+		console.print(f'[bold turquoise4]Creating directory [bold magenta]{data_root}[/] ... [/]', end='')
+		data_root.mkdir(parents=False)
+		console.print('[bold green]ok.[/]')
+	console.print(
+		f'[bold turquoise4]Creating user conf [bold magenta]{config_path}[/]... [/]', end='')
+	config_path.touch()
+	console.print('[bold green]ok.[/]')
+CONFIG = Config.parse(path=config_path)
+
+# Fallback to default if invalid user config
+if not CONFIG:
+	console.print(f'[bold orange1]Invalid user config {config_path}. Falling back to default config.')
+	CONFIG = default_config
+
+# Create directories if they don't exist already
+for name, dir in CONFIG.dirs.items():
+	if not dir.exists():
+		console.print(f'[bold turquoise4]Creating directory [bold magenta]{dir}[/] ... [/]', end='')
+		dir.mkdir(parents=False)
+		console.print('[bold green]ok.[/]')
+
+# Download wordlists and set defaults
+download_files(CONFIG.wordlists.templates, CONFIG.dirs.wordlists, CONFIG.offline_mode, 'wordlist')
+for category, name in CONFIG.wordlists.defaults.items():
+	if name in CONFIG.wordlists.templates.keys():
+		CONFIG.wordlists.defaults[category] = str(CONFIG.wordlists.templates[name])

+# Download payloads
+download_files(CONFIG.payloads.templates, CONFIG.dirs.payloads, CONFIG.offline_mode, 'payload')
+
+# Print config
+if CONFIG.debug.component == 'config':
+	CONFIG.print()
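
To make the scope of the rewrite easier to follow, here is a minimal usage sketch of the new Config API, assembled from the Config docstring and the module-level loading code in the diff above. It assumes secator 0.4.0 is installed; the '/tmp/secator' data directory is an arbitrary example value.

# Minimal usage sketch of the 0.4.0 Config API (illustrative values only).
from secator.config import CONFIG, Config

# Parse a config from a dict; unspecified fields fall back to the
# SecatorConfig defaults, and invalid input returns None.
config = Config.parse({'dirs': {'data': '/tmp/secator'}})

# Read and write values through dotted key paths.
proxy = config.get('http.socks5_proxy', print=False)
config.set('addons.google.enabled', False)

# Print only the user-supplied keys, or the fully resolved config.
config.print()               # partial config
config.print(partial=False)  # full config

# The module-level CONFIG is parsed from <dirs.data>/config.yml at import
# time (the file is created if missing) and can be written back to disk.
CONFIG.set('offline_mode', True)
CONFIG.save()

Note that importing secator.config in 0.4.0 has side effects: it creates the configured directories and, unless offline_mode is set, downloads the default wordlists and payload templates declared in the Wordlists and Payloads models.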
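The new apply_env_overrides method also lets any configuration key be overridden through an environment variable named SECATOR_ plus the dotted key path, uppercased with dots replaced by underscores (the mapping comes from Config.build_key_map). A small sketch of the expected behaviour, with hypothetical override values:

# Hypothetical environment overrides; variable names mirror the key map
# built by Config.build_key_map (dotted path, uppercased, '.' -> '_').
import os

os.environ['SECATOR_DEBUG_LEVEL'] = '2'              # -> debug.level
os.environ['SECATOR_HTTP_STORE_RESPONSES'] = 'true'  # -> http.store_responses

# Overrides are applied inside Config.parse(), so they take effect when the
# module-level CONFIG is built at import time; string values are cast to the
# types declared on SecatorConfig (int and bool here).
from secator.config import CONFIG

print(CONFIG.debug.level)           # expected: 2
print(CONFIG.http.store_responses)  # expected: True

Because overrides are applied with set_partial=False, they only affect the in-memory CONFIG and are not written back to the user config file by save().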