secator 0.3.6__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release: this version of secator might be problematic.

secator/config.py CHANGED
@@ -1,137 +1,573 @@
- import glob
  import os
  from pathlib import Path
+ from subprocess import call, DEVNULL
+ from typing import Dict, List
+ from typing_extensions import Annotated, Self
 
+ import requests
  import yaml
+ from dotenv import find_dotenv, load_dotenv
  from dotmap import DotMap
+ from pydantic import AfterValidator, BaseModel, model_validator, ValidationError
 
- from secator.rich import console
- from secator.definitions import CONFIGS_FOLDER, EXTRA_CONFIGS_FOLDER
+ from secator.rich import console, console_stdout
 
- CONFIGS_DIR_KEYS = ['workflow', 'scan', 'profile']
+ load_dotenv(find_dotenv(usecwd=True), override=False)
 
+ Directory = Annotated[Path, AfterValidator(lambda v: v.expanduser())]
+ StrExpandHome = Annotated[str, AfterValidator(lambda v: v.replace('~', str(Path.home())))]
 
- def load_config(name):
-     """Load a config by name.
+ ROOT_FOLDER = Path(__file__).parent.parent
+ LIB_FOLDER = ROOT_FOLDER / 'secator'
+ CONFIGS_FOLDER = LIB_FOLDER / 'configs'
 
-     Args:
-         name: Name of the config, for instances profiles/aggressive or workflows/domain_scan.
 
-     Returns:
-         dict: Loaded config.
+ class StrictModel(BaseModel, extra='forbid'):
+     pass
+
+
+ class Directories(StrictModel):
+     bin: Directory = Path.home() / '.local' / 'bin'
+     data: Directory = Path.home() / '.secator'
+     templates: Directory = ''
+     reports: Directory = ''
+     wordlists: Directory = ''
+     cves: Directory = ''
+     payloads: Directory = ''
+     revshells: Directory = ''
+     celery: Directory = ''
+     celery_data: Directory = ''
+     celery_results: Directory = ''
+
+     @model_validator(mode='after')
+     def set_default_folders(self) -> Self:
+         """Set folders to be relative to the data folder if they are unspecified in config."""
+         for folder in ['templates', 'reports', 'wordlists', 'cves', 'payloads', 'revshells', 'celery', 'celery_data', 'celery_results']:  # noqa: E501
+             rel_target = '/'.join(folder.split('_'))
+             val = getattr(self, folder) or self.data / rel_target
+             setattr(self, folder, val)
+         return self
+
+
+ class Debug(StrictModel):
+     level: int = 0
+     component: str = ''
+
+
+ class Celery(StrictModel):
+     broker_url: str = 'filesystem://'
+     broker_pool_limit: int = 10
+     broker_connection_timeout: float = 4.0
+     broker_visibility_timeout: int = 3600
+     override_default_logging: bool = True
+     result_backend: StrExpandHome = ''
+
+
+ class Cli(StrictModel):
+     github_token: str = ''
+     record: bool = False
+     stdin_timeout: int = 1000
+
+
+ class Runners(StrictModel):
+     input_chunk_size: int = 1000
+     progress_update_frequency: int = 60
+     skip_cve_search: bool = False
+     skip_cve_low_confidence: bool = True
+
+
+ class HTTP(StrictModel):
+     socks5_proxy: str = 'socks5://127.0.0.1:9050'
+     http_proxy: str = 'https://127.0.0.1:9080'
+     store_responses: bool = False
+     proxychains_command: str = 'proxychains'
+     freeproxy_timeout: int = 1
+
+
+ class Tasks(StrictModel):
+     exporters: List[str] = ['json', 'csv']
+
+
+ class Workflows(StrictModel):
+     exporters: List[str] = ['json', 'csv']
+
+
+ class Scans(StrictModel):
+     exporters: List[str] = ['json', 'csv']
+
+
+ class Payloads(StrictModel):
+     templates: Dict[str, str] = {
+         'lse': 'https://github.com/diego-treitos/linux-smart-enumeration/releases/latest/download/lse.sh',
+         'linpeas': 'https://github.com/carlospolop/PEASS-ng/releases/latest/download/linpeas.sh',
+         'sudo_killer': 'https://github.com/TH3xACE/SUDO_KILLER/archive/refs/heads/V3.zip'
+     }
+
+
+ class Wordlists(StrictModel):
+     defaults: Dict[str, str] = {'http': 'bo0m_fuzz', 'dns': 'combined_subdomains'}
+     templates: Dict[str, str] = {
+         'bo0m_fuzz': 'https://raw.githubusercontent.com/Bo0oM/fuzz.txt/master/fuzz.txt',
+         'combined_subdomains': 'https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/combined_subdomains.txt'  # noqa: E501
+     }
+     lists: Dict[str, List[str]] = {}
+
+
+ class GoogleAddon(StrictModel):
+     enabled: bool = False
+     drive_parent_folder_id: str = ''
+     credentials_path: str = ''
+
+
+ class WorkerAddon(StrictModel):
+     enabled: bool = False
+
+
+ class MongodbAddon(StrictModel):
+     enabled: bool = False
+     url: str = 'mongodb://localhost'
+     update_frequency: int = 60
+
+
+ class Addons(StrictModel):
+     google: GoogleAddon = GoogleAddon()
+     worker: WorkerAddon = WorkerAddon()
+     mongodb: MongodbAddon = MongodbAddon()
+
+
+ class SecatorConfig(StrictModel):
+     dirs: Directories = Directories()
+     debug: Debug = Debug()
+     celery: Celery = Celery()
+     cli: Cli = Cli()
+     runners: Runners = Runners()
+     http: HTTP = HTTP()
+     tasks: Tasks = Tasks()
+     workflows: Workflows = Workflows()
+     scans: Scans = Scans()
+     payloads: Payloads = Payloads()
+     wordlists: Wordlists = Wordlists()
+     addons: Addons = Addons()
+     offline_mode: bool = False
+
+
+ class Config(DotMap):
+     """Config class.
+
+     Examples:
+         >>> config = Config.parse()  # get default config.
+         >>> config = Config.parse({'dirs': {'data': '/tmp/'}})  # get custom config (from dict).
+         >>> config = Config.parse(path='/path/to/config.yml')  # get custom config (from YAML file).
+         >>> config.print()  # print config without defaults.
+         >>> config.print(partial=False)  # print full config.
+         >>> config.set('addons.google.enabled', False)  # set value in config.
+         >>> config.save()  # save config back to disk.
      """
-     path = Path(CONFIGS_FOLDER) / f'{name}.yaml'
-     if not path.exists():
-         console.log(f'Config "{name}" could not be loaded.')
-         return
-     with path.open('r') as f:
-         return yaml.load(f.read(), Loader=yaml.Loader)
-
-
- def find_configs():
-     results = {'scan': [], 'workflow': [], 'profile': []}
-     dirs_type = [CONFIGS_FOLDER]
-     if EXTRA_CONFIGS_FOLDER:
-         dirs_type.append(EXTRA_CONFIGS_FOLDER)
-     paths = []
-     for dir in dirs_type:
-         dir_paths = [
-             os.path.abspath(path)
-             for path in glob.glob(dir.rstrip('/') + '/**/*.y*ml', recursive=True)
-         ]
-         paths.extend(dir_paths)
-     for path in paths:
-         with open(path, 'r') as f:
-             try:
-                 config = yaml.load(f.read(), yaml.Loader)
-                 type = config.get('type')
-                 if type:
-                     results[type].append(path)
-             except yaml.YAMLError as exc:
-                 console.log(f'Unable to load config at {path}')
-                 console.log(str(exc))
-     return results
-
-
- class ConfigLoader(DotMap):
-
-     def __init__(self, input={}, name=None, **kwargs):
-         if name:
-             name = name.replace('-', '_')  # so that workflows have a nice '-' in CLI
-             config = self._load_from_name(name)
-         elif isinstance(input, str):
-             config = self._load_from_file(input)
-         else:
-             config = input
-         super().__init__(config)
 
-     def _load_from_file(self, path):
-         if not os.path.exists(path):
-             console.log(f'Config path {path} does not exist', style='bold red')
+     _error = False
+
+     def get(self, key=None, print=True):
+         """Retrieve a value from the configuration using a dotted path.
+
+         Args:
+             key (str | None): Dotted key path.
+             print (bool): Print the resulting value.
+
+         Returns:
+             Any: value at key.
+         """
+         value = self
+         if key:
+             for part in key.split('.'):
+                 value = value[part]
+                 if value is None:
+                     console.print(f'[bold red]Key {key} does not exist.[/]')
+                     return None
+         if print:
+             if key:
+                 yaml_str = Config.dump(DotMap({key: value}), partial=False)
+             else:
+                 yaml_str = Config.dump(self, partial=False)
+             Config.print_yaml(yaml_str)
+         return value
+
+     def set(self, key, value, set_partial=True):
+         """Set a value in the configuration using a dotted path.
+
+         Args:
+             key (str | None): Dotted key path.
+             value (Any): Value.
+             set_partial (bool): Set in partial config.
+         """
+         # Get existing value
+         existing_value = self.get(key, print=False)
+
+         # Convert dotted key path to the corresponding uppercase key used in _keymap
+         map_key = key.upper().replace('.', '_')
+
+         # Check if map key exists
+         if map_key not in self._keymap:
+             console.print(f'[bold red]Key "{key}" not found in config keymap[/].')
              return
-         if path and os.path.exists(path):
-             with open(path, 'r') as f:
-                 return yaml.load(f.read(), Loader=yaml.Loader)
-
-     def _load_from_name(self, name):
-         return load_config(name)
-
-     @classmethod
-     def load_all(cls):
-         configs = find_configs()
-         return ConfigLoader({
-             key: [ConfigLoader(path) for path in configs[key]]
-             for key in CONFIGS_DIR_KEYS
-         })
-
-     def get_tasks_class(self):
-         from secator.runners import Task
-         tasks = []
-         for name, conf in self.tasks.items():
-             if name == '_group':
-                 group_conf = ConfigLoader(input={'tasks': conf})
-                 tasks.extend(group_conf.get_tasks_class())
+
+         # Traverse to the second last key to handle the setting correctly
+         target = self
+         partial = self._partial
+         for part in self._keymap[map_key][:-1]:
+             target = target[part]
+             partial = partial[part]
+
+         # Set the value on the final part of the path
+         final_key = self._keymap[map_key][-1]
+
+         # Try to convert value to expected type
+         try:
+             if isinstance(existing_value, list):
+                 if isinstance(value, str):
+                     if value.startswith('[') and value.endswith(']'):
+                         value = value[1:-1]
+                     if ',' in value:
+                         value = [c.strip() for c in value.split(',')]
+                     elif value:
+                         value = [value]
+                     else:
+                         value = []
+             elif isinstance(existing_value, dict):
+                 if isinstance(value, str):
+                     if value.startswith('{') and value.endswith('}'):
+                         import json
+                         value = json.loads(value)
+             elif isinstance(existing_value, bool):
+                 if isinstance(value, str):
+                     value = value.lower() in ("true", "1", "t")
+                 elif isinstance(value, (int, float)):
+                     value = True if value == 1 else False
+             elif isinstance(existing_value, int):
+                 value = int(value)
+             elif isinstance(existing_value, float):
+                 value = float(value)
+             elif isinstance(existing_value, Path):
+                 value = Path(value)
+         except ValueError:
+             # from secator.utils import debug
+             # debug(f'Could not cast value {value} to expected type {type(existing_value).__name__}: {str(e)}', sub='config')
+             pass
+         finally:
+             target[final_key] = value
+             if set_partial:
+                 partial[final_key] = value
+
+     def save(self, target_path: Path = None, partial=True):
+         """Save config as YAML on disk.
+
+         Args:
+             target_path (Path | None): If passed, saves the config to this path.
+             partial (bool): Save partial config.
+         """
+         if not target_path:
+             if not self._path:
+                 return
+             target_path = self._path
+         with target_path.open('w') as f:
+             f.write(Config.dump(self, partial=partial))
+         self._path = target_path
+
+     def print(self, partial=True):
+         """Print config.
+
+         Args:
+             partial (bool): Print partial config only.
+         """
+         yaml_str = self.dump(self, partial=partial)
+         yaml_str = f'# {self._path}\n\n{yaml_str}' if self._path and partial else yaml_str
+         Config.print_yaml(yaml_str)
+
+     @staticmethod
+     def parse(data: dict = {}, path: Path = None, print_errors: bool = True):
+         """Parse config.
+
+         Args:
+             data (dict): Config data.
+             path (Path | None): Path to YAML config.
+             print_errors (bool): Print validation errors to console.
+
+         Returns:
+             Config: instance of Config object.
+             None: if the config was not loaded properly or there are validation errors.
+         """
+         # Load YAML file
+         if path:
+             data = Config.read_yaml(path)
+
+         # Load data
+         config = Config.load(SecatorConfig, data, print_errors=print_errors)
+         valid = config is not None
+         if not valid:
+             return None
+
+         # Set extras
+         config.set_extras(data, path)
+
+         # Override config values with environment variables
+         config.apply_env_overrides(print_errors=print_errors)
+
+         # Validate config
+         config.validate(print_errors=print_errors)
+
+         return config
+
+     def validate(self, print_errors=True):
+         """Validate config."""
+         return Config.load(
+             SecatorConfig,
+             data=self._partial.toDict(),
+             print_errors=print_errors)
+
+     def set_extras(self, original_data, original_path):
+         """Set extra useful values in config.
+
+         Args:
+             original_data (dict): Original dict data.
+             original_path (pathlib.Path): Original YAML path.
+             valid (bool): Boolean indicating if config is valid or not.
+         """
+         self._path = original_path
+         self._partial = Config(original_data)
+         self._keymap = Config.build_key_map(self)
+
+         # HACK: set default result_backend if unset
+         if not self.celery.result_backend:
+             self.celery.result_backend = f'file://{self.dirs.celery_results}'
+
+     @staticmethod
+     def load(schema, data: dict = {}, print_errors=True):
+         """Validate a config using Pydantic.
+
+         Args:
+             schema (pydantic.Schema): Pydantic schema.
+             data (dict): Input data.
+             print_errors (bool): Print validation errors.
+
+         Returns:
+             Config|None: instance of Config object or None if invalid.
+         """
+         try:
+             return Config(schema(**data).model_dump())
+         except ValidationError as e:
+             if print_errors:
+                 error_str = str(e).replace('\n', '\n ')
+                 console.print(f'[bold red]:x: {error_str}')
+             return None
+
+     @staticmethod
+     def read_yaml(yaml_path):
+         """Read YAML from path.
+
+         Args:
+             yaml_path (Path): path to yaml config.
+
+         Returns:
+             dict: Loaded data.
+         """
+         if not yaml_path.exists():
+             console.print(f'[bold red]Config not found: {yaml_path}.[/]')
+             return {}
+         try:
+             with yaml_path.open('r') as f:
+                 data = yaml.load(f.read(), Loader=yaml.Loader)
+             return data or {}
+         except yaml.YAMLError as e:
+             console.print(f'[bold red]:x: Error loading {yaml_path} {str(e)}')
+             return {}
+
+     @staticmethod
+     def print_yaml(string):
+         """Print YAML string using rich.
+
+         Args:
+             string (str): YAML string.
+         """
+         from rich.syntax import Syntax
+         data = Syntax(string, 'yaml', theme='ansi-dark', padding=0, background_color='default')
+         console_stdout.print(data)
+
+     @staticmethod
+     def dump(config, partial=True):
+         """Safe dump config as yaml:
+         - `Path`, `PosixPath` and `WindowsPath` objects are translated to strings.
+         - Home directory in paths is replaced with the tilde '~'.
+
+         Returns:
+             str: YAML dump.
+         """
+         import yaml
+         from pathlib import Path, PosixPath, WindowsPath
+
+         # Get home dir
+         home = str(Path.home())
+
+         # Custom dumper to add line breaks between items and a path representer to translate paths to strings
+         class LineBreakDumper(yaml.SafeDumper):
+             def write_line_break(self, data=None):
+                 super().write_line_break(data)
+                 if len(self.indents) == 1:
+                     super().write_line_break()
+
+         def posix_path_representer(dumper, data):
+             path = str(data)
+             if path.startswith(home):
+                 path = path.replace(home, '~')
+             return dumper.represent_scalar('tag:yaml.org,2002:str', path)
+
+         LineBreakDumper.add_representer(str, posix_path_representer)
+         LineBreakDumper.add_representer(Path, posix_path_representer)
+         LineBreakDumper.add_representer(PosixPath, posix_path_representer)
+         LineBreakDumper.add_representer(WindowsPath, posix_path_representer)
+
+         # Get data dict
+         data = config.toDict()
+
+         # HACK: Replace home dir in result_backend
+         if isinstance(config, Config):
+             data['celery']['result_backend'] = data['celery']['result_backend'].replace(home, '~')
+             del data['_path']
+             if partial:
+                 data = data['_partial']
              else:
-                 tasks.append(Task.get_task_class(name))
-         return tasks
-
-     def get_workflows(self):
-         return [ConfigLoader(name=f'workflows/{name}') for name, _ in self.workflows.items()]
-
-     def get_workflow_supported_opts(self):
-         opts = {}
-         tasks = self.get_tasks_class()
-         for task_cls in tasks:
-             task_opts = task_cls.get_supported_opts()
-             for name, conf in task_opts.items():
-                 supported = opts.get(name, {}).get('supported', False)
-                 opts[name] = conf
-                 opts[name]['supported'] = conf['supported'] or supported
-         return opts
-
-     def get_scan_supported_opts(self):
-         opts = {}
-         workflows = self.get_workflows()
-         for workflow in workflows:
-             workflow_opts = workflow.get_workflow_supported_opts()
-             for name, conf in workflow_opts.items():
-                 supported = opts.get(name, {}).get('supported', False)
-                 opts[name] = conf
-                 opts[name]['supported'] = conf['supported'] or supported
-         return opts
-
-     @property
-     def supported_opts(self):
-         return self.get_supported_opts()
-
-     def get_supported_opts(self):
-         opts = {}
-         if self.type == 'workflow':
-             opts = self.get_workflow_supported_opts()
-         elif self.type == 'scan':
-             opts = self.get_scan_supported_opts()
-         elif self.type == 'task':
-             tasks = self.get_tasks_class()
-             if tasks:
-                 opts = tasks[0].get_supported_opts()
-         return dict(sorted(opts.items()))
+                 del data['_partial']
+
+         data = {k: v for k, v in data.items() if not k.startswith('_')}
+         return yaml.dump(data, Dumper=LineBreakDumper, sort_keys=False)
+
+     @staticmethod
+     def build_key_map(config, base_path=[]):
+         key_map = {}
+         for key, value in config.items():
+             if key.startswith('_'):  # ignore
+                 continue
+             current_path = base_path + [key]
+             if isinstance(value, dict):
+                 key_map.update(Config.build_key_map(value, current_path))
+             else:
+                 key_map['_'.join(current_path).upper()] = current_path
+         return key_map
+
+     def apply_env_overrides(self, print_errors=True):
+         """Override config values from environment variables."""
+         prefix = "SECATOR_"
+         for var in os.environ:
+             if var.startswith(prefix):
+                 key = var[len(prefix):]  # remove prefix
+                 if key in self._keymap:
+                     path = '.'.join(k.lower() for k in self._keymap[key])
+                     value = os.environ[var]
+                     self.set(path, value, set_partial=False)
+                     if not self.validate(print_errors=False) and print_errors:
+                         console.print(f'[bold red]{var} (override failed)[/]')
+                 elif print_errors:
+                     console.print(f'[bold red]{var} (override failed: key not found)[/]')
+
+
+ def download_files(data: dict, target_folder: Path, offline_mode: bool, type: str):
+     """Download remote files to target folder, clone git repos, or symlink local files.
+
+     Args:
+         data (dict): Dict of name to url or local path prefixed with 'git+' for Git repos.
+         target_folder (Path): Target folder for storing files or repos.
+         type (str): Type of files to handle.
+         offline_mode (bool): Offline mode.
+     """
+     for name, url_or_path in data.items():
+         if url_or_path.startswith('git+'):
+             # Clone Git repository
+             git_url = url_or_path[4:]  # remove 'git+' prefix
+             repo_name = git_url.split('/')[-1]
+             if repo_name.endswith('.git'):
+                 repo_name = repo_name[:-4]
+             target_path = target_folder / repo_name
+             if not target_path.exists():
+                 console.print(f'[bold turquoise4]Cloning git {type} [bold magenta]{repo_name}[/] ...[/] ', end='')
+                 if offline_mode:
+                     console.print('[bold orange1]skipped [dim](offline)[/].[/]')
+                     continue
+                 try:
+                     call(['git', 'clone', git_url, str(target_path)], stderr=DEVNULL, stdout=DEVNULL)
+                     console.print('[bold green]ok.[/]')
+                 except Exception as e:
+                     console.print(f'[bold red]failed ({str(e)}).[/]')
+             data[name] = target_path.resolve()
+         elif Path(url_or_path).exists():
+             # Create a symbolic link for a local file
+             local_path = Path(url_or_path)
+             target_path = target_folder / local_path.name
+             if not target_path.exists():
+                 console.print(f'[bold turquoise4]Symlinking {type} [bold magenta]{name}[/] ...[/] ', end='')
+                 try:
+                     target_path.symlink_to(local_path)
+                     console.print('[bold green]ok.[/]')
+                 except Exception as e:
+                     console.print(f'[bold red]failed ({str(e)}).[/]')
+             data[name] = target_path.resolve()
+         else:
+             # Download file from URL
+             ext = url_or_path.split('.')[-1]
+             filename = f'{name}.{ext}'
+             target_path = target_folder / filename
+             if not target_path.exists():
+                 try:
+                     console.print(f'[bold turquoise4]Downloading {type} [bold magenta]{filename}[/] ...[/] ', end='')
+                     if offline_mode:
+                         console.print('[bold orange1]skipped [dim](offline)[/].[/]')
+                         continue
+                     resp = requests.get(url_or_path, timeout=3)
+                     resp.raise_for_status()
+                     with open(target_path, 'wb') as f:
+                         f.write(resp.content)
+                     console.print('[bold green]ok.[/]')
+                 except requests.RequestException as e:
+                     console.print(f'[bold red]failed ({str(e)}).[/]')
+                     continue
+             data[name] = target_path.resolve()
+
+
+ # Load default_config
+ default_config = Config.parse(print_errors=False)
+
+ # Load user config
+ data_root = default_config.dirs.data
+ config_path = data_root / 'config.yml'
+ if not config_path.exists():
+     if not data_root.exists():
+         console.print(f'[bold turquoise4]Creating directory [bold magenta]{data_root}[/] ... [/]', end='')
+         data_root.mkdir(parents=False)
+         console.print('[bold green]ok.[/]')
+     console.print(
+         f'[bold turquoise4]Creating user conf [bold magenta]{config_path}[/]... [/]', end='')
+     config_path.touch()
+     console.print('[bold green]ok.[/]')
+ CONFIG = Config.parse(path=config_path)
+
+ # Fallback to default if invalid user config
+ if not CONFIG:
+     console.print(f'[bold orange1]Invalid user config {config_path}. Falling back to default config.')
+     CONFIG = default_config
+
+ # Create directories if they don't exist already
+ for name, dir in CONFIG.dirs.items():
+     if not dir.exists():
+         console.print(f'[bold turquoise4]Creating directory [bold magenta]{dir}[/] ... [/]', end='')
+         dir.mkdir(parents=False)
+         console.print('[bold green]ok.[/]')
+
+ # Download wordlists and set defaults
+ download_files(CONFIG.wordlists.templates, CONFIG.dirs.wordlists, CONFIG.offline_mode, 'wordlist')
+ for category, name in CONFIG.wordlists.defaults.items():
+     if name in CONFIG.wordlists.templates.keys():
+         CONFIG.wordlists.defaults[category] = str(CONFIG.wordlists.templates[name])
+
+ # Download payloads
+ download_files(CONFIG.payloads.templates, CONFIG.dirs.payloads, CONFIG.offline_mode, 'payload')
+
+ # Print config
+ if CONFIG.debug.component == 'config':
+     CONFIG.print()
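
For orientation, below is a minimal sketch of how the Config API added in this release could be exercised, following the docstring examples and the SECATOR_-prefixed environment override logic shown in apply_env_overrides above. The path and keys used here are illustrative only (not shipped defaults), and the snippet assumes secator 0.4.1 is installed.

    # Illustrative sketch only: the path and keys below are examples, not package defaults.
    import os
    from pathlib import Path
    from secator.config import Config

    # Environment overrides are applied during Config.parse():
    # SECATOR_HTTP_STORE_RESPONSES maps to http.store_responses via the config keymap.
    os.environ['SECATOR_HTTP_STORE_RESPONSES'] = 'true'

    config = Config.parse(path=Path('/tmp/secator-example.yml'))  # hypothetical user config path
    if config:
        config.set('addons.mongodb.enabled', True)  # dotted-path setter with type coercion
        config.get('addons.mongodb')                # prints the subtree as YAML
        config.save()                               # writes the partial (non-default) config back to disk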