secator 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of secator might be problematic. Click here for more details.

Files changed (114) hide show
  1. secator/__init__.py +0 -0
  2. secator/celery.py +482 -0
  3. secator/cli.py +617 -0
  4. secator/config.py +137 -0
  5. secator/configs/__init__.py +0 -0
  6. secator/configs/profiles/__init__.py +0 -0
  7. secator/configs/profiles/aggressive.yaml +7 -0
  8. secator/configs/profiles/default.yaml +9 -0
  9. secator/configs/profiles/stealth.yaml +7 -0
  10. secator/configs/scans/__init__.py +0 -0
  11. secator/configs/scans/domain.yaml +18 -0
  12. secator/configs/scans/host.yaml +14 -0
  13. secator/configs/scans/network.yaml +17 -0
  14. secator/configs/scans/subdomain.yaml +8 -0
  15. secator/configs/scans/url.yaml +12 -0
  16. secator/configs/workflows/__init__.py +0 -0
  17. secator/configs/workflows/cidr_recon.yaml +28 -0
  18. secator/configs/workflows/code_scan.yaml +11 -0
  19. secator/configs/workflows/host_recon.yaml +41 -0
  20. secator/configs/workflows/port_scan.yaml +34 -0
  21. secator/configs/workflows/subdomain_recon.yaml +33 -0
  22. secator/configs/workflows/url_crawl.yaml +29 -0
  23. secator/configs/workflows/url_dirsearch.yaml +29 -0
  24. secator/configs/workflows/url_fuzz.yaml +35 -0
  25. secator/configs/workflows/url_nuclei.yaml +11 -0
  26. secator/configs/workflows/url_vuln.yaml +55 -0
  27. secator/configs/workflows/user_hunt.yaml +10 -0
  28. secator/configs/workflows/wordpress.yaml +14 -0
  29. secator/decorators.py +309 -0
  30. secator/definitions.py +165 -0
  31. secator/exporters/__init__.py +12 -0
  32. secator/exporters/_base.py +3 -0
  33. secator/exporters/csv.py +30 -0
  34. secator/exporters/gdrive.py +118 -0
  35. secator/exporters/json.py +15 -0
  36. secator/exporters/table.py +7 -0
  37. secator/exporters/txt.py +25 -0
  38. secator/hooks/__init__.py +0 -0
  39. secator/hooks/mongodb.py +212 -0
  40. secator/output_types/__init__.py +24 -0
  41. secator/output_types/_base.py +95 -0
  42. secator/output_types/exploit.py +50 -0
  43. secator/output_types/ip.py +33 -0
  44. secator/output_types/port.py +45 -0
  45. secator/output_types/progress.py +35 -0
  46. secator/output_types/record.py +34 -0
  47. secator/output_types/subdomain.py +42 -0
  48. secator/output_types/tag.py +46 -0
  49. secator/output_types/target.py +30 -0
  50. secator/output_types/url.py +76 -0
  51. secator/output_types/user_account.py +41 -0
  52. secator/output_types/vulnerability.py +97 -0
  53. secator/report.py +107 -0
  54. secator/rich.py +124 -0
  55. secator/runners/__init__.py +12 -0
  56. secator/runners/_base.py +833 -0
  57. secator/runners/_helpers.py +153 -0
  58. secator/runners/command.py +638 -0
  59. secator/runners/scan.py +65 -0
  60. secator/runners/task.py +106 -0
  61. secator/runners/workflow.py +135 -0
  62. secator/serializers/__init__.py +8 -0
  63. secator/serializers/dataclass.py +33 -0
  64. secator/serializers/json.py +15 -0
  65. secator/serializers/regex.py +17 -0
  66. secator/tasks/__init__.py +10 -0
  67. secator/tasks/_categories.py +304 -0
  68. secator/tasks/cariddi.py +102 -0
  69. secator/tasks/dalfox.py +65 -0
  70. secator/tasks/dirsearch.py +90 -0
  71. secator/tasks/dnsx.py +56 -0
  72. secator/tasks/dnsxbrute.py +34 -0
  73. secator/tasks/feroxbuster.py +91 -0
  74. secator/tasks/ffuf.py +86 -0
  75. secator/tasks/fping.py +44 -0
  76. secator/tasks/gau.py +47 -0
  77. secator/tasks/gf.py +33 -0
  78. secator/tasks/gospider.py +71 -0
  79. secator/tasks/grype.py +79 -0
  80. secator/tasks/h8mail.py +81 -0
  81. secator/tasks/httpx.py +99 -0
  82. secator/tasks/katana.py +133 -0
  83. secator/tasks/maigret.py +78 -0
  84. secator/tasks/mapcidr.py +32 -0
  85. secator/tasks/msfconsole.py +174 -0
  86. secator/tasks/naabu.py +52 -0
  87. secator/tasks/nmap.py +344 -0
  88. secator/tasks/nuclei.py +97 -0
  89. secator/tasks/searchsploit.py +52 -0
  90. secator/tasks/subfinder.py +40 -0
  91. secator/tasks/wpscan.py +179 -0
  92. secator/utils.py +445 -0
  93. secator/utils_test.py +183 -0
  94. secator-0.0.1.dist-info/LICENSE +60 -0
  95. secator-0.0.1.dist-info/METADATA +199 -0
  96. secator-0.0.1.dist-info/RECORD +114 -0
  97. secator-0.0.1.dist-info/WHEEL +5 -0
  98. secator-0.0.1.dist-info/entry_points.txt +2 -0
  99. secator-0.0.1.dist-info/top_level.txt +2 -0
  100. tests/__init__.py +0 -0
  101. tests/integration/__init__.py +0 -0
  102. tests/integration/inputs.py +42 -0
  103. tests/integration/outputs.py +392 -0
  104. tests/integration/test_scans.py +82 -0
  105. tests/integration/test_tasks.py +103 -0
  106. tests/integration/test_workflows.py +163 -0
  107. tests/performance/__init__.py +0 -0
  108. tests/performance/loadtester.py +56 -0
  109. tests/unit/__init__.py +0 -0
  110. tests/unit/test_celery.py +39 -0
  111. tests/unit/test_scans.py +0 -0
  112. tests/unit/test_serializers.py +51 -0
  113. tests/unit/test_tasks.py +348 -0
  114. tests/unit/test_workflows.py +96 -0
@@ -0,0 +1,106 @@
1
+ from secator.definitions import DEBUG
2
+ from secator.output_types import Target
3
+ from secator.runners import Runner
4
+ from secator.utils import discover_tasks
5
+
6
+
7
class Task(Runner):
    """Runner for a single task (one tool execution)."""

    default_exporters = []
    enable_hooks = False

    @classmethod
    def delay(cls, *args, **kwargs):
        """Run task in a Celery worker (distributed mode).

        FIX: added the missing @classmethod decorator. Without it, calling
        Task.delay(targets, ...) bound the first positional argument to `cls`
        and silently dropped it from `args`. This also makes the signature
        consistent with Workflow.delay.

        Returns:
            celery.result.AsyncResult: Celery async result.
        """
        from secator.celery import run_task
        return run_task.apply_async(kwargs={'args': args, 'kwargs': kwargs}, queue='celery')

    def yielder(self):
        """Run task.

        Runs in the main thread when self.sync is True, otherwise dispatches to
        a Celery worker and polls for live results.

        Yields:
            Task results, followed by one Target item per input target.
        """
        # Get task class matching the config name
        task_cls = Task.get_task_class(self.config.name)

        # Task opts
        run_opts = self.run_opts.copy()
        run_opts.pop('output', None)
        dry_run = run_opts.get('show', False)
        if dry_run:
            self.print_item_count = False

        # Fmt opts
        fmt_opts = {
            'json': run_opts.get('json', False),
            'print_cmd': True,
            'print_cmd_prefix': not self.sync,
            'print_input_file': DEBUG > 0,
            'print_item': True,
            'print_item_count': not self.sync and not dry_run,
            'print_line': not self.output_quiet,
        }
        run_opts.update(fmt_opts)

        # Set task output types
        self.output_types = task_cls.output_types

        # Forward hooks and context to the task
        run_opts['hooks'] = {task_cls: self.hooks}
        run_opts['context'] = self.context

        # Run task
        if self.sync:
            task = task_cls(self.targets, **run_opts)
            if dry_run:  # dry run: building the task printed the command; don't execute
                return
        else:
            result = task_cls.delay(self.targets, **run_opts)
            task = self.process_live_tasks(
                result,
                description=False,
                results_only=True,
                print_remote_status=self.print_remote_status)

        # Yield task results
        yield from task

        # Yield targets
        for target in self.targets:
            yield Target(name=target, _source=self.config.name, _type='target', _context=self.context)

    @staticmethod
    def get_task_class(name):
        """Get task class from a name.

        Args:
            name (str): Task name, optionally suffixed with '/<variant>' which is ignored.

        Returns:
            type: Task class whose __name__ matches `name`.

        Raises:
            ValueError: If no task class matches the name.
        """
        if '/' in name:
            name = name.split('/')[0]
        for task_cls in discover_tasks():
            if task_cls.__name__ == name:
                return task_cls
        raise ValueError(f'Task {name} not found. Aborting.')

    @staticmethod
    def get_tasks_from_conf(config):
        """Get task names from config. Ignore hierarchy and keywords.

        TODO: Add hierarchy tree / add make flow diagrams.

        Args:
            config (dict): Tasks config subtree.

        Returns:
            list: Flat list of task names (without '/<variant>' suffixes).
        """
        tasks = []
        for name, opts in config.items():
            if name in ('_group', '_chain'):
                # Grouping keywords: recurse and flatten (both branches were identical)
                tasks.extend(Task.get_tasks_from_conf(opts))
            else:
                if '/' in name:
                    name = name.split('/')[0]
                tasks.append(name)
        return tasks
@@ -0,0 +1,135 @@
1
+ from celery import chain, chord
2
+
3
+ from secator.definitions import DEBUG
4
+ from secator.exporters import CsvExporter, JsonExporter
5
+ from secator.output_types import Target
6
+ from secator.runners._base import Runner
7
+ from secator.runners.task import Task
8
+ from secator.utils import merge_opts
9
+
10
+
11
class Workflow(Runner):
    """Runner for a workflow: a tree of tasks assembled into Celery chains/chords."""

    default_exporters = [
        JsonExporter,
        CsvExporter
    ]

    @classmethod
    def delay(cls, *args, **kwargs):
        """Run workflow in a Celery worker (distributed mode).

        Returns:
            celery.result.AsyncResult: Celery async result.
        """
        from secator.celery import run_workflow
        return run_workflow.delay(args=args, kwargs=kwargs)

    def yielder(self):
        """Run workflow.

        Runs in the main thread when self.sync is True, otherwise submits the
        Celery canvas and polls for live results.

        Yields:
            One Target item per input target, followed by workflow results.
        """
        # Yield targets
        for target in self.targets:
            yield Target(name=target, _source=self.config.name, _type='target', _context=self.context)

        # Task fmt opts
        run_opts = self.run_opts.copy()
        fmt_opts = {
            'json': run_opts.get('json', False),
            'print_cmd': True,
            'print_cmd_prefix': not self.sync,
            'print_description': self.sync,
            'print_input_file': DEBUG > 0,  # normalized to a bool, consistent with Task.yielder
            'print_item': True,
            'print_item_count': True,
            'print_line': not self.sync,
        }

        # Construct run opts
        run_opts['hooks'] = self._hooks.get(Task, {})
        run_opts.update(fmt_opts)

        # Build Celery workflow
        workflow = self.build_celery_workflow(run_opts=run_opts, results=self.results)

        # Run Celery workflow and get results
        if self.sync:
            results = workflow.apply().get()
        else:
            result = workflow()
            self.result = result
            results = self.process_live_tasks(result, results_only=True, print_remote_status=self.print_remote_status)

        # Get workflow results
        yield from results

    def build_celery_workflow(self, run_opts=None, results=None):
        """Build Celery workflow.

        Args:
            run_opts (dict, Optional): Run options forwarded to each task.
            results (list, Optional): Prior results used to seed the workflow.

        Returns:
            celery.chain: Celery task chain.
        """
        from secator.celery import forward_results
        # FIX: avoid mutable default arguments ({} / []) shared across calls
        run_opts = run_opts or {}
        results = results or []
        sigs = self.get_tasks(
            self.config.tasks.toDict(),
            self.targets,
            self.config.options,
            run_opts)
        sigs = [forward_results.si(results).set(queue='io')] + sigs + [forward_results.s().set(queue='io')]
        return chain(*sigs)

    def get_tasks(self, obj, targets, workflow_opts, run_opts):
        """Get tasks recursively as Celery chains / chords.

        Args:
            obj (dict): Tasks config subtree.
            targets (list): List of targets.
            workflow_opts (dict): Workflow options.
            run_opts (dict): Run options.

        Returns:
            list: List of Celery signatures.
        """
        from secator.celery import forward_results
        sigs = []
        for task_name, task_opts in obj.items():
            # Task opts can be None
            task_opts = task_opts or {}

            if task_name == '_group':
                # A group runs its subtasks in parallel: chord with a result-collecting callback
                tasks = self.get_tasks(task_opts, targets, workflow_opts, run_opts)
                sig = chord((tasks), forward_results.s().set(queue='io'))
            elif task_name == '_chain':
                # A chain runs its subtasks sequentially
                tasks = self.get_tasks(task_opts, targets, workflow_opts, run_opts)
                sig = chain(*tasks)
            else:
                # Get task class
                task = Task.get_task_class(task_name)

                # Merge task options (order of priority with overrides)
                opts = merge_opts(workflow_opts, task_opts, run_opts)

                # Add task context and hooks to options
                opts['hooks'] = {task: self._hooks.get(Task, {})}
                opts['context'] = self.context.copy()
                opts['name'] = task_name

                # Create task signature
                sig = task.s(targets, **opts).set(queue=task.profile)
                self.output_types.extend(task.output_types)
            sigs.append(sig)
        return sigs
@@ -0,0 +1,8 @@
1
from secator.serializers.dataclass import DataclassEncoder
from secator.serializers.json import JSONSerializer
from secator.serializers.regex import RegexSerializer

# Public API of the serializers package.
__all__ = [
    'JSONSerializer',
    'RegexSerializer',
    'DataclassEncoder',
]
@@ -0,0 +1,33 @@
1
+ import json
2
+ from secator.output_types import OUTPUT_TYPES
3
+
4
+
5
class DataclassEncoder(json.JSONEncoder):
    """JSON encoder that serializes any object exposing a `toDict()` method."""

    def default(self, obj):
        # Objects with a toDict() method (secator output types) serialize via it;
        # everything else falls back to the standard encoder (raises TypeError).
        to_dict = getattr(obj, 'toDict', None)
        if to_dict is not None:
            return to_dict()
        return super().default(obj)
11
+
12
+
13
def get_output_cls(type):
    """Return the output type class whose name matches `type`, or None if unknown."""
    return next((cls for cls in OUTPUT_TYPES if cls.get_name() == type), None)
18
+
19
+
20
def dataclass_decoder(obj):
    """json object_hook: rebuild an output-type object from a dict tagged with '_type'.

    Dicts without a '_type' key, or with an unknown type name, pass through unchanged.
    """
    if '_type' not in obj:
        return obj
    output_cls = get_output_cls(obj['_type'])
    return output_cls.load(obj) if output_cls else obj
26
+
27
+
28
def dumps_dataclass(obj, indent=None):
    """Serialize `obj` to a JSON string, encoding output types via DataclassEncoder."""
    return json.dumps(obj, indent=indent, cls=DataclassEncoder)
30
+
31
+
32
def loads_dataclass(obj):
    """Deserialize a JSON string, rebuilding tagged output-type objects via dataclass_decoder."""
    return json.loads(obj, object_hook=dataclass_decoder)
@@ -0,0 +1,15 @@
1
+ import yaml
2
+
3
+
4
class JSONSerializer:
    """Extract and parse the first JSON object embedded in a line of text."""

    def run(self, line):
        """Parse a JSON object embedded in `line`.

        Args:
            line (str): Raw output line, possibly with text surrounding the JSON object.

        Returns:
            The decoded object, or None if no parseable object is found.
        """
        start_index = line.find('{')
        end_index = line.rfind('}')
        if start_index == -1 or end_index == -1:
            return None
        candidate = line[start_index:end_index + 1]
        # FIX: try the stdlib JSON parser first — strict, fast, and dependency-free
        # (local import keeps this block self-contained).
        import json
        try:
            return json.loads(candidate)
        except json.JSONDecodeError:
            pass
        # Backward-compatible fallback: the original implementation parsed with
        # yaml.safe_load, which accepts a superset of JSON (e.g. single quotes).
        try:
            return yaml.safe_load(candidate)
        except yaml.YAMLError:
            return None
@@ -0,0 +1,17 @@
1
+ import re
2
+
3
+
4
class RegexSerializer:
    """Parse lines with a regular expression, extracting named groups into a dict."""

    def __init__(self, regex, fields=None):
        """
        Args:
            regex (str): Regular expression containing named groups.
            fields (list, Optional): Named groups to extract. Defaults to none.
        """
        self.regex = re.compile(regex)
        # FIX: avoid a mutable default argument ([]) shared across instances
        self.fields = fields if fields is not None else []

    def run(self, line):
        """Match `line` against the regex (anchored at the start).

        Args:
            line (str): Line to parse.

        Returns:
            dict | None: Mapping of field name to captured group, or None if no match.
        """
        match = self.regex.match(line)
        if not match:
            return None
        return {field: match.group(field) for field in self.fields}
@@ -0,0 +1,10 @@
1
"""Task registry: discovers all task classes and re-exports them at package level."""
from secator.utils import discover_internal_tasks, discover_external_tasks

# Task classes found inside this package.
INTERNAL_TASKS = discover_internal_tasks()
# Task classes found by discover_external_tasks — presumably user-supplied
# tasks outside the package; confirm against secator.utils.
EXTERNAL_TASKS = discover_external_tasks()
ALL_TASKS = INTERNAL_TASKS + EXTERNAL_TASKS
__all__ = [
    cls.__name__
    for cls in ALL_TASKS
]
# Import each internal task so `from secator.tasks import <name>` works.
# NOTE(review): exec with a dynamic relative import — this requires each task
# module to be named exactly after its class (e.g. class `nmap` in nmap.py).
for cls in INTERNAL_TASKS:
    exec(f'from .{cls.__name__} import {cls.__name__}')
@@ -0,0 +1,304 @@
1
+ import json
2
+ import logging
3
+ import os
4
+
5
+ import requests
6
+ from bs4 import BeautifulSoup
7
+ from cpe import CPE
8
+
9
+ from secator.definitions import (CIDR_RANGE, CONFIDENCE, CVSS_SCORE,
10
+ DEFAULT_HTTP_WORDLIST, DELAY, DEPTH, DESCRIPTION,
11
+ FILTER_CODES, FILTER_REGEX, FILTER_SIZE,
12
+ FILTER_WORDS, FOLLOW_REDIRECT, HEADER, HOST, ID,
13
+ MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
14
+ MATCH_WORDS, METHOD, NAME, PATH, PROVIDER,
15
+ PROXY, RATE_LIMIT, REFERENCES, RETRIES,
16
+ SEVERITY, TAGS, DATA_FOLDER, THREADS, TIMEOUT,
17
+ URL, USER_AGENT, USERNAME, WORDLIST)
18
+ from secator.output_types import (Ip, Port, Subdomain, Tag, Url, UserAccount,
19
+ Vulnerability)
20
+ from secator.runners import Command
21
+
22
logger = logging.getLogger(__name__)

# Meta-option definitions shared by all task categories. Keys are option names
# imported from secator.definitions; values are CLI option kwargs.
OPTS = {
    HEADER: {'type': str, 'help': 'Custom header to add to each request in the form "KEY1:VALUE1; KEY2:VALUE2"'},
    DELAY: {'type': float, 'short': 'd', 'help': 'Delay to add between each requests'},
    DEPTH: {'type': int, 'help': 'Scan depth', 'default': 2},
    FILTER_CODES: {'type': str, 'short': 'fc', 'help': 'Filter out responses with HTTP codes'},
    FILTER_REGEX: {'type': str, 'short': 'fr', 'help': 'Filter out responses with regular expression'},
    FILTER_SIZE: {'type': str, 'short': 'fs', 'help': 'Filter out responses with size'},
    FILTER_WORDS: {'type': str, 'short': 'fw', 'help': 'Filter out responses with word count'},
    FOLLOW_REDIRECT: {'is_flag': True, 'short': 'frd', 'help': 'Follow HTTP redirects'},
    MATCH_CODES: {'type': str, 'short': 'mc', 'help': 'Match HTTP status codes e.g "201,300,301"'},
    MATCH_REGEX: {'type': str, 'short': 'mr', 'help': 'Match responses with regular expression'},
    # FIX: typo in user-visible help text ("respones" -> "responses")
    MATCH_SIZE: {'type': str, 'short': 'ms', 'help': 'Match responses with size'},
    MATCH_WORDS: {'type': str, 'short': 'mw', 'help': 'Match responses with word count'},
    METHOD: {'type': str, 'help': 'HTTP method to use for requests'},
    PROXY: {'type': str, 'help': 'HTTP(s) / SOCKS5 proxy'},
    RATE_LIMIT: {'type': int, 'short': 'rl', 'help': 'Rate limit, i.e max number of requests per second'},
    RETRIES: {'type': int, 'help': 'Retries'},
    THREADS: {'type': int, 'help': 'Number of threads to run', 'default': 50},
    TIMEOUT: {'type': int, 'help': 'Request timeout'},
    USER_AGENT: {'type': str, 'short': 'ua', 'help': 'User agent, e.g "Mozilla Firefox 1.0"'},
    WORDLIST: {'type': str, 'short': 'w', 'default': DEFAULT_HTTP_WORDLIST, 'help': 'Wordlist to use'}
}

# Option sets per task category; each entry is a key into OPTS.
OPTS_HTTP = [
    HEADER, DELAY, FOLLOW_REDIRECT, METHOD, PROXY, RATE_LIMIT, RETRIES, THREADS, TIMEOUT, USER_AGENT
]

# FIX: removed a duplicate FOLLOW_REDIRECT (already in OPTS_HTTP); consumers
# build dicts from this list, so the duplicate had no effect anyway.
OPTS_HTTP_CRAWLERS = OPTS_HTTP + [
    DEPTH, MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, FILTER_REGEX, FILTER_CODES, FILTER_SIZE, FILTER_WORDS,
    MATCH_CODES
]

OPTS_HTTP_FUZZERS = OPTS_HTTP_CRAWLERS + [WORDLIST]

OPTS_RECON = [
    DELAY, PROXY, RATE_LIMIT, RETRIES, THREADS, TIMEOUT
]

OPTS_VULN = [
    HEADER, DELAY, FOLLOW_REDIRECT, PROXY, RATE_LIMIT, RETRIES, THREADS, TIMEOUT, USER_AGENT
]

# No category-wide options for OSINT tasks yet.
OPTS_OSINT = [

]
69
+
70
+
71
+ #---------------#
72
+ # HTTP category #
73
+ #---------------#
74
+
75
class Http(Command):
    """Base class for HTTP tasks (URL in, Url items out)."""
    # NOTE(review): uses the crawler option set (OPTS_HTTP_CRAWLERS), identical to
    # HttpCrawler — confirm whether OPTS_HTTP was meant here instead.
    meta_opts = {k: OPTS[k] for k in OPTS_HTTP_CRAWLERS}
    input_type = URL
    output_types = [Url]
79
+
80
+
81
class HttpCrawler(Command):
    """Base class for HTTP crawler tasks (URL in, Url items out)."""
    meta_opts = {k: OPTS[k] for k in OPTS_HTTP_CRAWLERS}
    input_type = URL
    output_types = [Url]
85
+
86
+
87
class HttpFuzzer(Command):
    """Base class for HTTP fuzzer tasks (crawler options plus a wordlist)."""
    meta_opts = {k: OPTS[k] for k in OPTS_HTTP_FUZZERS}
    input_type = URL
    output_types = [Url]
91
+
92
+
93
+ #----------------#
94
+ # Recon category #
95
+ #----------------#
96
+
97
class Recon(Command):
    """Base class for reconnaissance tasks."""
    meta_opts = {k: OPTS[k] for k in OPTS_RECON}
    output_types = [Subdomain, UserAccount, Ip, Port]
100
+
101
+
102
class ReconDns(Recon):
    """DNS reconnaissance tasks: host in, Subdomain items out."""
    input_type = HOST
    output_types = [Subdomain]
105
+
106
+
107
class ReconUser(Recon):
    """User reconnaissance tasks: username in, UserAccount items out."""
    input_type = USERNAME
    output_types = [UserAccount]
110
+
111
+
112
class ReconIp(Recon):
    """IP reconnaissance tasks: CIDR range in, Ip items out."""
    input_type = CIDR_RANGE
    output_types = [Ip]
115
+
116
+
117
class ReconPort(Recon):
    """Port reconnaissance tasks: host in, Port items out."""
    input_type = HOST
    output_types = [Port]
120
+
121
+
122
+ #---------------#
123
+ # Vuln category #
124
+ #---------------#
125
+
126
class Vuln(Command):
    """Base class for vulnerability-scanning tasks, with CVE/GHSA lookup helpers."""
    meta_opts = {k: OPTS[k] for k in OPTS_VULN}
    output_types = [Vulnerability]

    @staticmethod
    def lookup_local_cve(cve_id):
        """Load CVE data from the local CVE database, if downloaded.

        Args:
            cve_id (str): CVE ID in the form CVE-*.

        Returns:
            dict | None: CVE data, or None if not present locally.
        """
        cve_path = f'{DATA_FOLDER}/cves/{cve_id}.json'
        if os.path.exists(cve_path):
            with open(cve_path, 'r') as f:
                return json.load(f)
        return None

    @staticmethod
    def lookup_cve(cve_id, cpes=None):
        """Search for a CVE in local db or using cve.circl.lu and return vulnerability data.

        Args:
            cve_id (str): CVE ID in the form CVE-*
            cpes (list, Optional): CPEs to match for.

        Returns:
            dict | None: vulnerability data, or None if the lookup failed or the
            passed CPEs did not match any vulnerable product.
        """
        cpes = cpes or []  # FIX: avoid a mutable default argument
        cve_info = Vuln.lookup_local_cve(cve_id)
        if not cve_info:
            try:
                cve_info = requests.get(f'https://cve.circl.lu/api/cve/{cve_id}', timeout=5).json()
                if not cve_info:
                    logger.error(f'Could not fetch CVE info for cve {cve_id}. Skipping.')
                    return None
            except Exception:
                logger.error(f'Could not fetch CVE info for cve {cve_id}. Skipping.')
                return None

        # Match the CPE string against the affected products CPE FS strings from the CVE data if a CPE was
        # passed. This allow to limit the number of False positives (high) that we get from nmap NSE vuln
        # scripts like vulscan and ensure we keep only right matches. The check is not executed if no CPE
        # was passed (sometimes nmap cannot properly detect a CPE) or if the CPE version cannot be determined.
        cpe_match = False
        tags = []
        if cpes:
            for cpe in cpes:
                cpe_obj = CPE(cpe)
                cpe_fs = cpe_obj.as_fs()
                vulnerable_fs = cve_info['vulnerable_product']
                for fs in vulnerable_fs:
                    if fs == cpe_fs:
                        cpe_match = True
                        tags.append('cpe-match')
            if not cpe_match:
                return None

        # Parse CVE id and CVSS
        name = vuln_id = cve_info['id']  # renamed local to avoid shadowing builtin `id`
        cvss = cve_info.get('cvss') or 0

        # Get description
        description = cve_info.get('summary')
        if description is not None:
            description = description.replace(vuln_id, '').strip()

        # Get references; the circl.lu URL is appended exactly once here.
        # FIX: the original also prepended the same URL to the returned REFERENCES
        # list, duplicating it.
        references = cve_info.get(REFERENCES, [])
        cve_ref_url = f'https://cve.circl.lu/cve/{vuln_id}'
        references.append(cve_ref_url)

        # Get CWE ID
        # FIX: original condition was inverted (`is None`), appending None to tags
        # exactly when the CWE was missing and never when it was present.
        vuln_cwe_id = cve_info.get('cwe')
        if vuln_cwe_id is not None:
            tags.append(vuln_cwe_id)

        # Parse capecs for a better vuln name / type
        capecs = cve_info.get('capec', [])
        if capecs:
            name = capecs[0]['name']

        # Parse ovals for a better vuln name / type
        ovals = cve_info.get('oval', [])
        if ovals:
            if description == 'none':
                description = ovals[0]['title']
            family = ovals[0]['family']
            tags.append(family)

        # Set vulnerability severity based on CVSS score
        severity = None
        if cvss:
            if cvss < 4:
                severity = 'low'
            elif cvss < 7:
                severity = 'medium'
            elif cvss < 9:
                severity = 'high'
            else:
                severity = 'critical'

        # Set confidence: high only when a CPE matched a vulnerable product
        confidence = 'high' if cpe_match else 'low'
        return {
            ID: vuln_id,
            NAME: name,
            PROVIDER: 'cve.circl.lu',
            SEVERITY: severity,
            CVSS_SCORE: cvss,
            TAGS: tags,
            REFERENCES: references,
            DESCRIPTION: description,
            CONFIDENCE: confidence
        }

    @staticmethod
    def lookup_ghsa(ghsa_id):
        """Search for a GHSA on GitHub and return the associated CVE vulnerability data.

        Args:
            ghsa_id (str): GHSA ID in the form GHSA-*.

        Returns:
            dict | None: vulnerability data, or None if no CVE data was found.
        """
        reference = f'https://github.com/advisories/{ghsa_id}'
        response = requests.get(reference, timeout=5)  # FIX: added timeout to avoid hanging forever
        soup = BeautifulSoup(response.text, 'lxml')
        # NOTE(review): scrapes the advisory page; assumes the CVE ID is in the
        # third sidebar item — fragile if GitHub changes its page layout.
        sidebar_items = soup.find_all('div', {'class': 'discussion-sidebar-item'})
        cve_id = sidebar_items[2].find('div').text.strip()
        data = Vuln.lookup_cve(cve_id)
        if data:
            data[TAGS].append('ghsa')
            return data
        return None
275
+
276
+
277
class VulnHttp(Vuln):
    """Vulnerability tasks that take a host as input."""
    input_type = HOST
279
+
280
+
281
class VulnCode(Vuln):
    """Vulnerability tasks that take a filesystem path as input."""
    input_type = PATH
283
+
284
+
285
class VulnMulti(Vuln):
    """Vulnerability tasks that take a host as input and emit Vulnerability items."""
    input_type = HOST
    output_types = [Vulnerability]
288
+
289
+
290
+ #--------------#
291
+ # Tag category #
292
+ #--------------#
293
+
294
class Tagger(Command):
    """Tasks that take a URL as input and emit Tag items."""
    input_type = URL
    output_types = [Tag]
297
+
298
+ #----------------#
299
+ # osint category #
300
+ #----------------#
301
+
302
+
303
class OSInt(Command):
    """Base class for OSINT tasks, emitting UserAccount items."""
    output_types = [UserAccount]