secator 0.15.1__py3-none-any.whl → 0.16.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.
Files changed (106)
  1. secator/celery.py +40 -24
  2. secator/celery_signals.py +71 -68
  3. secator/celery_utils.py +43 -27
  4. secator/cli.py +520 -280
  5. secator/cli_helper.py +394 -0
  6. secator/click.py +87 -0
  7. secator/config.py +67 -39
  8. secator/configs/profiles/http_headless.yaml +6 -0
  9. secator/configs/profiles/http_record.yaml +6 -0
  10. secator/configs/profiles/tor.yaml +1 -1
  11. secator/configs/scans/domain.yaml +4 -2
  12. secator/configs/scans/host.yaml +1 -1
  13. secator/configs/scans/network.yaml +1 -4
  14. secator/configs/scans/subdomain.yaml +13 -1
  15. secator/configs/scans/url.yaml +1 -2
  16. secator/configs/workflows/cidr_recon.yaml +6 -4
  17. secator/configs/workflows/code_scan.yaml +1 -1
  18. secator/configs/workflows/host_recon.yaml +29 -3
  19. secator/configs/workflows/subdomain_recon.yaml +67 -16
  20. secator/configs/workflows/url_crawl.yaml +44 -15
  21. secator/configs/workflows/url_dirsearch.yaml +4 -4
  22. secator/configs/workflows/url_fuzz.yaml +25 -17
  23. secator/configs/workflows/url_params_fuzz.yaml +7 -0
  24. secator/configs/workflows/url_vuln.yaml +33 -8
  25. secator/configs/workflows/user_hunt.yaml +2 -1
  26. secator/configs/workflows/wordpress.yaml +5 -3
  27. secator/cve.py +718 -0
  28. secator/decorators.py +0 -454
  29. secator/definitions.py +49 -30
  30. secator/exporters/_base.py +2 -2
  31. secator/exporters/console.py +2 -2
  32. secator/exporters/table.py +4 -3
  33. secator/exporters/txt.py +1 -1
  34. secator/hooks/mongodb.py +2 -4
  35. secator/installer.py +77 -49
  36. secator/loader.py +116 -0
  37. secator/output_types/_base.py +3 -0
  38. secator/output_types/certificate.py +63 -63
  39. secator/output_types/error.py +4 -5
  40. secator/output_types/info.py +2 -2
  41. secator/output_types/ip.py +3 -1
  42. secator/output_types/progress.py +5 -9
  43. secator/output_types/state.py +17 -17
  44. secator/output_types/tag.py +3 -0
  45. secator/output_types/target.py +10 -2
  46. secator/output_types/url.py +19 -7
  47. secator/output_types/vulnerability.py +11 -7
  48. secator/output_types/warning.py +2 -2
  49. secator/report.py +27 -15
  50. secator/rich.py +18 -10
  51. secator/runners/_base.py +446 -233
  52. secator/runners/_helpers.py +133 -24
  53. secator/runners/command.py +182 -102
  54. secator/runners/scan.py +33 -5
  55. secator/runners/task.py +13 -7
  56. secator/runners/workflow.py +105 -72
  57. secator/scans/__init__.py +2 -2
  58. secator/serializers/dataclass.py +20 -20
  59. secator/tasks/__init__.py +4 -4
  60. secator/tasks/_categories.py +39 -27
  61. secator/tasks/arjun.py +9 -5
  62. secator/tasks/bbot.py +53 -21
  63. secator/tasks/bup.py +19 -5
  64. secator/tasks/cariddi.py +24 -3
  65. secator/tasks/dalfox.py +26 -7
  66. secator/tasks/dirsearch.py +10 -4
  67. secator/tasks/dnsx.py +70 -25
  68. secator/tasks/feroxbuster.py +11 -3
  69. secator/tasks/ffuf.py +42 -6
  70. secator/tasks/fping.py +20 -8
  71. secator/tasks/gau.py +3 -1
  72. secator/tasks/gf.py +3 -3
  73. secator/tasks/gitleaks.py +2 -2
  74. secator/tasks/gospider.py +7 -1
  75. secator/tasks/grype.py +5 -4
  76. secator/tasks/h8mail.py +2 -1
  77. secator/tasks/httpx.py +18 -5
  78. secator/tasks/katana.py +35 -15
  79. secator/tasks/maigret.py +4 -4
  80. secator/tasks/mapcidr.py +3 -3
  81. secator/tasks/msfconsole.py +4 -4
  82. secator/tasks/naabu.py +2 -2
  83. secator/tasks/nmap.py +12 -14
  84. secator/tasks/nuclei.py +3 -3
  85. secator/tasks/searchsploit.py +4 -5
  86. secator/tasks/subfinder.py +2 -2
  87. secator/tasks/testssl.py +264 -263
  88. secator/tasks/trivy.py +5 -5
  89. secator/tasks/wafw00f.py +21 -3
  90. secator/tasks/wpprobe.py +90 -83
  91. secator/tasks/wpscan.py +6 -5
  92. secator/template.py +218 -104
  93. secator/thread.py +15 -15
  94. secator/tree.py +196 -0
  95. secator/utils.py +131 -123
  96. secator/utils_test.py +60 -19
  97. secator/workflows/__init__.py +2 -2
  98. {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/METADATA +36 -36
  99. secator-0.16.0.dist-info/RECORD +132 -0
  100. secator/configs/profiles/default.yaml +0 -8
  101. secator/configs/workflows/url_nuclei.yaml +0 -11
  102. secator/tasks/dnsxbrute.py +0 -42
  103. secator-0.15.1.dist-info/RECORD +0 -128
  104. {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/WHEEL +0 -0
  105. {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/entry_points.txt +0 -0
  106. {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/licenses/LICENSE +0 -0
secator/runners/task.py CHANGED
@@ -1,7 +1,6 @@
-import uuid
 from secator.config import CONFIG
 from secator.runners import Runner
-from secator.utils import discover_tasks
+from secator.loader import discover_tasks
 from celery import chain


@@ -47,16 +46,23 @@ class Task(Runner):
 		opts['hooks'] = hooks
 		opts['context'] = self.context.copy()
 		opts['reports_folder'] = str(self.reports_folder)
-		opts['enable_reports'] = False  # Task will handle reports
-		opts['enable_duplicate_check'] = False  # Task will handle duplicate check
+
+		# Task class will handle those
+		opts['enable_reports'] = False
+		opts['enable_profiles'] = False
+		opts['enable_duplicate_check'] = False
+		opts['print_start'] = False
+		opts['print_end'] = False
+		opts['print_target'] = False
 		opts['has_parent'] = False
 		opts['skip_if_no_inputs'] = False
 		opts['caller'] = 'Task'

 		# Create task signature
-		task_id = str(uuid.uuid4())
-		sig = run_command.si(self.results, self.config.name, self.inputs, opts).set(queue=task_cls.profile, task_id=task_id)
-		self.add_subtask(task_id, self.config.name, self.config.description or '')
+		profile = task_cls.profile(opts) if callable(task_cls.profile) else task_cls.profile
+		sig = run_command.si(self.results, self.config.name, self.inputs, opts).set(queue=profile)
+		task_id = sig.freeze().task_id
+		self.add_subtask(task_id, self.config.name, self.description)
 		return chain(sig)

 	@staticmethod
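
The switch from a pre-generated uuid to sig.freeze().task_id relies on Celery assigning the signature's id at freeze time. A minimal sketch of that behavior, with illustrative names (not secator's actual task):

	from celery import Celery

	app = Celery(__name__, broker='memory://')

	@app.task
	def run_command(results, name, targets, opts):
		return results

	# freeze() finalizes the signature and returns its AsyncResult, so the
	# task id is known (and stable) before apply_async() is ever called.
	sig = run_command.si([], 'httpx', ['example.com'], {}).set(queue='io')
	task_id = sig.freeze().task_id
	assert sig.freeze().task_id == task_id  # freezing is idempotent

This removes the need to pass task_id into .set() explicitly: the id registered via add_subtask and the id Celery executes under are now one and the same.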
secator/runners/workflow.py CHANGED
@@ -1,8 +1,10 @@
-import uuid
+from dotmap import DotMap

 from secator.config import CONFIG
+from secator.output_types import Info
 from secator.runners._base import Runner
 from secator.runners.task import Task
+from secator.tree import build_runner_tree, walk_runner_tree
 from secator.utils import merge_opts


@@ -30,16 +32,18 @@ class Workflow(Runner):
 			celery.Signature: Celery task signature.
 		"""
 		from celery import chain
-		from secator.celery import mark_runner_started, mark_runner_completed
+		from secator.celery import mark_runner_started, mark_runner_completed, forward_results

 		# Prepare run options
 		opts = self.run_opts.copy()
 		opts.pop('output', None)
 		opts.pop('no_poll', False)
+		opts.pop('print_profiles', False)

 		# Set hooks and reports
-		self.enable_reports = True  # Workflow will handle reports
 		self.enable_hooks = False  # Celery will handle hooks
+		self.enable_reports = True  # Workflow will handle reports
+		self.print_item = not self.sync

 		# Get hooks
 		hooks = self._hooks.get(Task, {})
@@ -52,84 +56,113 @@ class Workflow(Runner):
 		opts['skip_if_no_inputs'] = True
 		opts['caller'] = 'Workflow'

-		forwarded_opts = {}
-		if chain_previous_results:
-			forwarded_opts = {k: v for k, v in self.run_opts.items() if k.endswith('_')}
-
-		# Build task signatures
-		sigs = self.get_tasks(
-			self.config.tasks.toDict(),
-			self.inputs,
-			self.config.options,
-			opts,
-			forwarded_opts=forwarded_opts
-		)
+		# Remove workflow config prefix from opts
+		for k, v in opts.copy().items():
+			if k.startswith(self.config.name + '_'):
+				opts[k.replace(self.config.name + '_', '')] = v

-		start_sig = mark_runner_started.si([], self, enable_hooks=True).set(queue='results')
-		if chain_previous_results:
-			start_sig = mark_runner_started.s(self, enable_hooks=True).set(queue='results')
+		# Remove dynamic opts from parent runner
+		opts = {k: v for k, v in opts.items() if k not in self.dynamic_opts}

-		# Build workflow chain with lifecycle management
-		return chain(
-			start_sig,
-			*sigs,
-			mark_runner_completed.s(self, enable_hooks=True).set(queue='results'),
-		)
+		# Forward workflow opts to first task if needed
+		forwarded_opts = {}
+		if chain_previous_results:
+			forwarded_opts = self.dynamic_opts

-	def get_tasks(self, config, inputs, workflow_opts, run_opts, forwarded_opts={}):
-		"""Get tasks recursively as Celery chains / chords.
+		# Build workflow tree
+		tree = build_runner_tree(self.config)
+		current_id = tree.root_nodes[0].id
+		ix = 0
+		sigs = []

-		Args:
-			config (dict): Tasks config dict.
-			inputs (list): Inputs.
-			workflow_opts (dict): Workflow options.
-			run_opts (dict): Run options.
-			forwarded_opts (dict): Opts forwarded from parent runner (e.g: scan).
-			sync (bool): Synchronous mode (chain of tasks, no chords).
+		def process_task(node, force=False, parent_ix=None):
+			from celery import chain, group
+			from secator.utils import debug
+			nonlocal ix
+			sig = None
+
+			if node.id is None:
+				return
+
+			if node.type == 'task':
+				if node.parent.type == 'group' and not force:
+					return
+
+				# Skip task if condition is not met
+				condition = node.opts.pop('if', None)
+				local_ns = {'opts': DotMap(opts)}
+				if condition:
+					# debug(f'{node.id} evaluating {condition} with opts {opts}', sub=self.config.name)
+					result = eval(condition, {"__builtins__": {}}, local_ns)
+					if not result:
+						debug(f'{node.id} skipped task because condition is not met: {condition}', sub=self.config.name)
+						self.add_result(Info(message=f'Skipped task [bold gold3]{node.name}[/] because condition is not met: [bold green]{condition}[/]'))  # noqa: E501
+						return

-		Returns:
-			tuple (List[celery.Signature], List[str]): Celery signatures, Celery task ids.
-		"""
-		from celery import chain, group
-		sigs = []
-		ix = 0
-		for task_name, task_opts in config.items():
-			# Task opts can be None
-			task_opts = task_opts or {}
-
-			# If it's a group, process the sublevel tasks as a Celery chord.
-			if task_name.startswith('_group'):
-				tasks = self.get_tasks(
-					task_opts,
-					inputs,
-					workflow_opts,
-					run_opts
-				)
-				sig = group(*tasks)
-			elif task_name == '_chain':
-				tasks = self.get_tasks(
-					task_opts,
-					inputs,
-					workflow_opts,
-					run_opts
-				)
-				sig = chain(*tasks)
-			else:
 				# Get task class
-				task = Task.get_task_class(task_name)
+				task = Task.get_task_class(node.name)

 				# Merge task options (order of priority with overrides)
-				opts = merge_opts(workflow_opts, task_opts, run_opts)
-				if ix == 0 and forwarded_opts:
-					opts.update(forwarded_opts)
-				opts['name'] = task_name
+				task_opts = merge_opts(self.config.default_options.toDict(), node.opts, opts)
+				if (ix == 0 or parent_ix == 0) and forwarded_opts:
+					task_opts.update(forwarded_opts)

 				# Create task signature
-				task_id = str(uuid.uuid4())
-				opts['context'] = self.context.copy()
-				sig = task.s(inputs, **opts).set(queue=task.profile, task_id=task_id)
-				self.add_subtask(task_id, task_name, task_opts.get('description', ''))
+				task_opts['name'] = node.name
+				task_opts['context'] = self.context.copy()
+				task_opts['context']['node_id'] = node.id
+				task_opts['context']['ancestor_id'] = None if (ix == 0 or parent_ix == 0) else current_id
+				task_opts['aliases'] = [node.id, node.name]
+				if task.__name__ != node.name:
+					task_opts['aliases'].append(task.__name__)
+				profile = task.profile(task_opts) if callable(task.profile) else task.profile
+				sig = task.s(self.inputs, **task_opts).set(queue=profile)
+				task_id = sig.freeze().task_id
+				debug(f'{node.id} sig built ix: {ix}, parent_ix: {parent_ix}', sub=self.config.name)
+				# debug(f'{node.id} opts', obj=task_opts, sub=f'workflow.{self.config.name}')
+				debug(f'{node.id} ancestor id: {task_opts.get("context", {}).get("ancestor_id")}', sub=self.config.name)
+				self.add_subtask(task_id, node.name, task_opts.get('description', ''))
 				self.output_types.extend(task.output_types)
 				ix += 1
-			sigs.append(sig)
-		return sigs
+
+			elif node.type == 'group' and node.children:
+				parent_ix = ix
+				tasks = [sig for sig in [process_task(child, force=True, parent_ix=parent_ix) for child in node.children] if sig]
+				debug(f'{node.id} group built with {len(tasks)} tasks', sub=self.config.name)
+				if len(tasks) == 1:
+					debug(f'{node.id} downgraded group to task', sub=self.config.name)
+					sig = tasks[0]
+				elif len(tasks) > 1:
+					sig = group(*tasks)
+					last_sig = sigs[-1] if sigs else None
+					if sig and isinstance(last_sig, group):  # cannot chain 2 groups without bridge task
+						debug(f'{node.id} previous is group, adding bridge task forward_results', sub=self.config.name)
+						sigs.append(forward_results.s())
+				else:
+					debug(f'{node.id} group built with 0 tasks', sub=self.config.name)
+				ix += 1
+
+			elif node.type == 'chain' and node.children:
+				tasks = [sig for sig in [process_task(child, force=True, parent_ix=ix) for child in node.children] if sig]
+				sig = chain(*tasks) if tasks else None
+				debug(f'{node.id} chain built with {len(tasks)} tasks', sub=self.config.name)
+				ix += 1
+
+			if sig and node.parent.type != 'group':
+				debug(f'{node.id} added to workflow', sub=self.config.name)
+				sigs.append(sig)
+
+			return sig
+
+		walk_runner_tree(tree, process_task)
+
+		# Build workflow chain with lifecycle management
+		start_sig = mark_runner_started.si([], self, enable_hooks=True).set(queue='results')
+		if chain_previous_results:
+			start_sig = mark_runner_started.s(self, enable_hooks=True).set(queue='results')
+		sig = chain(
+			start_sig,
+			*sigs,
+			mark_runner_completed.s(self, enable_hooks=True).set(queue='results'),
+		)
+		return sig
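
The forward_results bridge added above works around a Celery constraint the code comments call out: chaining one group straight into another does not synchronize the first group ("cannot chain 2 groups without bridge task"), whereas chaining a group into a single task auto-upgrades the pair to a chord. A minimal sketch with stand-in tasks (the real secator.celery.forward_results may do more than pass results through):

	from celery import Celery, chain, group

	app = Celery(__name__, broker='memory://', backend='cache+memory://')

	@app.task
	def run_task(results, name):
		# Stand-in for a secator task: accumulate results.
		return (results or []) + [name]

	@app.task
	def forward_results(results):
		# Bridge: hand the first group's collected results to the next group.
		return results

	wf = chain(
		group(run_task.s([], 'httpx'), run_task.s([], 'katana')),
		forward_results.s(),  # chord body for group 1, feeds group 2
		group(run_task.s('nuclei'), run_task.s('dalfox')),
	)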
secator/scans/__init__.py CHANGED
@@ -1,4 +1,4 @@
-from secator.cli import ALL_SCANS
+from secator.loader import get_configs_by_type
 from secator.runners import Scan


@@ -21,7 +21,7 @@ class DynamicScan(Scan):


 DYNAMIC_SCANS = {}
-for scan in ALL_SCANS:
+for scan in get_configs_by_type('scan'):
 	instance = DynamicScan(scan)
 	DYNAMIC_SCANS[scan.name] = instance

secator/serializers/dataclass.py CHANGED
@@ -5,35 +5,35 @@ from secator.output_types import OUTPUT_TYPES


 class DataclassEncoder(json.JSONEncoder):
-    def default(self, obj):
-        if hasattr(obj, 'toDict'):
-            return obj.toDict()
-        elif isinstance(obj, PosixPath):
-            return str(obj)
-        elif isinstance(obj, (datetime, date)):
-            return obj.isoformat()
-        else:
-            return json.JSONEncoder.default(self, obj)
+	def default(self, obj):
+		if hasattr(obj, 'toDict'):
+			return obj.toDict()
+		elif isinstance(obj, PosixPath):
+			return str(obj)
+		elif isinstance(obj, (datetime, date)):
+			return obj.isoformat()
+		else:
+			return json.JSONEncoder.default(self, obj)


 def get_output_cls(type):
-    try:
-        return [cls for cls in OUTPUT_TYPES if cls.get_name() == type][0]
-    except IndexError:
-        return None
+	try:
+		return [cls for cls in OUTPUT_TYPES if cls.get_name() == type][0]
+	except IndexError:
+		return None


 def dataclass_decoder(obj):
-    if '_type' in obj:
-        output_cls = get_output_cls(obj['_type'])
-        if output_cls:
-            return output_cls.load(obj)
-    return obj
+	if '_type' in obj:
+		output_cls = get_output_cls(obj['_type'])
+		if output_cls:
+			return output_cls.load(obj)
+	return obj


 def dumps_dataclass(obj, indent=None):
-    return json.dumps(obj, cls=DataclassEncoder, indent=indent)
+	return json.dumps(obj, cls=DataclassEncoder, indent=indent)


 def loads_dataclass(obj):
-    return json.loads(obj, object_hook=dataclass_decoder)
+	return json.loads(obj, object_hook=dataclass_decoder)
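
A short usage sketch of the (functionally unchanged) helpers above, assuming a POSIX system: the encoder flattens Paths and datetimes that plain json.dumps would reject, and the decoder only rehydrates dicts carrying a '_type' discriminator.

	from datetime import datetime
	from pathlib import PosixPath

	from secator.serializers.dataclass import dumps_dataclass, loads_dataclass

	doc = dumps_dataclass({'report': PosixPath('/tmp/report.json'), 'ts': datetime(2024, 1, 1)})
	print(doc)                   # {"report": "/tmp/report.json", "ts": "2024-01-01T00:00:00"}
	print(loads_dataclass(doc))  # plain dict back: no '_type' key, so no output-type decoding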
secator/tasks/__init__.py CHANGED
@@ -1,8 +1,8 @@
-from secator.utils import discover_tasks
+from secator.loader import discover_tasks
 TASKS = discover_tasks()
 __all__ = [
-    cls.__name__
-    for cls in TASKS
+	cls.__name__
+	for cls in TASKS
 ]
 for cls in TASKS:
-    exec(f'from .{cls.__name__} import {cls.__name__}')
+	exec(f'from .{cls.__name__} import {cls.__name__}')
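
The exec loop above re-exports each discovered task class from the module of the same name (e.g. class httpx from secator/tasks/httpx.py). A functionally equivalent sketch without exec, using importlib (not what secator ships, just the same effect spelled out):

	import importlib

	for cls in TASKS:
		# Resolve '.httpx' relative to this package, then bind the class
		# into the package namespace, mirroring 'from .httpx import httpx'.
		module = importlib.import_module(f'.{cls.__name__}', package=__name__)
		globals()[cls.__name__] = getattr(module, cls.__name__)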
secator/tasks/_categories.py CHANGED
@@ -8,7 +8,7 @@ import requests
 from bs4 import BeautifulSoup
 from cpe import CPE

-from secator.definitions import (CIDR_RANGE, CVSS_SCORE, DELAY, DEPTH, DESCRIPTION, FILTER_CODES,
+from secator.definitions import (CIDR_RANGE, CVSS_SCORE, DATA, DELAY, DEPTH, DESCRIPTION, FILTER_CODES,
                                  FILTER_REGEX, FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT, HEADER, HOST, ID, IP,
                                  MATCH_CODES, MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, METHOD, NAME, PATH, PROVIDER, PROXY,
                                  RATE_LIMIT, REFERENCES, RETRIES, SEVERITY, TAGS, THREADS, TIMEOUT, URL, USER_AGENT,
@@ -16,7 +16,15 @@ from secator.definitions import (CIDR_RANGE, CVSS_SCORE, DELAY, DEPTH, DESCRIPTI
 from secator.output_types import Ip, Port, Subdomain, Tag, Url, UserAccount, Vulnerability
 from secator.config import CONFIG
 from secator.runners import Command
-from secator.utils import debug, process_wordlist
+from secator.utils import debug, process_wordlist, headers_to_dict
+
+
+def process_headers(headers_dict):
+	headers = []
+	for key, value in headers_dict.items():
+		headers.append(f'{key}:{value}')
+	return headers
+

 USER_AGENTS = {
 	'chrome_134.0_win10': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36',  # noqa: E501
@@ -25,18 +33,19 @@ USER_AGENTS = {


 OPTS = {
-	HEADER: {'type': str, 'help': 'Custom header to add to each request in the form "KEY1:VALUE1; KEY2:VALUE2"', 'default': 'User-Agent: ' + USER_AGENTS['chrome_134.0_win10']},  # noqa: E501
+	HEADER: {'type': str, 'short': 'H', 'help': 'Custom header to add to each request in the form "KEY1:VALUE1;; KEY2:VALUE2"', 'pre_process': headers_to_dict, 'process': process_headers, 'default': 'User-Agent: ' + USER_AGENTS['chrome_134.0_win10']},  # noqa: E501
+	DATA: {'type': str, 'help': 'Data to send in the request body'},
 	DELAY: {'type': float, 'short': 'd', 'help': 'Delay to add between each requests'},
 	DEPTH: {'type': int, 'help': 'Scan depth'},
 	FILTER_CODES: {'type': str, 'short': 'fc', 'help': 'Filter out responses with HTTP codes'},
 	FILTER_REGEX: {'type': str, 'short': 'fr', 'help': 'Filter out responses with regular expression'},
-	FILTER_SIZE: {'type': str, 'short': 'fs', 'help': 'Filter out responses with size'},
-	FILTER_WORDS: {'type': str, 'short': 'fw', 'help': 'Filter out responses with word count'},
+	FILTER_SIZE: {'type': int, 'short': 'fs', 'help': 'Filter out responses with size'},
+	FILTER_WORDS: {'type': int, 'short': 'fw', 'help': 'Filter out responses with word count'},
 	FOLLOW_REDIRECT: {'is_flag': True, 'short': 'frd', 'help': 'Follow HTTP redirects'},
 	MATCH_CODES: {'type': str, 'short': 'mc', 'help': 'Match HTTP status codes e.g "201,300,301"'},
 	MATCH_REGEX: {'type': str, 'short': 'mr', 'help': 'Match responses with regular expression'},
-	MATCH_SIZE: {'type': str, 'short': 'ms', 'help': 'Match respones with size'},
-	MATCH_WORDS: {'type': str, 'short': 'mw', 'help': 'Match responses with word count'},
+	MATCH_SIZE: {'type': int, 'short': 'ms', 'help': 'Match responses with size'},
+	MATCH_WORDS: {'type': int, 'short': 'mw', 'help': 'Match responses with word count'},
 	METHOD: {'type': str, 'help': 'HTTP method to use for requests'},
 	PROXY: {'type': str, 'help': 'HTTP(s) / SOCKS5 proxy'},
 	RATE_LIMIT: {'type': int, 'short': 'rl', 'help': 'Rate limit, i.e max number of requests per second'},
@@ -56,7 +65,7 @@ OPTS_HTTP_CRAWLERS = OPTS_HTTP + [
 	MATCH_CODES
 ]

-OPTS_HTTP_FUZZERS = OPTS_HTTP_CRAWLERS + [WORDLIST]
+OPTS_HTTP_FUZZERS = OPTS_HTTP_CRAWLERS + [WORDLIST, DATA]

 OPTS_RECON = [
 	DELAY, PROXY, RATE_LIMIT, RETRIES, THREADS, TIMEOUT
@@ -132,7 +141,7 @@ class Vuln(Command):
 		if os.path.exists(cve_path):
 			with open(cve_path, 'r') as f:
 				return json.load(f)
-		debug(f'CVE {cve_id} not found in cache', sub='cve')
+		debug(f'{cve_id}: not found in cache', sub='cve')
 		return None

 	# @staticmethod
@@ -215,10 +224,10 @@ class Vuln(Command):
 			dict: vulnerability data.
 		"""
 		if CONFIG.runners.skip_exploit_search:
-			debug(f'Skip remote query for {exploit_id} since config.runners.skip_exploit_search is set.', sub='cve')
+			debug(f'{exploit_id}: skipped remote query since config.runners.skip_exploit_search is set.', sub='cve.vulners')
 			return None
 		if CONFIG.offline_mode:
-			debug(f'Skip remote query for {exploit_id} since config.offline_mode is set.', sub='cve')
+			debug(f'{exploit_id}: skipped remote query since config.offline_mode is set.', sub='cve.vulners')
 			return None
 		try:
 			resp = requests.get(f'https://vulners.com/githubexploit/{exploit_id}', timeout=5)
@@ -234,7 +243,7 @@ class Vuln(Command):
 			cve_regex = re.compile(r'(CVE(?:-|_)\d{4}(?:-|_)\d{4,7})', re.IGNORECASE)
 			matches = cve_regex.findall(str(content))
 			if not matches:
-				debug(f'{exploit_id}: No CVE found in https://vulners.com/githubexploit/{exploit_id}.', sub='cve')
+				debug(f'{exploit_id}: no matching CVE found in https://vulners.com/githubexploit/{exploit_id}.', sub='cve.vulners')
 				return None
 			cve_id = matches[0].replace('_', '-').upper()
 			cve_data = Vuln.lookup_cve(cve_id, *cpes)
@@ -242,7 +251,7 @@ class Vuln(Command):
 			return cve_data

 		except requests.RequestException as e:
-			debug(f'Failed remote query for {exploit_id} ({str(e)}).', sub='cve')
+			debug(f'{exploit_id}: failed remote query ({str(e)}).', sub='cve.vulners')
 			return None

 	@cache
@@ -256,20 +265,26 @@ class Vuln(Command):
 		Returns:
 			dict | None: CVE data, None if no response or empty response.
 		"""
+		if CONFIG.runners.skip_cve_search:
+			debug(f'{cve_id}: skipped remote query since config.runners.skip_cve_search is set.', sub='cve.circl')
+			return None
+		if CONFIG.offline_mode:
+			debug(f'{cve_id}: skipped remote query since config.offline_mode is set.', sub='cve.circl')
+			return None
 		try:
 			resp = requests.get(f'https://vulnerability.circl.lu/api/cve/{cve_id}', timeout=5)
 			resp.raise_for_status()
 			cve_info = resp.json()
 			if not cve_info:
-				debug(f'Empty response from https://vulnerability.circl.lu/api/cve/{cve_id}', sub='cve')
+				debug(f'{cve_id}: empty response from https://vulnerability.circl.lu/api/cve/{cve_id}', sub='cve.circl')
 				return None
 			cve_path = f'{CONFIG.dirs.data}/cves/{cve_id}.json'
 			with open(cve_path, 'w') as f:
 				f.write(json.dumps(cve_info, indent=2))
-			debug(f'Downloaded {cve_id} to {cve_path}', sub='cve')
+			debug(f'{cve_id}: downloaded to {cve_path}', sub='cve.circl')
 			return cve_info
 		except requests.RequestException as e:
-			debug(f'Failed remote query for {cve_id} ({str(e)}).', sub='cve')
+			debug(f'{cve_id}: failed remote query ({str(e)}).', sub='cve.circl')
 			return None

 	@cache
@@ -288,12 +303,6 @@ class Vuln(Command):

 		# Online CVE lookup
 		if not cve_info:
-			if CONFIG.runners.skip_cve_search:
-				debug(f'Skip remote query for {cve_id} since config.runners.skip_cve_search is set.', sub='cve')
-				return None
-			if CONFIG.offline_mode:
-				debug(f'Skip remote query for {cve_id} since config.offline_mode is set.', sub='cve')
-				return None
 			cve_info = Vuln.lookup_cve_from_cve_circle(cve_id)
 			if not cve_info:
 				return None
@@ -321,6 +330,10 @@ class Vuln(Command):
 			'cpes': cpes_affected,
 			'references': references
 		}
+		if not cpes_affected:
+			debug(f'{cve_id}: no CPEs found in CVE data', sub='cve.circl', verbose=True)
+		else:
+			debug(f'{cve_id}: {len(cpes_affected)} CPEs found in CVE data', sub='cve.circl', verbose=True)

 		# Match the CPE string against the affected products CPE FS strings from the CVE data if a CPE was passed.
 		# This allow to limit the number of False positives (high) that we get from nmap NSE vuln scripts like vulscan
@@ -328,15 +341,14 @@ class Vuln(Command):
 		# The check is not executed if no CPE was passed (sometimes nmap cannot properly detect a CPE) or if the CPE
 		# version cannot be determined.
 		cpe_match = False
-		tags = [cve_id]
-		if cpes:
+		tags = []
+		if cpes and cpes_affected:
 			for cpe in cpes:
 				cpe_fs = Vuln.get_cpe_fs(cpe)
 				if not cpe_fs:
 					debug(f'{cve_id}: Failed to parse CPE {cpe} with CPE parser', sub='cve.match', verbose=True)
 					tags.append('cpe-invalid')
 					continue
-				# cpe_version = cpe_obj.get_version()[0]
 				for cpe_affected in cpes_affected:
 					cpe_affected_fs = Vuln.get_cpe_fs(cpe_affected)
 					if not cpe_affected_fs:
@@ -345,12 +357,12 @@ class Vuln(Command):
 					debug(f'{cve_id}: Testing {cpe_fs} against {cpe_affected_fs}', sub='cve.match', verbose=True)
 					cpe_match = Vuln.match_cpes(cpe_fs, cpe_affected_fs)
 					if cpe_match:
-						debug(f'{cve_id}: CPE match found for {cpe}.', sub='cve')
+						debug(f'{cve_id}: CPE match found for {cpe}.', sub='cve.match')
 						tags.append('cpe-match')
 						break

 				if not cpe_match:
-					debug(f'{cve_id}: no CPE match found for {cpe}.', sub='cve')
+					debug(f'{cve_id}: no CPE match found for {cpe}.', sub='cve.match')

 		# Parse CVE id and CVSS
 		name = id = cve_info['id']
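
The new pre_process/process pair on the HEADER option above converts the CLI string into a dict, then back into per-header 'KEY:VALUE' strings for the task. A rough re-implementation of the round trip, assuming secator.utils.headers_to_dict splits on ';;' and then on the first ':' (the dict half is an assumption; process_headers matches the code above):

	def headers_to_dict(header_str):
		# Assumed behavior of secator.utils.headers_to_dict.
		headers = {}
		for part in header_str.split(';;'):
			key, _, value = part.strip().partition(':')
			headers[key.strip()] = value.strip()
		return headers

	def process_headers(headers_dict):
		return [f'{key}:{value}' for key, value in headers_dict.items()]

	assert process_headers(headers_to_dict('A: 1;; B: 2')) == ['A:1', 'B:2']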
secator/tasks/arjun.py CHANGED
@@ -4,7 +4,7 @@ import yaml
 from secator.decorators import task
 from secator.definitions import (OUTPUT_PATH, RATE_LIMIT, THREADS, DELAY, TIMEOUT, METHOD, WORDLIST,
                                  HEADER, URL, FOLLOW_REDIRECT)
-from secator.output_types import Info, Url, Warning, Error
+from secator.output_types import Info, Url, Warning
 from secator.runners import Command
 from secator.tasks._categories import OPTS
 from secator.utils import process_wordlist
@@ -14,9 +14,11 @@ from secator.utils import process_wordlist
 class arjun(Command):
 	"""HTTP Parameter Discovery Suite."""
 	cmd = 'arjun'
+	input_types = [URL]
+	output_types = [Url]
 	tags = ['url', 'fuzz', 'params']
 	input_flag = '-u'
-	input_types = [URL]
+	input_chunk_size = 1
 	version_flag = ' '
 	opts = {
 		'chunk_size': {'type': int, 'help': 'Control query/chunk size'},
@@ -49,7 +51,9 @@ class arjun(Command):
 		'casing': '--casing',
 		'follow_redirect': '--follow-redirect',
 	}
-	output_types = [Url]
+	opt_value_map = {
+		HEADER: lambda headers: "\\n".join(c.strip() for c in headers.split(";;"))
+	}
 	install_version = '2.2.7'
 	install_cmd = 'pipx install arjun==[install_version] --force'
 	install_github_handle = 's0md3v/Arjun'
@@ -75,7 +79,7 @@ class arjun(Command):
 	@staticmethod
 	def on_cmd_done(self):
 		if not os.path.exists(self.output_path):
-			yield Error(message=f'Could not find JSON results in {self.output_path}')
+			# yield Error(message=f'Could not find JSON results in {self.output_path}')
 			return
 		yield Info(message=f'JSON results saved to {self.output_path}')
 		with open(self.output_path, 'r') as f:
@@ -87,6 +91,6 @@ class arjun(Command):
 			for param in values['params']:
 				yield Url(
 					url=url + '?' + param + '=' + 'FUZZ',
-					headers=values['headers'],
+					request_headers=values['headers'],
 					method=values['method'],
 				)
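
The opt_value_map entry above rewrites secator's ';;'-separated header string into a literal '\n'-separated value, which appears to be the form arjun's header flag expects. The transformation in isolation:

	to_arjun = lambda headers: "\\n".join(c.strip() for c in headers.split(";;"))
	print(to_arjun('User-Agent: secator;; Accept: */*'))
	# User-Agent: secator\nAccept: */*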