secator 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of secator might be problematic.

Files changed (114)
  1. secator/__init__.py +0 -0
  2. secator/celery.py +482 -0
  3. secator/cli.py +617 -0
  4. secator/config.py +137 -0
  5. secator/configs/__init__.py +0 -0
  6. secator/configs/profiles/__init__.py +0 -0
  7. secator/configs/profiles/aggressive.yaml +7 -0
  8. secator/configs/profiles/default.yaml +9 -0
  9. secator/configs/profiles/stealth.yaml +7 -0
  10. secator/configs/scans/__init__.py +0 -0
  11. secator/configs/scans/domain.yaml +18 -0
  12. secator/configs/scans/host.yaml +14 -0
  13. secator/configs/scans/network.yaml +17 -0
  14. secator/configs/scans/subdomain.yaml +8 -0
  15. secator/configs/scans/url.yaml +12 -0
  16. secator/configs/workflows/__init__.py +0 -0
  17. secator/configs/workflows/cidr_recon.yaml +28 -0
  18. secator/configs/workflows/code_scan.yaml +11 -0
  19. secator/configs/workflows/host_recon.yaml +41 -0
  20. secator/configs/workflows/port_scan.yaml +34 -0
  21. secator/configs/workflows/subdomain_recon.yaml +33 -0
  22. secator/configs/workflows/url_crawl.yaml +29 -0
  23. secator/configs/workflows/url_dirsearch.yaml +29 -0
  24. secator/configs/workflows/url_fuzz.yaml +35 -0
  25. secator/configs/workflows/url_nuclei.yaml +11 -0
  26. secator/configs/workflows/url_vuln.yaml +55 -0
  27. secator/configs/workflows/user_hunt.yaml +10 -0
  28. secator/configs/workflows/wordpress.yaml +14 -0
  29. secator/decorators.py +309 -0
  30. secator/definitions.py +165 -0
  31. secator/exporters/__init__.py +12 -0
  32. secator/exporters/_base.py +3 -0
  33. secator/exporters/csv.py +30 -0
  34. secator/exporters/gdrive.py +118 -0
  35. secator/exporters/json.py +15 -0
  36. secator/exporters/table.py +7 -0
  37. secator/exporters/txt.py +25 -0
  38. secator/hooks/__init__.py +0 -0
  39. secator/hooks/mongodb.py +212 -0
  40. secator/output_types/__init__.py +24 -0
  41. secator/output_types/_base.py +95 -0
  42. secator/output_types/exploit.py +50 -0
  43. secator/output_types/ip.py +33 -0
  44. secator/output_types/port.py +45 -0
  45. secator/output_types/progress.py +35 -0
  46. secator/output_types/record.py +34 -0
  47. secator/output_types/subdomain.py +42 -0
  48. secator/output_types/tag.py +46 -0
  49. secator/output_types/target.py +30 -0
  50. secator/output_types/url.py +76 -0
  51. secator/output_types/user_account.py +41 -0
  52. secator/output_types/vulnerability.py +97 -0
  53. secator/report.py +107 -0
  54. secator/rich.py +124 -0
  55. secator/runners/__init__.py +12 -0
  56. secator/runners/_base.py +833 -0
  57. secator/runners/_helpers.py +153 -0
  58. secator/runners/command.py +638 -0
  59. secator/runners/scan.py +65 -0
  60. secator/runners/task.py +106 -0
  61. secator/runners/workflow.py +135 -0
  62. secator/serializers/__init__.py +8 -0
  63. secator/serializers/dataclass.py +33 -0
  64. secator/serializers/json.py +15 -0
  65. secator/serializers/regex.py +17 -0
  66. secator/tasks/__init__.py +10 -0
  67. secator/tasks/_categories.py +304 -0
  68. secator/tasks/cariddi.py +102 -0
  69. secator/tasks/dalfox.py +65 -0
  70. secator/tasks/dirsearch.py +90 -0
  71. secator/tasks/dnsx.py +56 -0
  72. secator/tasks/dnsxbrute.py +34 -0
  73. secator/tasks/feroxbuster.py +91 -0
  74. secator/tasks/ffuf.py +86 -0
  75. secator/tasks/fping.py +44 -0
  76. secator/tasks/gau.py +47 -0
  77. secator/tasks/gf.py +33 -0
  78. secator/tasks/gospider.py +71 -0
  79. secator/tasks/grype.py +79 -0
  80. secator/tasks/h8mail.py +81 -0
  81. secator/tasks/httpx.py +99 -0
  82. secator/tasks/katana.py +133 -0
  83. secator/tasks/maigret.py +78 -0
  84. secator/tasks/mapcidr.py +32 -0
  85. secator/tasks/msfconsole.py +174 -0
  86. secator/tasks/naabu.py +52 -0
  87. secator/tasks/nmap.py +344 -0
  88. secator/tasks/nuclei.py +97 -0
  89. secator/tasks/searchsploit.py +52 -0
  90. secator/tasks/subfinder.py +40 -0
  91. secator/tasks/wpscan.py +179 -0
  92. secator/utils.py +445 -0
  93. secator/utils_test.py +183 -0
  94. secator-0.0.1.dist-info/LICENSE +60 -0
  95. secator-0.0.1.dist-info/METADATA +199 -0
  96. secator-0.0.1.dist-info/RECORD +114 -0
  97. secator-0.0.1.dist-info/WHEEL +5 -0
  98. secator-0.0.1.dist-info/entry_points.txt +2 -0
  99. secator-0.0.1.dist-info/top_level.txt +2 -0
  100. tests/__init__.py +0 -0
  101. tests/integration/__init__.py +0 -0
  102. tests/integration/inputs.py +42 -0
  103. tests/integration/outputs.py +392 -0
  104. tests/integration/test_scans.py +82 -0
  105. tests/integration/test_tasks.py +103 -0
  106. tests/integration/test_workflows.py +163 -0
  107. tests/performance/__init__.py +0 -0
  108. tests/performance/loadtester.py +56 -0
  109. tests/unit/__init__.py +0 -0
  110. tests/unit/test_celery.py +39 -0
  111. tests/unit/test_scans.py +0 -0
  112. tests/unit/test_serializers.py +51 -0
  113. tests/unit/test_tasks.py +348 -0
  114. tests/unit/test_workflows.py +96 -0
secator/tasks/cariddi.py ADDED
@@ -0,0 +1,102 @@
+ import json
+
+ from secator.decorators import task
+ from secator.definitions import (DELAY, DEPTH, FILTER_CODES, FILTER_REGEX,
+                                  FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT,
+                                  HEADER, MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
+                                  MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED,
+                                  OPT_PIPE_INPUT, PROXY, RATE_LIMIT, RETRIES,
+                                  THREADS, TIMEOUT, URL, USER_AGENT)
+ from secator.output_types import Tag, Url
+ from secator.tasks._categories import HttpCrawler
+
+
+ @task()
+ class cariddi(HttpCrawler):
+     """Crawl endpoints, secrets, api keys, extensions, tokens..."""
+     cmd = 'cariddi -info -s -err -e -ext 1'
+     input_type = URL
+     input_flag = OPT_PIPE_INPUT
+     output_types = [Url, Tag]
+     file_flag = OPT_PIPE_INPUT
+     json_flag = '-json'
+     opt_key_map = {
+         HEADER: 'headers',
+         DELAY: 'd',
+         DEPTH: OPT_NOT_SUPPORTED,
+         FILTER_CODES: OPT_NOT_SUPPORTED,
+         FILTER_REGEX: OPT_NOT_SUPPORTED,
+         FILTER_SIZE: OPT_NOT_SUPPORTED,
+         FILTER_WORDS: OPT_NOT_SUPPORTED,
+         FOLLOW_REDIRECT: OPT_NOT_SUPPORTED,
+         MATCH_CODES: OPT_NOT_SUPPORTED,
+         MATCH_REGEX: OPT_NOT_SUPPORTED,
+         MATCH_SIZE: OPT_NOT_SUPPORTED,
+         MATCH_WORDS: OPT_NOT_SUPPORTED,
+         METHOD: OPT_NOT_SUPPORTED,
+         PROXY: 'proxy',
+         RATE_LIMIT: OPT_NOT_SUPPORTED,
+         RETRIES: OPT_NOT_SUPPORTED,
+         THREADS: 'c',
+         TIMEOUT: 't',
+         USER_AGENT: 'ua'
+     }
+     item_loaders = []
+     install_cmd = 'go install -v github.com/edoardottt/cariddi/cmd/cariddi@latest'
+     encoding = 'ansi'
+     proxychains = False
+     proxy_socks5 = True  # with leaks... https://github.com/edoardottt/cariddi/issues/122
+     proxy_http = True  # with leaks... https://github.com/edoardottt/cariddi/issues/122
+     profile = 'cpu'
+
+     @staticmethod
+     def item_loader(self, line):
+         items = []
+         try:
+             item = json.loads(line)
+             url_item = {k: v for k, v in item.items() if k != 'matches'}
+             url = url_item[URL]
+             items.append(url_item)
+             matches = item.get('matches', {})
+             params = matches.get('parameters', [])
+             errors = matches.get('errors', [])
+             secrets = matches.get('secrets', [])
+             infos = matches.get('infos', [])
+
+             for param in params:
+                 param_name = param['name']
+                 for attack in param['attacks']:
+                     extra_data = {'param': param_name, 'source': 'url'}
+                     item = {
+                         'name': attack + ' param',
+                         'match': url,
+                         'extra_data': extra_data
+                     }
+                     items.append(item)
+
+             for error in errors:
+                 match = error['match']
+                 match = (match[:1000] + '...TRUNCATED') if len(match) > 1000 else match  # truncate as this can be a very long match
+                 error['extra_data'] = {'error': match, 'source': 'body'}
+                 error['match'] = url
+                 items.append(error)
+
+             for secret in secrets:
+                 match = secret['match']
+                 secret['extra_data'] = {'secret': match, 'source': 'body'}
+                 secret['match'] = url
+                 items.append(secret)
+
+             for info in infos:
+                 CARIDDI_IGNORE_LIST = ['BTC address']
+                 if info['name'] in CARIDDI_IGNORE_LIST:
+                     continue
+                 match = info['match']
+                 info['extra_data'] = {'info': match, 'source': 'body'}
+                 info['match'] = url
+                 items.append(info)
+
+         except json.decoder.JSONDecodeError:
+             pass
+
+         return items
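The loader above flattens one cariddi -json line into a Url item plus Tag items for each match. A minimal sketch of what it returns; the sample line's schema is inferred from the loader itself (not from cariddi's docs), and the direct static call assumes @task() returns the class unchanged:

import json

# Hypothetical cariddi -json output line, shaped the way item_loader reads it.
sample = json.dumps({
    'url': 'https://example.com/page?q=1',
    'matches': {
        'parameters': [{'name': 'q', 'attacks': ['xss']}],
        'errors': [],
        'secrets': [{'name': 'AWS key', 'match': 'AKIA...'}],
        'infos': []
    }
})
items = cariddi.item_loader(None, sample)  # self is unused by this loader
# -> one Url dict, one 'xss param' tag dict, one 'AWS key' secret tag dict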
secator/tasks/dalfox.py ADDED
@@ -0,0 +1,65 @@
+ from urllib.parse import urlparse
+
+ from secator.decorators import task
+ from secator.definitions import (CONFIDENCE, DELAY, EXTRA_DATA, FOLLOW_REDIRECT,
+                                  HEADER, ID, MATCHED_AT, METHOD, NAME,
+                                  OPT_NOT_SUPPORTED, PROVIDER, PROXY, RATE_LIMIT,
+                                  SEVERITY, TAGS, THREADS, TIMEOUT, URL,
+                                  USER_AGENT)
+ from secator.output_types import Vulnerability
+ from secator.tasks._categories import VulnHttp
+
+ DALFOX_TYPE_MAP = {
+     'G': 'Grep XSS',
+     'R': 'Reflected XSS',
+     'V': 'Verified XSS'
+ }
+
+
+ @task()
+ class dalfox(VulnHttp):
+     """Powerful open source XSS scanning tool."""
+     cmd = 'dalfox'
+     input_type = URL
+     input_flag = 'url'
+     file_flag = 'file'
+     json_flag = '--format json'
+     opt_prefix = '--'
+     opt_key_map = {
+         HEADER: 'header',
+         DELAY: 'delay',
+         FOLLOW_REDIRECT: 'follow-redirects',
+         METHOD: 'method',
+         PROXY: 'proxy',
+         RATE_LIMIT: OPT_NOT_SUPPORTED,
+         THREADS: 'worker',
+         TIMEOUT: 'timeout',
+         USER_AGENT: 'user-agent'
+     }
+     output_map = {
+         Vulnerability: {
+             ID: lambda x: None,
+             NAME: lambda x: DALFOX_TYPE_MAP[x['type']],
+             PROVIDER: 'dalfox',
+             TAGS: lambda x: [x['cwe']] if x['cwe'] else [],
+             CONFIDENCE: lambda x: 'high',
+             MATCHED_AT: lambda x: urlparse(x['data'])._replace(query='').geturl(),
+             EXTRA_DATA: lambda x: {
+                 k: v for k, v in x.items()
+                 if k not in ['type', 'severity', 'cwe']
+             },
+             SEVERITY: lambda x: x['severity'].lower()
+         }
+     }
+     install_cmd = 'go install -v github.com/hahwul/dalfox/v2@latest'
+     encoding = 'ansi'
+     proxychains = False
+     proxychains_flavor = 'proxychains4'
+     proxy_socks5 = True
+     proxy_http = True
+     profile = 'cpu'
+
+     @staticmethod
+     def on_line(self, line):
+         line = line.rstrip(',')
+         return line
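Two details worth noting: the MATCHED_AT lambda strips the query string from the URL dalfox reports, and the on_line hook trims the trailing comma separating findings (presumably because dalfox's JSON output is a multi-line array, so trimming lets each line parse on its own). A stdlib-only illustration with made-up values:

from urllib.parse import urlparse

data = 'https://example.com/search?q=payload'
print(urlparse(data)._replace(query='').geturl())
# https://example.com/search

line = '{"type": "V", "severity": "High", "cwe": "CWE-79"},'
print(line.rstrip(','))  # now standalone-parseable JSON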
secator/tasks/dirsearch.py ADDED
@@ -0,0 +1,90 @@
+ import os
+
+ import yaml
+
+ from secator.decorators import task
+ from secator.definitions import (CONTENT_LENGTH, CONTENT_TYPE, DELAY, DEPTH,
+                                  FILTER_CODES, FILTER_REGEX, FILTER_SIZE,
+                                  FILTER_WORDS, FOLLOW_REDIRECT, HEADER,
+                                  MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
+                                  MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED, PROXY,
+                                  RATE_LIMIT, RETRIES, STATUS_CODE, DATA_FOLDER,
+                                  THREADS, TIMEOUT, USER_AGENT, WORDLIST)
+ from secator.output_types import Url
+ from secator.tasks._categories import HttpFuzzer
+ from secator.utils import get_file_timestamp
+
+
+ @task()
+ class dirsearch(HttpFuzzer):
+     """Advanced web path brute-forcer."""
+     cmd = 'dirsearch'
+     input_flag = '-u'
+     file_flag = '-l'
+     json_flag = '--format json'
+     opt_prefix = '--'
+     encoding = 'ansi'
+     opt_key_map = {
+         HEADER: 'header',
+         DELAY: 'delay',
+         DEPTH: 'max-recursion-depth',
+         FILTER_CODES: 'exclude-status',
+         FILTER_REGEX: 'exclude-regex',
+         FILTER_SIZE: 'exclude-sizes',
+         FILTER_WORDS: OPT_NOT_SUPPORTED,
+         FOLLOW_REDIRECT: 'follow-redirects',
+         MATCH_CODES: 'include-status',
+         MATCH_REGEX: OPT_NOT_SUPPORTED,
+         MATCH_SIZE: OPT_NOT_SUPPORTED,
+         MATCH_WORDS: OPT_NOT_SUPPORTED,
+         METHOD: 'http-method',
+         PROXY: 'proxy',
+         RATE_LIMIT: 'max-rate',
+         RETRIES: 'retries',
+         THREADS: 'threads',
+         TIMEOUT: 'timeout',
+         USER_AGENT: 'user-agent',
+         WORDLIST: 'wordlists',
+     }
+     output_map = {
+         Url: {
+             CONTENT_LENGTH: 'content-length',
+             CONTENT_TYPE: 'content-type',
+             STATUS_CODE: 'status'
+         }
+     }
+     install_cmd = 'pip3 install dirsearch'
+     proxychains = True
+     proxy_socks5 = True
+     proxy_http = True
+     profile = 'io'
+
+     def yielder(self):
+         prev = self.print_item_count
+         self.print_item_count = False
+         list(super().yielder())
+         if self.return_code != 0:
+             return
+         self.results = []
+         if not self.output_json:
+             return
+         note = f'dirsearch JSON results saved to {self.output_path}'
+         if self.print_line:
+             self._print(note)
+         if os.path.exists(self.output_path):
+             with open(self.output_path, 'r') as f:
+                 results = yaml.safe_load(f.read()).get('results', [])
+             for item in results:
+                 item = self._process_item(item)
+                 if not item:
+                     continue
+                 yield item
+         self.print_item_count = prev
+
+     @staticmethod
+     def on_init(self):
+         self.output_path = self.get_opt_value('output_path')
+         if not self.output_path:
+             timestr = get_file_timestamp()
+             self.output_path = f'{DATA_FOLDER}/dirsearch_{timestr}.json'
+         self.cmd += f' -o {self.output_path}'
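Unlike most tasks here, dirsearch's yielder drains the live output first, then re-parses the JSON report file; yaml.safe_load works for this because a JSON document is also valid YAML. A standalone check with a made-up report:

import yaml

report = '{"results": [{"url": "https://example.com/admin/", "status": 301, "content-length": 0}]}'
print(yaml.safe_load(report).get('results', []))
# [{'url': 'https://example.com/admin/', 'status': 301, 'content-length': 0}]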
secator/tasks/dnsx.py ADDED
@@ -0,0 +1,56 @@
+ from secator.decorators import task
+ from secator.definitions import (OPT_PIPE_INPUT, RATE_LIMIT, RETRIES, THREADS)
+ from secator.output_types import Record
+ from secator.tasks._categories import ReconDns
+ import json
+
+
+ @task()
+ class dnsx(ReconDns):
+     """dnsx is a fast and multi-purpose DNS toolkit designed for running DNS probes through the retryabledns library."""
+     cmd = 'dnsx -resp -a -aaaa -cname -mx -ns -txt -srv -ptr -soa -axfr -caa'
+     json_flag = '-json'
+     input_flag = OPT_PIPE_INPUT
+     file_flag = OPT_PIPE_INPUT
+     output_types = [Record]
+     opt_key_map = {
+         RATE_LIMIT: 'rate-limit',
+         RETRIES: 'retry',
+         THREADS: 'threads',
+     }
+     opts = {
+         'trace': {'is_flag': True, 'default': False, 'help': 'Perform dns tracing'},
+         'resolver': {'type': str, 'short': 'r', 'help': 'List of resolvers to use (file or comma separated)'},
+         'wildcard_domain': {'type': str, 'short': 'wd', 'help': 'Domain name for wildcard filtering'},
+     }
+
+     install_cmd = 'go install -v github.com/projectdiscovery/dnsx/cmd/dnsx@latest'
+     profile = 'io'
+
+     @staticmethod
+     def item_loader(self, line):
+         items = []
+         try:
+             item = json.loads(line)
+             if self.orig:  # original dnsx output
+                 return item
+             host = item['host']
+             record_types = ['a', 'aaaa', 'cname', 'mx', 'ns', 'txt', 'srv', 'ptr', 'soa', 'axfr', 'caa']
+             for _type in record_types:
+                 values = item.get(_type, [])
+                 for value in values:
+                     name = value
+                     extra_data = {}
+                     if isinstance(value, dict):
+                         name = value['name']
+                         extra_data = {k: v for k, v in value.items() if k != 'name'}
+                     items.append({
+                         'host': host,
+                         'name': name,
+                         'type': _type.upper(),
+                         'extra_data': extra_data
+                     })
+         except json.decoder.JSONDecodeError:
+             pass
+
+         return items
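item_loader fans one dnsx -json line out into one Record dict per response value. A minimal sketch; the sample line's schema is inferred from the loader, and the stand-in context only provides the self.orig flag the loader reads:

import json

class Ctx:  # stand-in for the runner instance
    orig = False

sample = json.dumps({'host': 'example.com', 'a': ['93.184.216.34'], 'ns': ['a.iana-servers.net']})
print(dnsx.item_loader(Ctx(), sample))
# [{'host': 'example.com', 'name': '93.184.216.34', 'type': 'A', 'extra_data': {}},
#  {'host': 'example.com', 'name': 'a.iana-servers.net', 'type': 'NS', 'extra_data': {}}]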
secator/tasks/dnsxbrute.py ADDED
@@ -0,0 +1,34 @@
+ from secator.decorators import task
+ from secator.definitions import (DEFAULT_DNS_WORDLIST, DOMAIN, HOST, RATE_LIMIT, RETRIES, THREADS, WORDLIST, EXTRA_DATA)
+ from secator.output_types import Subdomain
+ from secator.tasks._categories import ReconDns
+
+
+ @task()
+ class dnsxbrute(ReconDns):
+     """dnsx is a fast and multi-purpose DNS toolkit designed for running DNS probes through the retryabledns library (used here in brute-force mode)."""
+     cmd = 'dnsx'
+     json_flag = '-json'
+     input_flag = '-domain'
+     file_flag = '-domain'
+     opt_key_map = {
+         RATE_LIMIT: 'rate-limit',
+         RETRIES: 'retry',
+         THREADS: 'threads',
+     }
+     opts = {
+         WORDLIST: {'type': str, 'short': 'w', 'default': DEFAULT_DNS_WORDLIST, 'help': 'Wordlist'},
+         'trace': {'is_flag': True, 'default': False, 'help': 'Perform dns tracing'},
+     }
+     output_map = {
+         Subdomain: {
+             HOST: 'host',
+             DOMAIN: lambda x: ".".join(x['host'].split('.')[1:]),
+             EXTRA_DATA: lambda x: {
+                 'resolver': x['resolver'],
+                 'status_code': x['status_code']
+             }
+         }
+     }
+     install_cmd = 'go install -v github.com/projectdiscovery/dnsx/cmd/dnsx@latest'
+     profile = 'cpu'
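The DOMAIN lambda derives the parent domain by dropping the left-most label of the resolved host; note it strips exactly one label, so a deeper hit like 'a.b.example.com' maps to 'b.example.com', not 'example.com':

host = 'admin.example.com'
print('.'.join(host.split('.')[1:]))
# example.com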
secator/tasks/feroxbuster.py ADDED
@@ -0,0 +1,91 @@
+ import shlex
+ from pathlib import Path
+
+ from secator.decorators import task
+ from secator.definitions import (CONTENT_TYPE, DELAY, DEPTH, FILTER_CODES,
+                                  FILTER_REGEX, FILTER_SIZE, FILTER_WORDS,
+                                  FOLLOW_REDIRECT, HEADER, LINES, MATCH_CODES,
+                                  MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, METHOD,
+                                  OPT_NOT_SUPPORTED, OPT_PIPE_INPUT, PROXY,
+                                  RATE_LIMIT, RETRIES, STATUS_CODE, DATA_FOLDER,
+                                  THREADS, TIMEOUT, USER_AGENT, WORDLIST, WORDS, DEFAULT_FEROXBUSTER_FLAGS)
+ from secator.output_types import Url
+ from secator.tasks._categories import HttpFuzzer
+ from secator.utils import get_file_timestamp
+
+
+ @task()
+ class feroxbuster(HttpFuzzer):
+     """Simple, fast, recursive content discovery tool written in Rust."""
+     cmd = f'feroxbuster {DEFAULT_FEROXBUSTER_FLAGS}'
+     input_flag = '--url'
+     input_chunk_size = 1
+     file_flag = OPT_PIPE_INPUT
+     json_flag = '--json'
+     opt_prefix = '--'
+     opts = {
+         # 'auto_tune': {'is_flag': True, 'default': False, 'help': 'Automatically lower scan rate when too many errors'},
+         'extract_links': {'is_flag': True, 'default': False, 'help': 'Extract links from response body'},
+         'collect_backups': {'is_flag': True, 'default': False, 'help': 'Request likely backup exts for urls'},
+         'collect_extensions': {'is_flag': True, 'default': False, 'help': 'Discover exts and add to --extensions'},
+         'collect_words': {'is_flag': True, 'default': False, 'help': 'Discover important words and add to wordlist'},
+     }
+     opt_key_map = {
+         HEADER: 'headers',
+         DELAY: OPT_NOT_SUPPORTED,
+         DEPTH: 'depth',
+         FILTER_CODES: 'filter-status',
+         FILTER_REGEX: 'filter-regex',
+         FILTER_SIZE: 'filter-size',
+         FILTER_WORDS: 'filter-words',
+         FOLLOW_REDIRECT: 'redirects',
+         MATCH_CODES: 'status-codes',
+         MATCH_REGEX: OPT_NOT_SUPPORTED,
+         MATCH_SIZE: OPT_NOT_SUPPORTED,
+         MATCH_WORDS: OPT_NOT_SUPPORTED,
+         METHOD: 'methods',
+         PROXY: 'proxy',
+         RATE_LIMIT: 'rate-limit',
+         RETRIES: OPT_NOT_SUPPORTED,
+         THREADS: 'threads',
+         TIMEOUT: 'timeout',
+         USER_AGENT: 'user-agent',
+         WORDLIST: 'wordlist'
+     }
+     output_map = {
+         Url: {
+             STATUS_CODE: 'status',
+             CONTENT_TYPE: lambda x: x['headers'].get('content-type'),
+             LINES: 'line_count',
+             WORDS: 'word_count'
+         }
+     }
+     install_cmd = (
+         'sudo apt install -y unzip && '
+         'curl -sL https://raw.githubusercontent.com/epi052/feroxbuster/master/install-nix.sh | '
+         'bash && sudo mv feroxbuster /usr/local/bin'
+     )
+     proxychains = False
+     proxy_socks5 = True
+     proxy_http = True
+     profile = 'cpu'
+
+     @staticmethod
+     def on_init(self):
+         self.output_path = self.get_opt_value('output_path')
+         if not self.output_path:
+             timestr = get_file_timestamp()
+             self.output_path = f'{DATA_FOLDER}/feroxbuster_{timestr}.json'
+         Path(self.output_path).touch()
+         self.cmd += f' --output {self.output_path}'
+
+     @staticmethod
+     def on_start(self):
+         if self.input_path:
+             self.cmd += ' --stdin'
+         self.cmd += f' & tail --pid=$! -f {shlex.quote(self.output_path)}'
+         self.shell = True
+
+     @staticmethod
+     def validate_item(self, item):
+         return item['type'] == 'response'
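on_init routes results to a JSON file, and on_start turns the command into a small shell pipeline: feroxbuster runs in the background while tail --pid=$! -f follows the output file and exits when feroxbuster does, so JSON lines stream back to the runner as they are written. A sketch of the composed command string (URL and path are illustrative, not produced by a real run):

import shlex

output_path = '/tmp/feroxbuster_20230101_120000.json'
cmd = f'feroxbuster --url https://example.com --json --output {output_path}'
cmd += f' & tail --pid=$! -f {shlex.quote(output_path)}'
print(cmd)
# feroxbuster --url https://example.com --json --output /tmp/... & tail --pid=$! -f /tmp/...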
secator/tasks/ffuf.py ADDED
@@ -0,0 +1,86 @@
+ from secator.decorators import task
+ from secator.definitions import (AUTO_CALIBRATION, CONTENT_LENGTH,
+                                  CONTENT_TYPE, DELAY, DEPTH, EXTRA_DATA,
+                                  FILTER_CODES, FILTER_REGEX, FILTER_SIZE,
+                                  FILTER_WORDS, FOLLOW_REDIRECT, HEADER,
+                                  MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
+                                  MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED,
+                                  PERCENT, PROXY, RATE_LIMIT, RETRIES,
+                                  STATUS_CODE, THREADS, TIME, TIMEOUT,
+                                  USER_AGENT, WORDLIST)
+ from secator.output_types import Progress, Url
+ from secator.serializers import JSONSerializer, RegexSerializer
+ from secator.tasks._categories import HttpFuzzer
+
+ FFUF_PROGRESS_REGEX = r':: Progress: \[(?P<count>\d+)/(?P<total>\d+)\] :: Job \[\d/\d\] :: (?P<rps>\d+) req/sec :: Duration: \[(?P<duration>[\d:]+)\] :: Errors: (?P<errors>\d+) ::'  # noqa: E501
+
+
+ @task()
+ class ffuf(HttpFuzzer):
+     """Fast web fuzzer written in Go."""
+     cmd = 'ffuf -noninteractive -recursion'
+     input_flag = '-u'
+     input_chunk_size = 1
+     file_flag = None
+     json_flag = '-json'
+     item_loaders = [
+         JSONSerializer(),
+         RegexSerializer(FFUF_PROGRESS_REGEX, fields=['count', 'total', 'rps', 'duration', 'errors'])
+     ]
+     opts = {
+         AUTO_CALIBRATION: {'is_flag': True, 'short': 'ac', 'help': 'Auto-calibration'},
+     }
+     opt_key_map = {
+         HEADER: 'H',
+         DELAY: 'p',
+         DEPTH: 'recursion-depth',
+         FILTER_CODES: 'fc',
+         FILTER_REGEX: 'fr',
+         FILTER_SIZE: 'fs',
+         FILTER_WORDS: 'fw',
+         FOLLOW_REDIRECT: 'r',
+         MATCH_CODES: 'mc',
+         MATCH_REGEX: 'mr',
+         MATCH_SIZE: 'ms',
+         MATCH_WORDS: 'mw',
+         METHOD: 'X',
+         PROXY: 'x',
+         RATE_LIMIT: 'rate',
+         RETRIES: OPT_NOT_SUPPORTED,
+         THREADS: 't',
+         TIMEOUT: 'timeout',
+         USER_AGENT: OPT_NOT_SUPPORTED,
+
+         # ffuf opts
+         WORDLIST: 'w',
+         AUTO_CALIBRATION: 'ac',
+     }
+     output_types = [Url, Progress]
+     output_map = {
+         Url: {
+             STATUS_CODE: 'status',
+             CONTENT_LENGTH: 'length',
+             CONTENT_TYPE: 'content-type',
+             TIME: lambda x: x['duration'] * 10**-9
+         },
+         Progress: {
+             PERCENT: lambda x: int(int(x['count']) * 100 / int(x['total'])),
+             EXTRA_DATA: lambda x: {k: v for k, v in x.items() if k not in ['count', 'total', 'errors']}
+         },
+     }
+     encoding = 'ansi'
+     install_cmd = (
+         'go install -v github.com/ffuf/ffuf@latest && '
+         'sudo git clone https://github.com/danielmiessler/SecLists /usr/share/seclists || true'
+     )
+     proxychains = False
+     proxy_socks5 = True
+     proxy_http = True
+     profile = 'io'
+
+     @staticmethod
+     def on_item(self, item):
+         item.method = self.get_opt_value(METHOD) or 'GET'
+         return item
+
+     # TODO: write custom item_loader to pick up Progress items too
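The RegexSerializer turns ffuf's interactive progress line into Progress items. A quick check of FFUF_PROGRESS_REGEX against a hand-written sample line (crafted to match the pattern, not captured from a real run):

import re

regex = r':: Progress: \[(?P<count>\d+)/(?P<total>\d+)\] :: Job \[\d/\d\] :: (?P<rps>\d+) req/sec :: Duration: \[(?P<duration>[\d:]+)\] :: Errors: (?P<errors>\d+) ::'
line = ':: Progress: [1200/4613] :: Job [1/1] :: 428 req/sec :: Duration: [0:00:03] :: Errors: 0 ::'
m = re.search(regex, line)
print(m.groupdict())
# {'count': '1200', 'total': '4613', 'rps': '428', 'duration': '0:00:03', 'errors': '0'}
print(int(int(m['count']) * 100 / int(m['total'])))  # the PERCENT lambda -> 26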
secator/tasks/fping.py ADDED
@@ -0,0 +1,44 @@
+ import validators
+
+ from secator.decorators import task
+ from secator.definitions import (DELAY, IP, OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT,
+                                  RETRIES, THREADS, TIMEOUT)
+ from secator.output_types import Ip
+ from secator.tasks._categories import ReconIp
+
+
+ @task()
+ class fping(ReconIp):
+     """Send ICMP echo probes to network hosts, similar to ping, but much better."""
+     cmd = 'fping -a'
+     file_flag = '-f'
+     input_flag = None
+     ignore_return_code = True
+     opt_prefix = '--'
+     opt_key_map = {
+         DELAY: 'period',
+         PROXY: OPT_NOT_SUPPORTED,
+         RATE_LIMIT: OPT_NOT_SUPPORTED,
+         RETRIES: 'retry',
+         TIMEOUT: 'timeout',
+         THREADS: OPT_NOT_SUPPORTED
+     }
+     opt_value_map = {
+         DELAY: lambda x: x * 1000,  # convert s to ms
+         TIMEOUT: lambda x: x * 1000  # convert s to ms
+     }
+     input_type = IP
+     output_types = [Ip]
+     install_cmd = 'sudo apt install -y fping'
+
+     @staticmethod
+     def item_loader(self, line):
+         if validators.ipv4(line) or validators.ipv6(line):
+             return {'ip': line, 'alive': True}
+         return None
+
+     @staticmethod
+     def on_line(self, line):
+         if 'Unreachable' in line:
+             return ''  # discard line as it pollutes output
+         return line
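Since fping -a prints one alive host per line, item_loader simply keeps lines that are bare IP addresses and tags them alive. A standalone check with the validators package (sample lines are illustrative):

import validators

for line in ['192.168.0.1', '2606:4700::1111', 'ICMP Host Unreachable from 10.0.0.1']:
    if validators.ipv4(line) or validators.ipv6(line):
        print({'ip': line, 'alive': True})
# {'ip': '192.168.0.1', 'alive': True}
# {'ip': '2606:4700::1111', 'alive': True}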
secator/tasks/gau.py ADDED
@@ -0,0 +1,47 @@
+ from secator.decorators import task
+ from secator.definitions import (DELAY, DEPTH, FILTER_CODES, FILTER_REGEX,
+                                  FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT,
+                                  HEADER, MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
+                                  MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED,
+                                  OPT_PIPE_INPUT, PROXY, RATE_LIMIT, RETRIES,
+                                  THREADS, TIMEOUT, USER_AGENT)
+ from secator.tasks._categories import HttpCrawler
+
+
+ @task()
+ class gau(HttpCrawler):
+     """Fetch known URLs from AlienVault's Open Threat Exchange, the Wayback Machine, Common Crawl, and URLScan."""
+     cmd = 'gau'
+     file_flag = OPT_PIPE_INPUT
+     json_flag = '--json'
+     opt_prefix = '--'
+     opt_key_map = {
+         HEADER: OPT_NOT_SUPPORTED,
+         DELAY: OPT_NOT_SUPPORTED,
+         DEPTH: OPT_NOT_SUPPORTED,
+         FILTER_CODES: 'fc',
+         FILTER_REGEX: OPT_NOT_SUPPORTED,
+         FILTER_SIZE: OPT_NOT_SUPPORTED,
+         FILTER_WORDS: OPT_NOT_SUPPORTED,
+         MATCH_CODES: 'mc',
+         MATCH_REGEX: OPT_NOT_SUPPORTED,
+         MATCH_SIZE: OPT_NOT_SUPPORTED,
+         MATCH_WORDS: OPT_NOT_SUPPORTED,
+         FOLLOW_REDIRECT: OPT_NOT_SUPPORTED,
+         METHOD: OPT_NOT_SUPPORTED,
+         PROXY: 'proxy',
+         RATE_LIMIT: OPT_NOT_SUPPORTED,
+         RETRIES: 'retries',
+         THREADS: 'threads',
+         TIMEOUT: 'timeout',
+         USER_AGENT: OPT_NOT_SUPPORTED,
+     }
+     install_cmd = 'go install -v github.com/lc/gau/v2/cmd/gau@latest'
+     proxychains = False
+     proxy_socks5 = True
+     proxy_http = True
+     profile = 'io'
+
+     # @staticmethod
+     # def validate_item(self, item):
+     #     return item['url'] == 'response'
secator/tasks/gf.py ADDED
@@ -0,0 +1,33 @@
+ from secator.decorators import task
+ from secator.definitions import OPT_PIPE_INPUT, URL
+ from secator.output_types import Tag
+ from secator.tasks._categories import Tagger
+
+
+ @task()
+ class gf(Tagger):
+     """Wrapper around grep, to help you grep for things."""
+     cmd = 'gf'
+     file_flag = OPT_PIPE_INPUT
+     input_flag = OPT_PIPE_INPUT
+     opts = {
+         'pattern': {'type': str, 'help': 'Pattern names to match against (comma-delimited)'}
+     }
+     opt_key_map = {
+         'pattern': ''
+     }
+     input_type = URL
+     install_cmd = (
+         'go install -v github.com/tomnomnom/gf@latest && '
+         'git clone https://github.com/1ndianl33t/Gf-Patterns $HOME/.gf || true'
+     )
+     output_types = [Tag]
+
+     @staticmethod
+     def item_loader(self, line):
+         return {'match': line, 'name': self.get_opt_value('pattern').rstrip() + ' pattern'}  # noqa: E731,E501
+
+     @staticmethod
+     def on_item(self, item):
+         item.extra_data = {'source': 'url'}
+         return item
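Every line gf echoes back becomes a Tag named after the grepped pattern. A minimal illustration of the loader's return value (the runner stand-in and pattern value are hypothetical, and the direct static call assumes @task() returns the class unchanged):

class Ctx:  # stand-in for the runner; the loader only calls get_opt_value
    def get_opt_value(self, name):
        return 'xss'  # hypothetical --pattern value

print(gf.item_loader(Ctx(), 'https://example.com/page?q=1'))
# {'match': 'https://example.com/page?q=1', 'name': 'xss pattern'}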