secator 0.22.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (150)
  1. secator/.gitignore +162 -0
  2. secator/__init__.py +0 -0
  3. secator/celery.py +453 -0
  4. secator/celery_signals.py +138 -0
  5. secator/celery_utils.py +320 -0
  6. secator/cli.py +2035 -0
  7. secator/cli_helper.py +395 -0
  8. secator/click.py +87 -0
  9. secator/config.py +670 -0
  10. secator/configs/__init__.py +0 -0
  11. secator/configs/profiles/__init__.py +0 -0
  12. secator/configs/profiles/aggressive.yaml +8 -0
  13. secator/configs/profiles/all_ports.yaml +7 -0
  14. secator/configs/profiles/full.yaml +31 -0
  15. secator/configs/profiles/http_headless.yaml +7 -0
  16. secator/configs/profiles/http_record.yaml +8 -0
  17. secator/configs/profiles/insane.yaml +8 -0
  18. secator/configs/profiles/paranoid.yaml +8 -0
  19. secator/configs/profiles/passive.yaml +11 -0
  20. secator/configs/profiles/polite.yaml +8 -0
  21. secator/configs/profiles/sneaky.yaml +8 -0
  22. secator/configs/profiles/tor.yaml +5 -0
  23. secator/configs/scans/__init__.py +0 -0
  24. secator/configs/scans/domain.yaml +31 -0
  25. secator/configs/scans/host.yaml +23 -0
  26. secator/configs/scans/network.yaml +30 -0
  27. secator/configs/scans/subdomain.yaml +27 -0
  28. secator/configs/scans/url.yaml +19 -0
  29. secator/configs/workflows/__init__.py +0 -0
  30. secator/configs/workflows/cidr_recon.yaml +48 -0
  31. secator/configs/workflows/code_scan.yaml +29 -0
  32. secator/configs/workflows/domain_recon.yaml +46 -0
  33. secator/configs/workflows/host_recon.yaml +95 -0
  34. secator/configs/workflows/subdomain_recon.yaml +120 -0
  35. secator/configs/workflows/url_bypass.yaml +15 -0
  36. secator/configs/workflows/url_crawl.yaml +98 -0
  37. secator/configs/workflows/url_dirsearch.yaml +62 -0
  38. secator/configs/workflows/url_fuzz.yaml +68 -0
  39. secator/configs/workflows/url_params_fuzz.yaml +66 -0
  40. secator/configs/workflows/url_secrets_hunt.yaml +23 -0
  41. secator/configs/workflows/url_vuln.yaml +91 -0
  42. secator/configs/workflows/user_hunt.yaml +29 -0
  43. secator/configs/workflows/wordpress.yaml +38 -0
  44. secator/cve.py +718 -0
  45. secator/decorators.py +7 -0
  46. secator/definitions.py +168 -0
  47. secator/exporters/__init__.py +14 -0
  48. secator/exporters/_base.py +3 -0
  49. secator/exporters/console.py +10 -0
  50. secator/exporters/csv.py +37 -0
  51. secator/exporters/gdrive.py +123 -0
  52. secator/exporters/json.py +16 -0
  53. secator/exporters/table.py +36 -0
  54. secator/exporters/txt.py +28 -0
  55. secator/hooks/__init__.py +0 -0
  56. secator/hooks/gcs.py +80 -0
  57. secator/hooks/mongodb.py +281 -0
  58. secator/installer.py +694 -0
  59. secator/loader.py +128 -0
  60. secator/output_types/__init__.py +49 -0
  61. secator/output_types/_base.py +108 -0
  62. secator/output_types/certificate.py +78 -0
  63. secator/output_types/domain.py +50 -0
  64. secator/output_types/error.py +42 -0
  65. secator/output_types/exploit.py +58 -0
  66. secator/output_types/info.py +24 -0
  67. secator/output_types/ip.py +47 -0
  68. secator/output_types/port.py +55 -0
  69. secator/output_types/progress.py +36 -0
  70. secator/output_types/record.py +36 -0
  71. secator/output_types/stat.py +41 -0
  72. secator/output_types/state.py +29 -0
  73. secator/output_types/subdomain.py +45 -0
  74. secator/output_types/tag.py +69 -0
  75. secator/output_types/target.py +38 -0
  76. secator/output_types/url.py +112 -0
  77. secator/output_types/user_account.py +41 -0
  78. secator/output_types/vulnerability.py +101 -0
  79. secator/output_types/warning.py +30 -0
  80. secator/report.py +140 -0
  81. secator/rich.py +130 -0
  82. secator/runners/__init__.py +14 -0
  83. secator/runners/_base.py +1240 -0
  84. secator/runners/_helpers.py +218 -0
  85. secator/runners/celery.py +18 -0
  86. secator/runners/command.py +1178 -0
  87. secator/runners/python.py +126 -0
  88. secator/runners/scan.py +87 -0
  89. secator/runners/task.py +81 -0
  90. secator/runners/workflow.py +168 -0
  91. secator/scans/__init__.py +29 -0
  92. secator/serializers/__init__.py +8 -0
  93. secator/serializers/dataclass.py +39 -0
  94. secator/serializers/json.py +45 -0
  95. secator/serializers/regex.py +25 -0
  96. secator/tasks/__init__.py +8 -0
  97. secator/tasks/_categories.py +487 -0
  98. secator/tasks/arjun.py +113 -0
  99. secator/tasks/arp.py +53 -0
  100. secator/tasks/arpscan.py +70 -0
  101. secator/tasks/bbot.py +372 -0
  102. secator/tasks/bup.py +118 -0
  103. secator/tasks/cariddi.py +193 -0
  104. secator/tasks/dalfox.py +87 -0
  105. secator/tasks/dirsearch.py +84 -0
  106. secator/tasks/dnsx.py +186 -0
  107. secator/tasks/feroxbuster.py +93 -0
  108. secator/tasks/ffuf.py +135 -0
  109. secator/tasks/fping.py +85 -0
  110. secator/tasks/gau.py +102 -0
  111. secator/tasks/getasn.py +60 -0
  112. secator/tasks/gf.py +36 -0
  113. secator/tasks/gitleaks.py +96 -0
  114. secator/tasks/gospider.py +84 -0
  115. secator/tasks/grype.py +109 -0
  116. secator/tasks/h8mail.py +75 -0
  117. secator/tasks/httpx.py +167 -0
  118. secator/tasks/jswhois.py +36 -0
  119. secator/tasks/katana.py +203 -0
  120. secator/tasks/maigret.py +87 -0
  121. secator/tasks/mapcidr.py +42 -0
  122. secator/tasks/msfconsole.py +179 -0
  123. secator/tasks/naabu.py +85 -0
  124. secator/tasks/nmap.py +487 -0
  125. secator/tasks/nuclei.py +151 -0
  126. secator/tasks/search_vulns.py +225 -0
  127. secator/tasks/searchsploit.py +109 -0
  128. secator/tasks/sshaudit.py +299 -0
  129. secator/tasks/subfinder.py +48 -0
  130. secator/tasks/testssl.py +283 -0
  131. secator/tasks/trivy.py +130 -0
  132. secator/tasks/trufflehog.py +240 -0
  133. secator/tasks/urlfinder.py +100 -0
  134. secator/tasks/wafw00f.py +106 -0
  135. secator/tasks/whois.py +34 -0
  136. secator/tasks/wpprobe.py +116 -0
  137. secator/tasks/wpscan.py +202 -0
  138. secator/tasks/x8.py +94 -0
  139. secator/tasks/xurlfind3r.py +83 -0
  140. secator/template.py +294 -0
  141. secator/thread.py +24 -0
  142. secator/tree.py +196 -0
  143. secator/utils.py +922 -0
  144. secator/utils_test.py +297 -0
  145. secator/workflows/__init__.py +29 -0
  146. secator-0.22.0.dist-info/METADATA +447 -0
  147. secator-0.22.0.dist-info/RECORD +150 -0
  148. secator-0.22.0.dist-info/WHEEL +4 -0
  149. secator-0.22.0.dist-info/entry_points.txt +2 -0
  150. secator-0.22.0.dist-info/licenses/LICENSE +60 -0
secator/tasks/ffuf.py ADDED
@@ -0,0 +1,135 @@
+ from secator.decorators import task
+ from secator.definitions import (AUTO_CALIBRATION, DATA, DELAY, DEPTH, EXTRA_DATA,
+                                  FILTER_CODES, FILTER_REGEX, FILTER_SIZE,
+                                  FILTER_WORDS, FOLLOW_REDIRECT, HEADER,
+                                  MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
+                                  MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED,
+                                  PERCENT, PROXY, RATE_LIMIT, RETRIES,
+                                  THREADS, TIMEOUT, USER_AGENT, WORDLIST, URL)
+ from secator.output_types import Progress, Url, Subdomain, Info, Warning
+ from secator.serializers import JSONSerializer, RegexSerializer
+ from secator.tasks._categories import HttpFuzzer
+ from secator.utils import extract_domain_info
+
+
+ FFUF_PROGRESS_REGEX = r':: Progress: \[(?P<count>\d+)/(?P<total>\d+)\] :: Job \[\d/\d\] :: (?P<rps>\d+) req/sec :: Duration: \[(?P<duration>[\d:]+)\] :: Errors: (?P<errors>\d+) ::'  # noqa: E501
+
+
+ @task()
+ class ffuf(HttpFuzzer):
+     """Fast web fuzzer written in Go."""
+     cmd = 'ffuf -noninteractive'
+     input_types = [URL]
+     output_types = [Url, Subdomain, Progress]
+     tags = ['url', 'fuzz']
+     input_flag = '-u'
+     input_chunk_size = 1
+     file_flag = None
+     json_flag = '-json'
+     version_flag = '-V'
+     item_loaders = [
+         JSONSerializer(strict=True),
+         RegexSerializer(FFUF_PROGRESS_REGEX, fields=['count', 'total', 'rps', 'duration', 'errors'])
+     ]
+     opts = {
+         AUTO_CALIBRATION: {'is_flag': True, 'default': True, 'short': 'ac', 'help': 'Auto-calibration'},
+         'recursion': {'is_flag': True, 'default': False, 'short': 'recursion', 'help': 'Recursion'},
+         'stop_on_error': {'is_flag': True, 'default': False, 'short': 'soe', 'help': 'Stop on error'},
+         'fuzz_host_header': {'is_flag': True, 'default': False, 'internal': True, 'short': 'fhh', 'help': 'Fuzz host header'},
+     }
+     opt_key_map = {
+         HEADER: 'H',
+         DATA: 'd',
+         DELAY: 'p',
+         DEPTH: 'recursion-depth',
+         FILTER_CODES: 'fc',
+         FILTER_REGEX: 'fr',
+         FILTER_SIZE: 'fs',
+         FILTER_WORDS: 'fw',
+         FOLLOW_REDIRECT: 'r',
+         MATCH_CODES: 'mc',
+         MATCH_REGEX: 'mr',
+         MATCH_SIZE: 'ms',
+         MATCH_WORDS: 'mw',
+         METHOD: 'X',
+         PROXY: 'x',
+         RATE_LIMIT: 'rate',
+         RETRIES: OPT_NOT_SUPPORTED,
+         THREADS: 't',
+         TIMEOUT: 'timeout',
+         USER_AGENT: OPT_NOT_SUPPORTED,
+
+         # ffuf opts
+         WORDLIST: 'w',
+         AUTO_CALIBRATION: 'ac',
+         'stop_on_error': 'sa',
+     }
+     output_map = {
+         Progress: {
+             PERCENT: lambda x: int(int(x['count']) * 100 / int(x['total'])),
+             EXTRA_DATA: lambda x: x
+         },
+     }
+     encoding = 'ansi'
+     install_version = 'v2.1.0'
+     install_cmd = 'go install -v github.com/ffuf/ffuf/v2@[install_version]'
+     github_handle = 'ffuf/ffuf'
+     proxychains = False
+     proxy_socks5 = True
+     proxy_http = True
+
+     @staticmethod
+     def before_init(self):
+         # Add /FUZZ to URL if recursion is enabled
+         if self.get_opt_value('recursion') and not len(self.inputs) > 1 and not self.inputs[0].endswith('FUZZ'):
+             self._print(Info(message='Adding /FUZZ to URL as it is needed when recursion is enabled'), rich=True)
+             self.inputs[0] = self.inputs[0].rstrip('/') + '/FUZZ'
+
+     @staticmethod
+     def on_cmd_opts(self, opts):
+         # Fuzz host header
+         if self.get_opt_value('fuzz_host_header') and len(self.inputs) > 0:
+             host = self.inputs[0].split('://')[1].split('/')[0]
+             opts['header']['value']['Host'] = f'FUZZ.{host}'
+             self.headers = opts['header']['value'].copy()
+
+         # Check FUZZ keyword
+         data = self.get_opt_value('data') or ''
+         fuzz_in_headers = any('FUZZ' in v for v in self.headers.values())
+         if len(self.inputs) > 0 and 'FUZZ' not in self.inputs[0] and not fuzz_in_headers and 'FUZZ' not in data:
+             self.add_result(Warning(message='Keyword FUZZ is not present in the URL, header or body'))
+         return opts
+
+     @staticmethod
+     def on_json_loaded(self, item):
+         if 'host' in item:
+             self.current_host = item['host']
+         headers = self.headers.copy()
+         if 'FUZZ' in headers.get('Host', ''):
+             headers['Host'] = self.current_host
+         yield Url(
+             url=item['url'],
+             host=item['host'],
+             status_code=item['status'],
+             content_length=item['length'],
+             content_type=item['content-type'],
+             time=item['duration'] * 10**-9,
+             method=self.get_opt_value(METHOD) or 'GET',
+             request_headers=headers,
+         )
+         if self.get_opt_value('fuzz_host_header'):
+             yield Subdomain(
+                 host=item['host'],
+                 verified=False,
+                 domain=extract_domain_info(item['host'], domain_only=True),
+                 sources=['http_host_header']
+             )
+
+     @staticmethod
+     def on_item(self, item):
+         if isinstance(item, Url):
+             item.method = self.get_opt_value(METHOD) or 'GET'
+             item.request_headers = self.headers.copy()
+             if 'FUZZ' in self.headers.get('Host', ''):
+                 item.request_headers['Host'] = self.current_host
+         return item
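
A quick standalone check of the progress parsing above: the RegexSerializer feeds the named groups of FFUF_PROGRESS_REGEX into the Progress output_map, whose lambda computes the percentage. A minimal sketch (the sample line is hypothetical, but follows the progress format the regex expects):

    import re

    FFUF_PROGRESS_REGEX = r':: Progress: \[(?P<count>\d+)/(?P<total>\d+)\] :: Job \[\d/\d\] :: (?P<rps>\d+) req/sec :: Duration: \[(?P<duration>[\d:]+)\] :: Errors: (?P<errors>\d+) ::'

    # Hypothetical ffuf progress line
    line = ':: Progress: [150/1000] :: Job [1/1] :: 250 req/sec :: Duration: [0:00:06] :: Errors: 0 ::'
    fields = re.search(FFUF_PROGRESS_REGEX, line).groupdict()
    # fields == {'count': '150', 'total': '1000', 'rps': '250', 'duration': '0:00:06', 'errors': '0'}

    # Same arithmetic as the Progress output_map lambda above
    percent = int(int(fields['count']) * 100 / int(fields['total']))
    print(percent)  # 15
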
secator/tasks/fping.py ADDED
@@ -0,0 +1,85 @@
+ import validators
+
+ from secator.decorators import task
+ from secator.definitions import (DELAY, IP, HOST, OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT,
+                                  RETRIES, THREADS, TIMEOUT, CIDR_RANGE)
+ from secator.output_types import Ip
+ from secator.tasks._categories import ReconIp
+ from secator.utils import validate_cidr_range
+
+
+ @task()
+ class fping(ReconIp):
+     """Send ICMP echo probes to network hosts, similar to ping, but much better."""
+     cmd = 'fping -a -A'
+     input_types = [IP, HOST, CIDR_RANGE]
+     output_types = [Ip]
+     tags = ['ip', 'recon']
+     file_flag = '-f'
+     input_flag = None
+     opts = {
+         'count': {'type': int, 'default': None, 'help': 'Number of request packets to send to each target'},
+         'show_name': {'is_flag': True, 'default': False, 'help': 'Show network addresses as well as hostnames'},
+         'use_dns': {'is_flag': True, 'default': False, 'help': 'Use DNS to lookup address of return packet (same as -n but will force reverse-DNS lookup for hostnames)'},  # noqa: E501
+         'summary': {'is_flag': True, 'default': False, 'help': 'Print cumulative statistics upon exit'},
+     }
+     opt_prefix = '--'
+     opt_key_map = {
+         DELAY: 'period',
+         PROXY: OPT_NOT_SUPPORTED,
+         RATE_LIMIT: OPT_NOT_SUPPORTED,
+         RETRIES: 'retry',
+         TIMEOUT: 'timeout',
+         THREADS: OPT_NOT_SUPPORTED,
+         'count': '-c',
+         'show_name': '-n',
+         'use_dns': '-d',
+         'summary': '-s',
+     }
+     opt_value_map = {
+         DELAY: lambda x: int(x) * 1000,  # convert s to ms
+         TIMEOUT: lambda x: int(x) * 1000  # convert s to ms
+     }
+     github_handle = 'schweikert/fping'
+     install_github_bin = False
+     install_version = 'v5.1'
+     install_pre = {'*': ['fping']}
+     ignore_return_code = True
+
+     @staticmethod
+     def before_init(self):
+         for input in self.inputs:
+             if validate_cidr_range(input):
+                 self.file_flag = None
+                 self.input_chunk_size = 1
+                 self.input_flag = '-g'
+
+     @staticmethod
+     def item_loader(self, line):
+         if '(' in line:
+             line_part = line.split(' : ')[0] if ' : ' in line else line  # remove the stat parts that appear when using -c
+             start_paren = line_part.find('(')
+             end_paren = line_part.find(')', start_paren)
+             if start_paren != -1 and end_paren != -1:
+                 host = line_part[:start_paren].strip()
+                 ip = line_part[start_paren+1:end_paren].strip()
+                 if (validators.ipv4(host) or validators.ipv6(host)):
+                     host = ''
+             else:
+                 return
+         else:
+             ip = line.strip()
+             host = ''
+         if not (validators.ipv4(ip) or validators.ipv6(ip)):
+             return
+         yield Ip(ip=ip, alive=True, host=host, extra_data={'protocol': 'icmp'})
+
+     @staticmethod
+     def on_line(self, line):
+         if 'Unreachable' in line:
+             return ''  # discard line as it pollutes output
+         return line
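
For reference, a standalone sketch of the line parsing in item_loader above, run against hypothetical fping output in the two shapes the task handles (a bare IP, and the "name (ip)" form); validators is the same third-party package the task imports:

    import validators

    lines = [
        '192.0.2.1',                     # bare IP, as printed with -a
        'host.example.com (192.0.2.2)',  # "name (ip)" form, as printed with -n/-d
    ]
    for line in lines:
        if '(' in line:
            part = line.split(' : ')[0] if ' : ' in line else line
            start, end = part.find('('), part.find(')')
            host = part[:start].strip()
            ip = part[start + 1:end].strip()
            if validators.ipv4(host) or validators.ipv6(host):
                host = ''  # left side was an IP echoed by -A, not a hostname
        else:
            ip, host = line.strip(), ''
        if validators.ipv4(ip) or validators.ipv6(ip):
            print({'ip': ip, 'host': host, 'alive': True, 'protocol': 'icmp'})
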
secator/tasks/gau.py ADDED
@@ -0,0 +1,102 @@
+ import json
+ from collections import defaultdict
+ from urllib.parse import urlparse, urlunparse, parse_qs
+
+ from secator.decorators import task
+ from secator.definitions import (DELAY, DEPTH, FILTER_CODES, FILTER_REGEX,
+                                  FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT,
+                                  HEADER, MATCH_CODES, MATCH_REGEX, MATCH_SIZE,
+                                  MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED,
+                                  OPT_PIPE_INPUT, PROXY, RATE_LIMIT, RETRIES,
+                                  THREADS, TIMEOUT, USER_AGENT, URL, HOST)
+ from secator.output_types import Subdomain, Url, Warning
+ from secator.serializers import JSONSerializer
+ from secator.tasks._categories import HttpCrawler
+ from secator.utils import extract_domain_info
+
+
+ @task()
+ class gau(HttpCrawler):
+     """Fetch known URLs from AlienVault's Open Threat Exchange, the Wayback Machine, Common Crawl, and URLScan."""
+     cmd = 'gau --verbose'
+     input_types = [URL, HOST]
+     output_types = [Url, Subdomain]
+     tags = ['pattern', 'scan']
+     file_flag = OPT_PIPE_INPUT
+     json_flag = '--json'
+     opt_prefix = '--'
+     encoding = 'ansi'
+     opts = {
+         'providers': {'type': str, 'default': None, 'help': 'List of providers to use (wayback,commoncrawl,otx,urlscan)'},
+         'subs': {'is_flag': True, 'default': False, 'help': 'Output subdomains as well as URLs'},
+         'max_param_occurrences': {'type': int, 'help': 'Max occurrences for the same parameter in the same URL before discarding next results', 'required': False, 'default': 10, 'internal': True},  # noqa: E501
+     }
+     opt_key_map = {
+         HEADER: OPT_NOT_SUPPORTED,
+         DELAY: OPT_NOT_SUPPORTED,
+         DEPTH: OPT_NOT_SUPPORTED,
+         FILTER_CODES: 'fc',
+         FILTER_REGEX: OPT_NOT_SUPPORTED,
+         FILTER_SIZE: OPT_NOT_SUPPORTED,
+         FILTER_WORDS: OPT_NOT_SUPPORTED,
+         MATCH_CODES: 'mc',
+         MATCH_REGEX: OPT_NOT_SUPPORTED,
+         MATCH_SIZE: OPT_NOT_SUPPORTED,
+         MATCH_WORDS: OPT_NOT_SUPPORTED,
+         FOLLOW_REDIRECT: OPT_NOT_SUPPORTED,
+         METHOD: OPT_NOT_SUPPORTED,
+         PROXY: 'proxy',
+         RATE_LIMIT: OPT_NOT_SUPPORTED,
+         RETRIES: 'retries',
+         THREADS: 'threads',
+         TIMEOUT: 'timeout',
+         USER_AGENT: OPT_NOT_SUPPORTED,
+     }
+     item_loaders = [JSONSerializer()]
+     install_pre = {'apk': ['libc6-compat']}
+     install_version = 'v2.2.4'
+     install_cmd = 'go install -v github.com/lc/gau/v2/cmd/gau@[install_version]'
+     github_handle = 'lc/gau'
+     proxychains = False
+     proxy_socks5 = True
+     proxy_http = True
+     profile = 'io'
+
+     @staticmethod
+     def on_init(self):
+         self.max_param_occurrences = self.get_opt_value('max_param_occurrences')
+         self.seen_params = defaultdict(lambda: defaultdict(int))
+         self.subdomains = []
+
+     @staticmethod
+     def on_line(self, line):
+         if 'level=warning' in line and 'error reading config' not in line:
+             msg = line.split('msg=')[-1].rstrip('""').lstrip('"')
+             if not msg.startswith('http'):
+                 msg = msg.capitalize()
+             return json.dumps({'message': msg, '_type': 'warning'})
+         return line
+
+     @staticmethod
+     def on_json_loaded(self, item):
+         if item.get('message'):
+             yield Warning(message=item['message'])
+             return
+         url = item['url']
+         parsed_url = urlparse(url)
+         base_url = urlunparse(parsed_url._replace(query="", fragment=""))  # Remove query & fragment
+         query_params = parse_qs(parsed_url.query)
+         current_params = set(query_params.keys())
+         for param in current_params:
+             self.seen_params[base_url][param] += 1
+             if self.seen_params[base_url][param] > int(self.max_param_occurrences):
+                 return
+         if self.get_opt_value('subs'):
+             domain = extract_domain_info(parsed_url.hostname, domain_only=True)
+             if domain:
+                 subdomain = Subdomain(host=parsed_url.hostname, domain=domain)
+                 if subdomain not in self.subdomains:
+                     self.subdomains.append(subdomain)
+                     yield subdomain
+         else:
+             yield Url(url=item['url'], host=parsed_url.hostname)
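
The max_param_occurrences logic above caps how many times the same query parameter may recur on the same base URL before further results are dropped. A self-contained sketch of that dedup, using only the stdlib (threshold lowered to 2 for brevity; the task defaults to 10):

    from collections import defaultdict
    from urllib.parse import urlparse, urlunparse, parse_qs

    max_param_occurrences = 2
    seen_params = defaultdict(lambda: defaultdict(int))

    urls = [f'https://example.com/item?id={i}' for i in range(5)]
    for url in urls:
        parsed = urlparse(url)
        base_url = urlunparse(parsed._replace(query='', fragment=''))
        discard = False
        for param in set(parse_qs(parsed.query)):
            seen_params[base_url][param] += 1
            if seen_params[base_url][param] > max_param_occurrences:
                discard = True
        if not discard:
            print(url)  # only ?id=0 and ?id=1 survive
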
secator/tasks/getasn.py ADDED
@@ -0,0 +1,60 @@
+ from secator.decorators import task
+ from secator.definitions import (DELAY, OPT_PIPE_INPUT, IP, HOST)
+ from secator.output_types import Tag
+ from secator.tasks._categories import Command
+
+
+ @task()
+ class getasn(Command):
+     """Get ASN information from IP address."""
+     cmd = 'getasn'
+     input_chunk_size = 1
+     input_types = [IP, HOST]
+     input_flag = OPT_PIPE_INPUT
+     file_flag = None
+     output_types = [Tag]
+     tags = ['ip', 'probe']
+     opts = {}
+     opt_key_map = {
+         # HEADER: 'header',
+         # DELAY: 'delay',
+         # DEPTH: OPT_NOT_SUPPORTED,
+         # FILTER_CODES: 'filter-code',
+         # FILTER_REGEX: 'filter-regex',
+         # FILTER_SIZE: 'filter-length',
+         # FILTER_WORDS: 'filter-word-count',
+         # FOLLOW_REDIRECT: 'follow-redirects',
+         # MATCH_CODES: 'match-code',
+         # MATCH_REGEX: 'match-regex',
+         # MATCH_SIZE: 'match-length',
+         # MATCH_WORDS: 'match-word-count',
+         # METHOD: 'x',
+         # PROXY: 'proxy',
+         # RATE_LIMIT: 'rate-limit',
+         # RETRIES: 'retries',
+         # THREADS: 'threads',
+         # TIMEOUT: 'timeout',
+         # USER_AGENT: OPT_NOT_SUPPORTED,
+         # 'store_responses': 'sr',
+     }
+     opt_value_map = {
+         DELAY: lambda x: str(x) + 's' if x else None,
+     }
+     install_version = 'latest'
+     install_cmd = 'go install github.com/Vulnpire/getasn@[install_version]'
+     install_github_bin = False
+     github_handle = 'Vulnpire/getasn'
+     proxychains = False
+     proxy_socks5 = True
+     proxy_http = False
+
+     @staticmethod
+     def item_loader(self, line):
+         tag = Tag(
+             category='info',
+             name='asn',
+             match=self.inputs[0],
+             value=line.strip(),
+         )
+         if tag not in self.self_results:
+             yield tag
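
Since input_flag is OPT_PIPE_INPUT with input_chunk_size = 1, secator pipes one target at a time to getasn on stdin and turns each output line into an 'asn' Tag, skipping duplicates. A rough stdlib equivalent (assumes a getasn binary on PATH; its exact output format is not validated here):

    import subprocess

    target = '8.8.8.8'  # hypothetical target
    proc = subprocess.run(['getasn'], input=target, capture_output=True, text=True)

    seen = set()
    for line in proc.stdout.splitlines():
        tag = ('info', 'asn', target, line.strip())  # (category, name, match, value)
        if tag not in seen:  # same dedup idea as the self_results check above
            seen.add(tag)
            print(tag)
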
secator/tasks/gf.py ADDED
@@ -0,0 +1,36 @@
+ from secator.decorators import task
+ from secator.definitions import OPT_PIPE_INPUT, OPT_NOT_SUPPORTED
+ from secator.output_types import Tag
+ from secator.tasks._categories import Tagger
+
+
+ @task()
+ class gf(Tagger):
+     """Wrapper around grep, to help you grep for things."""
+     cmd = 'gf'
+     input_types = None  # anything
+     output_types = [Tag]
+     tags = ['pattern', 'scan']
+     file_flag = OPT_PIPE_INPUT
+     input_flag = OPT_PIPE_INPUT
+     version_flag = OPT_NOT_SUPPORTED
+     opts = {
+         'pattern': {'type': str, 'help': 'Pattern names to match against (comma-delimited)', 'required': True}
+     }
+     opt_key_map = {
+         'pattern': ''
+     }
+     install_cmd = (
+         'go install -v github.com/tomnomnom/gf@latest && '
+         'git clone https://github.com/1ndianl33t/Gf-Patterns $HOME/.gf || true'
+     )
+
+     @staticmethod
+     def item_loader(self, line):
+         yield {'match': line, 'name': self.get_opt_value('pattern').rstrip(), 'category': 'url_pattern', 'value': line}  # noqa: E731,E501
+
+     @staticmethod
+     def on_item(self, item):
+         if isinstance(item, Tag):
+             item.extra_data = {'source': 'url'}
+         return item
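
With both input_flag and file_flag set to OPT_PIPE_INPUT, targets are fed to gf over stdin and every matching output line is tagged. A rough equivalent using subprocess (assumes gf is installed and 'xss' exists among the cloned Gf-Patterns):

    import subprocess

    urls = 'https://example.com/?q=1\nhttps://example.com/?next=/home\n'
    proc = subprocess.run(['gf', 'xss'], input=urls, capture_output=True, text=True)
    for line in proc.stdout.splitlines():
        # every matching line becomes a Tag, as in item_loader above
        print({'match': line, 'name': 'xss', 'category': 'url_pattern', 'value': line})
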
secator/tasks/gitleaks.py ADDED
@@ -0,0 +1,96 @@
+ import click
+ import os
+ import shlex
+ import yaml
+
+ from pathlib import Path
+
+ from secator.config import CONFIG
+ from secator.decorators import task
+ from secator.runners import Command
+ from secator.definitions import (OUTPUT_PATH, PATH)
+ from secator.utils import caml_to_snake
+ from secator.output_types import Tag, Info, Error
+ from secator.rich import console
+
+ GITLEAKS_MODES = ['git', 'dir']
+
+
+ def convert_mode(mode):
+     return 'dir' if mode == 'filesystem' else 'git' if mode == 'git' else mode
+
+
+ @task()
+ class gitleaks(Command):
+     """Tool for detecting secrets like passwords, API keys, and tokens in git repos, files, and stdin."""
+     cmd = 'gitleaks'
+     tags = ['secret', 'scan']
+     input_types = [PATH]
+     input_flag = None
+     json_flag = '-f json'
+     opt_prefix = '--'
+     opts = {
+         'ignore_path': {'type': str, 'help': 'Path to .gitleaksignore file or folder containing one'},
+         'mode': {'type': click.Choice(GITLEAKS_MODES), 'help': f'Scan mode ({", ".join(GITLEAKS_MODES)})', 'internal': True},  # noqa: E501
+         'config': {'type': str, 'short': 'config', 'help': 'Config file path'}
+     }
+     opt_key_map = {
+         "ignore_path": "gitleaks-ignore-path"
+     }
+     opt_value_map = {
+         'mode': lambda x: convert_mode(x)
+     }
+     input_type = "folder"
+     output_types = [Tag]
+     install_version = 'v8.29.1'
+     install_cmd_pre = {'*': ['git', 'make']}
+     install_cmd = (
+         f'git clone --single-branch -b [install_version] https://github.com/gitleaks/gitleaks.git {CONFIG.dirs.share}/gitleaks_[install_version] || true &&'  # noqa: E501
+         f'cd {CONFIG.dirs.share}/gitleaks_[install_version] && make build &&'
+         f'mv {CONFIG.dirs.share}/gitleaks_[install_version]/gitleaks {CONFIG.dirs.bin}'
+     )
+     github_handle = 'gitleaks/gitleaks'
+
+     @staticmethod
+     def on_cmd(self):
+         mode = self.cmd_options.get('mode', {}).get('value')
+         if mode and mode not in GITLEAKS_MODES:
+             raise Exception(f'Invalid mode: {mode}')
+         if not mode and len(self.inputs) > 0:
+             git_path = Path(self.inputs[0]).joinpath('.git')
+             if git_path.exists():
+                 mode = 'git'
+             else:
+                 mode = 'dir'
+             console.print(Info(message=f'Auto mode detected: {mode} for input: {self.inputs[0]}'))
+         self.cmd = self.cmd.replace(f'{gitleaks.cmd} ', f'{gitleaks.cmd} {mode} ')
+
+         # add output path
+         output_path = self.get_opt_value(OUTPUT_PATH)
+         if not output_path:
+             output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json'
+         self.output_path = output_path
+         self.cmd += f' -r {shlex.quote(self.output_path)}'
+         self.cmd += ' --exit-code 0'
+
+     @staticmethod
+     def on_cmd_done(self):
+         if not os.path.exists(self.output_path):
+             yield Error(message=f'Could not find JSON results in {self.output_path}')
+             return
+
+         yield Info(message=f'JSON results saved to {self.output_path}')
+         with open(self.output_path, 'r') as f:
+             results = yaml.safe_load(f.read())
+         for result in results:
+             extra_data = {
+                 caml_to_snake(k): v for k, v in result.items()
+                 if k not in ['RuleID', 'File', 'Secret']
+             }
+             yield Tag(
+                 category='secret',
+                 name=result['RuleID'].replace('-', '_'),
+                 value=result.get('Secret', ''),
+                 match='{File}:{StartLine}:{StartColumn}'.format(**result),
+                 extra_data=extra_data
+             )
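
on_cmd_done above parses the gitleaks JSON report (valid JSON is also valid YAML, hence yaml.safe_load) and maps each finding to a Tag. A standalone sketch with a hypothetical finding; caml_to_snake is approximated here, since its real implementation lives in secator/utils.py:

    import re

    def caml_to_snake(name):
        # assumed behavior of secator.utils.caml_to_snake
        return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower()

    result = {  # hypothetical entry from the gitleaks JSON report
        'RuleID': 'aws-access-key', 'File': 'config/prod.env',
        'StartLine': 12, 'StartColumn': 5, 'Secret': 'AKIA...', 'Entropy': 3.8,
    }
    extra_data = {caml_to_snake(k): v for k, v in result.items()
                  if k not in ['RuleID', 'File', 'Secret']}
    tag = {
        'category': 'secret',
        'name': result['RuleID'].replace('-', '_'),  # 'aws_access_key'
        'value': result.get('Secret', ''),
        'match': '{File}:{StartLine}:{StartColumn}'.format(**result),  # 'config/prod.env:12:5'
        'extra_data': extra_data,  # {'start_line': 12, 'start_column': 5, 'entropy': 3.8}
    }
    print(tag)
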
secator/tasks/gospider.py ADDED
@@ -0,0 +1,84 @@
+ from furl import furl
+
+ from secator.decorators import task
+ from secator.definitions import (CONTENT_LENGTH, DELAY, DEPTH, FILTER_CODES,
+                                  FILTER_REGEX, FILTER_SIZE, FILTER_WORDS,
+                                  FOLLOW_REDIRECT, HEADER, MATCH_CODES,
+                                  MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, METHOD,
+                                  OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT, RETRIES,
+                                  STATUS_CODE, THREADS, TIMEOUT, URL, USER_AGENT)
+ from secator.output_types import Url
+ from secator.serializers import JSONSerializer
+ from secator.tasks._categories import HttpCrawler
+
+
+ @task()
+ class gospider(HttpCrawler):
+     """Fast web spider written in Go."""
+     cmd = 'gospider'
+     input_types = [URL]
+     output_types = [Url]
+     tags = ['url', 'crawl']
+     file_flag = '-S'
+     input_flag = '-s'
+     json_flag = '--json'
+     opt_prefix = '--'
+     opt_key_map = {
+         HEADER: 'header',
+         DELAY: 'delay',
+         DEPTH: 'depth',
+         FILTER_CODES: OPT_NOT_SUPPORTED,
+         FILTER_REGEX: OPT_NOT_SUPPORTED,
+         FILTER_SIZE: OPT_NOT_SUPPORTED,
+         FILTER_WORDS: OPT_NOT_SUPPORTED,
+         FOLLOW_REDIRECT: 'no-redirect',
+         MATCH_CODES: OPT_NOT_SUPPORTED,
+         MATCH_REGEX: OPT_NOT_SUPPORTED,
+         MATCH_SIZE: OPT_NOT_SUPPORTED,
+         MATCH_WORDS: OPT_NOT_SUPPORTED,
+         METHOD: OPT_NOT_SUPPORTED,
+         PROXY: 'proxy',
+         RATE_LIMIT: OPT_NOT_SUPPORTED,
+         RETRIES: OPT_NOT_SUPPORTED,
+         THREADS: 'threads',
+         TIMEOUT: 'timeout',
+         USER_AGENT: 'user-agent',
+     }
+     opt_value_map = {
+         FOLLOW_REDIRECT: lambda x: not x,
+         DELAY: lambda x: round(x) if isinstance(x, float) else x
+     }
+     item_loaders = [JSONSerializer()]
+     output_map = {
+         Url: {
+             URL: 'output',
+             STATUS_CODE: 'status',
+             CONTENT_LENGTH: 'length',
+         }
+     }
+     install_version = 'v1.1.6'
+     install_cmd = 'go install -v github.com/jaeles-project/gospider@[install_version]'
+     github_handle = 'jaeles-project/gospider'
+     proxychains = False
+     proxy_socks5 = True  # with leaks... https://github.com/jaeles-project/gospider/issues/61
+     proxy_http = True  # with leaks... https://github.com/jaeles-project/gospider/issues/61
+     profile = 'io'
+
+     @staticmethod
+     def validate_item(self, item):
+         """Keep only items that match the same host."""
+         if not isinstance(item, dict):
+             return False
+         try:
+             netloc_in = furl(item['input']).netloc
+             netloc_out = furl(item['output']).netloc
+             if netloc_in != netloc_out:
+                 return False
+         except ValueError:  # gospider returns invalid URLs for output sometimes
+             return False
+         return True
+
+     @staticmethod
+     def on_json_loaded(self, item):
+         item['request_headers'] = self.get_opt_value('header', preprocess=True)
+         yield item
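
validate_item above drops crawl results whose host differs from the input's, comparing netlocs with furl (the same third-party package the task imports). A minimal illustration with hypothetical gospider JSON records:

    from furl import furl

    items = [
        {'input': 'https://example.com', 'output': 'https://example.com/about'},
        {'input': 'https://example.com', 'output': 'https://static.cdn.io/app.js'},
    ]
    for item in items:
        same_host = furl(item['input']).netloc == furl(item['output']).netloc
        print(item['output'], '->', 'kept' if same_host else 'dropped')
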