secator 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of secator might be problematic. Click here for more details.

Files changed (114) hide show
  1. secator/__init__.py +0 -0
  2. secator/celery.py +482 -0
  3. secator/cli.py +617 -0
  4. secator/config.py +137 -0
  5. secator/configs/__init__.py +0 -0
  6. secator/configs/profiles/__init__.py +0 -0
  7. secator/configs/profiles/aggressive.yaml +7 -0
  8. secator/configs/profiles/default.yaml +9 -0
  9. secator/configs/profiles/stealth.yaml +7 -0
  10. secator/configs/scans/__init__.py +0 -0
  11. secator/configs/scans/domain.yaml +18 -0
  12. secator/configs/scans/host.yaml +14 -0
  13. secator/configs/scans/network.yaml +17 -0
  14. secator/configs/scans/subdomain.yaml +8 -0
  15. secator/configs/scans/url.yaml +12 -0
  16. secator/configs/workflows/__init__.py +0 -0
  17. secator/configs/workflows/cidr_recon.yaml +28 -0
  18. secator/configs/workflows/code_scan.yaml +11 -0
  19. secator/configs/workflows/host_recon.yaml +41 -0
  20. secator/configs/workflows/port_scan.yaml +34 -0
  21. secator/configs/workflows/subdomain_recon.yaml +33 -0
  22. secator/configs/workflows/url_crawl.yaml +29 -0
  23. secator/configs/workflows/url_dirsearch.yaml +29 -0
  24. secator/configs/workflows/url_fuzz.yaml +35 -0
  25. secator/configs/workflows/url_nuclei.yaml +11 -0
  26. secator/configs/workflows/url_vuln.yaml +55 -0
  27. secator/configs/workflows/user_hunt.yaml +10 -0
  28. secator/configs/workflows/wordpress.yaml +14 -0
  29. secator/decorators.py +309 -0
  30. secator/definitions.py +165 -0
  31. secator/exporters/__init__.py +12 -0
  32. secator/exporters/_base.py +3 -0
  33. secator/exporters/csv.py +30 -0
  34. secator/exporters/gdrive.py +118 -0
  35. secator/exporters/json.py +15 -0
  36. secator/exporters/table.py +7 -0
  37. secator/exporters/txt.py +25 -0
  38. secator/hooks/__init__.py +0 -0
  39. secator/hooks/mongodb.py +212 -0
  40. secator/output_types/__init__.py +24 -0
  41. secator/output_types/_base.py +95 -0
  42. secator/output_types/exploit.py +50 -0
  43. secator/output_types/ip.py +33 -0
  44. secator/output_types/port.py +45 -0
  45. secator/output_types/progress.py +35 -0
  46. secator/output_types/record.py +34 -0
  47. secator/output_types/subdomain.py +42 -0
  48. secator/output_types/tag.py +46 -0
  49. secator/output_types/target.py +30 -0
  50. secator/output_types/url.py +76 -0
  51. secator/output_types/user_account.py +41 -0
  52. secator/output_types/vulnerability.py +97 -0
  53. secator/report.py +107 -0
  54. secator/rich.py +124 -0
  55. secator/runners/__init__.py +12 -0
  56. secator/runners/_base.py +833 -0
  57. secator/runners/_helpers.py +153 -0
  58. secator/runners/command.py +638 -0
  59. secator/runners/scan.py +65 -0
  60. secator/runners/task.py +106 -0
  61. secator/runners/workflow.py +135 -0
  62. secator/serializers/__init__.py +8 -0
  63. secator/serializers/dataclass.py +33 -0
  64. secator/serializers/json.py +15 -0
  65. secator/serializers/regex.py +17 -0
  66. secator/tasks/__init__.py +10 -0
  67. secator/tasks/_categories.py +304 -0
  68. secator/tasks/cariddi.py +102 -0
  69. secator/tasks/dalfox.py +65 -0
  70. secator/tasks/dirsearch.py +90 -0
  71. secator/tasks/dnsx.py +56 -0
  72. secator/tasks/dnsxbrute.py +34 -0
  73. secator/tasks/feroxbuster.py +91 -0
  74. secator/tasks/ffuf.py +86 -0
  75. secator/tasks/fping.py +44 -0
  76. secator/tasks/gau.py +47 -0
  77. secator/tasks/gf.py +33 -0
  78. secator/tasks/gospider.py +71 -0
  79. secator/tasks/grype.py +79 -0
  80. secator/tasks/h8mail.py +81 -0
  81. secator/tasks/httpx.py +99 -0
  82. secator/tasks/katana.py +133 -0
  83. secator/tasks/maigret.py +78 -0
  84. secator/tasks/mapcidr.py +32 -0
  85. secator/tasks/msfconsole.py +174 -0
  86. secator/tasks/naabu.py +52 -0
  87. secator/tasks/nmap.py +344 -0
  88. secator/tasks/nuclei.py +97 -0
  89. secator/tasks/searchsploit.py +52 -0
  90. secator/tasks/subfinder.py +40 -0
  91. secator/tasks/wpscan.py +179 -0
  92. secator/utils.py +445 -0
  93. secator/utils_test.py +183 -0
  94. secator-0.0.1.dist-info/LICENSE +60 -0
  95. secator-0.0.1.dist-info/METADATA +199 -0
  96. secator-0.0.1.dist-info/RECORD +114 -0
  97. secator-0.0.1.dist-info/WHEEL +5 -0
  98. secator-0.0.1.dist-info/entry_points.txt +2 -0
  99. secator-0.0.1.dist-info/top_level.txt +2 -0
  100. tests/__init__.py +0 -0
  101. tests/integration/__init__.py +0 -0
  102. tests/integration/inputs.py +42 -0
  103. tests/integration/outputs.py +392 -0
  104. tests/integration/test_scans.py +82 -0
  105. tests/integration/test_tasks.py +103 -0
  106. tests/integration/test_workflows.py +163 -0
  107. tests/performance/__init__.py +0 -0
  108. tests/performance/loadtester.py +56 -0
  109. tests/unit/__init__.py +0 -0
  110. tests/unit/test_celery.py +39 -0
  111. tests/unit/test_scans.py +0 -0
  112. tests/unit/test_serializers.py +51 -0
  113. tests/unit/test_tasks.py +348 -0
  114. tests/unit/test_workflows.py +96 -0
secator/config.py ADDED
@@ -0,0 +1,137 @@
1
+ import glob
2
+ import os
3
+ from pathlib import Path
4
+
5
+ import yaml
6
+ from dotmap import DotMap
7
+
8
+ from secator.rich import console
9
+ from secator.definitions import CONFIGS_FOLDER, EXTRA_CONFIGS_FOLDER
10
+
11
+ CONFIGS_DIR_KEYS = ['workflow', 'scan', 'profile']
12
+
13
+
14
def load_config(name):
	"""Load a config by name.

	Args:
		name (str): Name of the config, for instance "profiles/aggressive" or "workflows/domain_scan".

	Returns:
		dict: Loaded config, or None if the config could not be found.
	"""
	path = Path(CONFIGS_FOLDER) / f'{name}.yaml'
	if not path.exists():
		console.log(f'Config "{name}" could not be loaded.', style='bold red')
		return None
	with path.open('r') as f:
		# NOTE(review): yaml.Loader can instantiate arbitrary Python objects via tags.
		# Configs are assumed local and trusted; switch to yaml.SafeLoader if that changes.
		return yaml.load(f, Loader=yaml.Loader)
29
+
30
+
31
def find_configs():
	"""Find all secator YAML configs on disk.

	Recursively scans CONFIGS_FOLDER (and EXTRA_CONFIGS_FOLDER, if set) for
	.yml / .yaml files and buckets them by their declared 'type' key.

	Returns:
		dict: Mapping of config type ('scan', 'workflow', 'profile') to a list of absolute paths.
	"""
	results = {'scan': [], 'workflow': [], 'profile': []}
	search_dirs = [CONFIGS_FOLDER]
	if EXTRA_CONFIGS_FOLDER:
		search_dirs.append(EXTRA_CONFIGS_FOLDER)
	paths = []
	for search_dir in search_dirs:  # renamed from 'dir' to avoid shadowing the builtin
		paths.extend(
			os.path.abspath(path)
			for path in glob.glob(search_dir.rstrip('/') + '/**/*.y*ml', recursive=True)
		)
	for path in paths:
		with open(path, 'r') as f:
			try:
				config = yaml.load(f.read(), yaml.Loader)
			except yaml.YAMLError as exc:
				console.log(f'Unable to load config at {path}')
				console.log(str(exc))
				continue
		# An empty YAML file loads as None; guard before calling .get().
		config_type = config.get('type') if isinstance(config, dict) else None
		# Ignore unknown or missing types instead of raising KeyError on results[...]
		if config_type in results:
			results[config_type].append(path)
	return results
54
+
55
+
56
class ConfigLoader(DotMap):
	"""Dot-accessible wrapper around a YAML config.

	Can be built from a dict, a YAML file path, or a config name
	(e.g. 'workflows/host_recon').
	"""

	def __init__(self, input=None, name=None, **kwargs):
		"""Initialize the config.

		Args:
			input (dict | str | None): Raw config dict or a path to a YAML file.
			name (str | None): Config name relative to CONFIGS_FOLDER; takes precedence over input.
		"""
		if name:
			name = name.replace('-', '_')  # so that workflows have a nice '-' in CLI
			config = self._load_from_name(name)
		elif isinstance(input, str):
			config = self._load_from_file(input)
		else:
			# Normalize None to a fresh dict (avoids the shared mutable-default-argument pitfall).
			config = input if input is not None else {}
		super().__init__(config)

	def _load_from_file(self, path):
		"""Load a YAML config from a file path. Returns None if the path does not exist."""
		if not os.path.exists(path):
			console.log(f'Config path {path} does not exist', style='bold red')
			return None
		with open(path, 'r') as f:
			return yaml.load(f.read(), Loader=yaml.Loader)

	def _load_from_name(self, name):
		"""Load a YAML config by name (delegates to load_config)."""
		return load_config(name)

	@classmethod
	def load_all(cls):
		"""Load every config found on disk, grouped by type.

		Returns:
			ConfigLoader: Map of config type to list of ConfigLoader objects.
		"""
		configs = find_configs()
		return ConfigLoader({
			key: [ConfigLoader(path) for path in configs[key]]
			for key in CONFIGS_DIR_KEYS
		})

	def get_tasks_class(self):
		"""Return the Task classes used by this config, recursing into '_group' entries."""
		from secator.runners import Task
		tasks = []
		for name, conf in self.tasks.items():
			if name == '_group':
				group_conf = ConfigLoader(input={'tasks': conf})
				tasks.extend(group_conf.get_tasks_class())
			else:
				tasks.append(Task.get_task_class(name))
		return tasks

	def get_workflows(self):
		"""Return a ConfigLoader for each workflow referenced by this (scan) config."""
		return [ConfigLoader(name=f'workflows/{name}') for name, _ in self.workflows.items()]

	@staticmethod
	def _merge_supported_opts(opts, new_opts):
		"""Merge new_opts into opts in place, OR-ing the 'supported' flags."""
		for name, conf in new_opts.items():
			supported = opts.get(name, {}).get('supported', False)
			opts[name] = conf
			opts[name]['supported'] = conf['supported'] or supported
		return opts

	def get_workflow_supported_opts(self):
		"""Aggregate the supported options of every task in this workflow."""
		opts = {}
		for task_cls in self.get_tasks_class():
			self._merge_supported_opts(opts, task_cls.get_supported_opts())
		return opts

	def get_scan_supported_opts(self):
		"""Aggregate the supported options of every workflow in this scan."""
		opts = {}
		for workflow in self.get_workflows():
			self._merge_supported_opts(opts, workflow.get_workflow_supported_opts())
		return opts

	@property
	def supported_opts(self):
		"""Supported options for this config (see get_supported_opts)."""
		return self.get_supported_opts()

	def get_supported_opts(self):
		"""Return the supported options for this config, sorted by option name.

		Returns:
			dict: Option name -> option config, with aggregated 'supported' flags.
		"""
		opts = {}
		if self.type == 'workflow':
			opts = self.get_workflow_supported_opts()
		elif self.type == 'scan':
			opts = self.get_scan_supported_opts()
		elif self.type == 'task':
			tasks = self.get_tasks_class()
			if tasks:
				opts = tasks[0].get_supported_opts()
		return dict(sorted(opts.items()))
File without changes
File without changes
@@ -0,0 +1,7 @@
1
+ type: profile
2
+ name: aggressive
3
+ options:
4
+ rate_limit: 100000
5
+ delay: 0
6
+ proxy: random
7
+ user_agent: random
@@ -0,0 +1,9 @@
1
+ type: profile
2
+ name: default
3
+ options:
4
+ rate_limit: 1000
5
+ delay: 1
6
+ proxy: null
7
+ user_agent: 'Mozilla ...'
8
+ nuclei.retries: 5
9
+ nuclei.timeout: 15
@@ -0,0 +1,7 @@
1
+ type: profile
2
+ name: stealth
3
+ options:
4
+ rate_limit: 100
5
+ delay: 1
6
+ proxy: proxychains
7
+ user_agent: random
File without changes
@@ -0,0 +1,18 @@
1
+ type: scan
2
+ name: domain
3
+ description: Domain scan
4
+ profile: default
5
+ input_types:
6
+ - host
7
+ workflows:
8
+ subdomain_recon:
9
+ host_recon:
10
+ targets_:
11
+ - target.name
12
+ - subdomain.host
13
+ url_crawl:
14
+ targets_:
15
+ - url.url
16
+ url_vuln:
17
+ targets_:
18
+ - url.url
@@ -0,0 +1,14 @@
1
+ type: scan
2
+ name: host
3
+ description: Host scan
4
+ profile: default
5
+ input_types:
6
+ - host
7
+ workflows:
8
+ host_recon:
9
+ url_crawl:
10
+ targets_:
11
+ - url.url
12
+ url_vuln:
13
+ targets_:
14
+ - url.url
@@ -0,0 +1,17 @@
1
+ type: scan
2
+ name: network
3
+ description: Internal network scan
4
+ profile: default
5
+ input_types:
6
+ - cidr_range
7
+ workflows:
8
+ cidr_recon:
9
+ url_nuclei:
10
+ targets_:
11
+ - url.url
12
+ url_crawl:
13
+ targets_:
14
+ - url.url
15
+ url_vuln:
16
+ targets_:
17
+ - url.url
@@ -0,0 +1,8 @@
1
+ type: scan
2
+ name: subdomain
3
+ description: Subdomain scan
4
+ profile: default
5
+ input_types:
6
+ - host
7
+ workflows:
8
+ subdomain_recon:
@@ -0,0 +1,12 @@
1
+ type: scan
2
+ name: url
3
+ description: URL scan
4
+ profile: default
5
+ input_types:
6
+ - url
7
+ workflows:
8
+ url_crawl:
9
+ url_nuclei:
10
+ url_vuln:
11
+ targets_:
12
+ - url.url
File without changes
@@ -0,0 +1,28 @@
1
+ type: workflow
2
+ name: cidr_recon
3
+ alias: cidrrec
4
+ description: Local network recon
5
+ tags: [recon, cidr, network]
6
+ input_types:
7
+ - cidr_range
8
+ tasks:
9
+ mapcidr:
10
+ description: Find CIDR range IPs
11
+ fping:
12
+ description: Check for alive IPs
13
+ targets_: ip.ip
14
+ naabu:
15
+ description: Scan alive IPs' ports
16
+ targets_:
17
+ - type: ip
18
+ field: ip
19
+ condition: item.alive
20
+ httpx:
21
+ description: Probe HTTP services on open ports
22
+ targets_:
23
+ - type: port
24
+ field: '{ip}:{port}'
25
+ results:
26
+ - type: ip
27
+ condition: item.alive
28
+ - type: url
@@ -0,0 +1,11 @@
1
+ type: workflow
2
+ name: code_scan
3
+ alias: codescan
4
+ description: Code vulnerability scan
5
+ tags: [vuln, code]
6
+ input_types:
7
+ - path
8
+ - docker_image_name
9
+ tasks:
10
+ grype:
11
+ description: Run code vulnerability scan
@@ -0,0 +1,41 @@
1
+ type: workflow
2
+ name: host_recon
3
+ alias: hostrec
4
+ description: Host recon
5
+ tags: [recon, network, http]
6
+ input_types:
7
+ - host
8
+ - cidr_range
9
+ tasks:
10
+ naabu:
11
+ description: Find open ports
12
+ nmap:
13
+ description: Search for vulnerabilities on open ports
14
+ targets_: port.host
15
+ ports_: port.port
16
+ httpx:
17
+ description: Probe HTTP services on open ports
18
+ targets_:
19
+ - type: port
20
+ field: '{host}:{port}'
21
+ condition: item._source == 'nmap'
22
+ _group:
23
+ nuclei/network:
24
+ description: Scan network and SSL vulnerabilities
25
+ tags: [network, ssl]
26
+ nuclei/url:
27
+ description: Search for vulnerabilities on alive HTTP services
28
+ exclude_tags: [network, ssl, file, dns, osint, token-spray, headers]
29
+ targets_:
30
+ - type: url
31
+ field: url
32
+ condition: item.status_code != 0
33
+ results:
34
+ - type: port
35
+ condition: item._source == 'nmap'
36
+
37
+ - type: vulnerability
38
+ # condition: item.confidence == 'high'
39
+
40
+ - type: url
41
+ condition: item.status_code != 0
@@ -0,0 +1,34 @@
1
+ type: workflow
2
+ name: port_scan
3
+ alias: pscan
4
+ description: Port scan
5
+ tags: [recon, network, http, vuln]
6
+ input_types:
7
+ - host
8
+ tasks:
9
+ naabu:
10
+ description: Find open ports
11
+ nmap:
12
+ description: Search for vulnerabilities on open ports
13
+ targets_: port.host
14
+ ports_: port.port
15
+ _group:
16
+ searchsploit:
17
+ description: Search for related exploits
18
+ targets_:
19
+ - type: port
20
+ field: '{host}~{service_name}'
21
+ condition: item._source == 'nmap' and len(item.service_name.split('/')) > 1
22
+ httpx:
23
+ description: Probe HTTP services on open ports
24
+ targets_:
25
+ - type: port
26
+ field: '{host}:{port}'
27
+ condition: item._source == 'nmap'
28
+ results:
29
+ - type: port
30
+
31
+ - type: url
32
+ condition: item.status_code != 0
33
+
34
+ - type: vulnerability
@@ -0,0 +1,33 @@
1
+ type: workflow
2
+ name: subdomain_recon
3
+ alias: subrec
4
+ description: Subdomain discovery
5
+ tags: [recon, dns, takeovers]
6
+ input_types:
7
+ - host
8
+ tasks:
9
+ subfinder:
10
+ description: List subdomains (passive)
11
+ # TODO: add subdomain bruteforcers
12
+ # gobuster:
13
+ # input: vhost
14
+ # domain_:
15
+ # - target.name
16
+ # wordlist: /usr/share/seclists/Discovery/DNS/combined_subdomains.txt
17
+ # gobuster:
18
+ # input: dns
19
+ # domain_:
20
+ # - target.name
21
+ # wordlist: /usr/share/seclists/Discovery/DNS/combined_subdomains.txt
22
+ _group:
23
+ nuclei:
24
+ description: Check for subdomain takeovers
25
+ targets_:
26
+ - target.name
27
+ - subdomain.host
28
+ tags: [takeover, dns]
29
+ httpx:
30
+ description: Run HTTP probes on subdomains
31
+ targets_:
32
+ - target.name
33
+ - subdomain.host
@@ -0,0 +1,29 @@
1
+ type: workflow
2
+ name: url_crawl
3
+ alias: urlcrawl
4
+ description: URL crawl (fast)
5
+ tags: [http, crawl]
6
+ options:
7
+ match_codes: 200,204,301,302,307,401,403,405,500
8
+ input_types:
9
+ - url
10
+ tasks:
11
+ _group:
12
+ # gau:
13
+ # description: Search for passive URLs
14
+ # gospider:
15
+ # description: Crawl URLs
16
+ cariddi:
17
+ description: Hunt URLs patterns
18
+ katana:
19
+ description: Crawl URLs
20
+ httpx:
21
+ description: Run HTTP probes on crawled URLs
22
+ targets_:
23
+ type: url
24
+ field: url
25
+ results:
26
+ - type: url
27
+ condition: item._source == 'httpx'
28
+
29
+ - type: tag
@@ -0,0 +1,29 @@
1
+ type: workflow
2
+ name: url_dirsearch
3
+ alias: dirfind
4
+ description: URL directory search
5
+ tags: [http, dir]
6
+ input_types:
7
+ - url
8
+ tasks:
9
+ ffuf:
10
+ description: Search for HTTP directories
11
+ wordlist: /usr/share/seclists/Discovery/Web-Content/directory-list-2.3-small.txt
12
+ targets_:
13
+ - type: target
14
+ field: '{name}/FUZZ'
15
+ cariddi:
16
+ description: Crawl HTTP directories for content
17
+ targets_:
18
+ - target.name
19
+ - url.url
20
+ httpx:
21
+ description: Run HTTP probes on crawled URLs
22
+ follow_redirects: True
23
+ targets_:
24
+ - type: url
25
+ field: url
26
+ condition: item.status_code == 0
27
+ results:
28
+ - type: url
29
+ condition: item.status_code != 0
@@ -0,0 +1,35 @@
1
+ type: workflow
2
+ name: url_fuzz
3
+ alias: urlfuzz
4
+ description: URL fuzz (slow)
5
+ tags: [http, fuzz]
6
+ input_types:
7
+ - url
8
+ # options:
9
+ # match_codes: 200,204,301,302,307,401,403,405,500
10
+ tasks:
11
+ _group:
12
+ # dirsearch:
13
+ # description: Fuzz URLs
14
+ # feroxbuster:
15
+ # description: Fuzz URLs
16
+ ffuf:
17
+ description: Fuzz URLs
18
+ targets_:
19
+ - type: target
20
+ field: '{name}/FUZZ'
21
+ httpx:
22
+ description: Run HTTP probes on crawled URLs
23
+ targets_:
24
+ type: url
25
+ field: url
26
+ katana:
27
+ description: Run crawler on found directories
28
+ targets_:
29
+ type: url
30
+ field: url
31
+ condition: "'Index of' in item.title"
32
+ results:
33
+ - type: url
34
+ condition: item._source == 'httpx'
35
+ # TODO: add deduplication based on the 'url' field
@@ -0,0 +1,11 @@
1
+ type: workflow
2
+ name: url_nuclei
3
+ alias: url_nuclei
4
+ description: URL vulnerability scan (nuclei)
5
+ tags: [http, nuclei]
6
+ input_types:
7
+ - url
8
+ tasks:
9
+ nuclei:
10
+ description: Search for HTTP vulns
11
+ exclude_tags: [network, ssl, file, dns, osint, token-spray, headers]
@@ -0,0 +1,55 @@
1
+ type: workflow
2
+ name: url_vuln
3
+ alias: url_vuln
4
+ description: URL vulnerability scan (gf, dalfox)
5
+ tags: [http, vulnerability]
6
+ input_types:
7
+ - url
8
+ tasks:
9
+ _group:
10
+ gf/xss:
11
+ description: Hunt XSS params
12
+ pattern: xss
13
+ gf/lfi:
14
+ description: Hunt LFI params
15
+ pattern: lfi
16
+ gf/ssrf:
17
+ description: Hunt SSRF params
18
+ pattern: ssrf
19
+ gf/rce:
20
+ description: Hunt RCE params
21
+ pattern: rce
22
+ gf/interestingparams:
23
+ description: Hunt interest params
24
+ pattern: interestingparams
25
+ gf/idor:
26
+ description: Hunt Idor params
27
+ pattern: idor
28
+ gf/debug_logic:
29
+ description: Hunt debug params
30
+ pattern: debug_logic
31
+
32
+ dalfox:
33
+ description: Attack XSS vulnerabilities
34
+ targets_:
35
+ - type: tag
36
+ field: match
37
+ condition: item._source == "gf"
38
+
39
+ # TODO: Add support for SQLMap
40
+ # sqlmap:
41
+ # description: Attack SQLI vulnerabilities
42
+ # targets_:
43
+ # - type: tag
44
+ # field: match
45
+ # condition: item.name in ['sqli']
46
+
47
+ # TODO: Make this work, need transform functions to replace a parameter fetched dynamically by the keyword 'FUZZ'
48
+ # ffuf:
49
+ # description: Attack LFI vulnerabilities
50
+ # targets_:
51
+ # - type: tag
52
+ # field: match
53
+ # transform:
54
+ # qsreplace: FUZZ
55
+ # condition: item.name in ['lfi']
@@ -0,0 +1,10 @@
1
+ type: workflow
2
+ name: user_hunt
3
+ alias: userhunt
4
+ description: User account search
5
+ tags: [user_account]
6
+ input_types:
7
+ - username
8
+ tasks:
9
+ maigret:
10
+ description: Hunt user accounts
@@ -0,0 +1,14 @@
1
+ type: workflow
2
+ name: wordpress
3
+ alias: wordpress
4
+ description: WordPress vulnerability scan
5
+ tags: [http, wordpress, vulnerability]
6
+ input_types:
7
+ - url
8
+ tasks:
9
+ _group:
10
+ nuclei:
11
+ description: Nuclei Wordpress scan
12
+ tags: wordpress
13
+ wpscan:
14
+ description: WPScan