secator 0.15.1__py3-none-any.whl → 0.16.0__py3-none-any.whl

This diff compares the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.

Potentially problematic release. This version of secator might be problematic; see the registry listing for more details.

Files changed (106)
  1. secator/celery.py +40 -24
  2. secator/celery_signals.py +71 -68
  3. secator/celery_utils.py +43 -27
  4. secator/cli.py +520 -280
  5. secator/cli_helper.py +394 -0
  6. secator/click.py +87 -0
  7. secator/config.py +67 -39
  8. secator/configs/profiles/http_headless.yaml +6 -0
  9. secator/configs/profiles/http_record.yaml +6 -0
  10. secator/configs/profiles/tor.yaml +1 -1
  11. secator/configs/scans/domain.yaml +4 -2
  12. secator/configs/scans/host.yaml +1 -1
  13. secator/configs/scans/network.yaml +1 -4
  14. secator/configs/scans/subdomain.yaml +13 -1
  15. secator/configs/scans/url.yaml +1 -2
  16. secator/configs/workflows/cidr_recon.yaml +6 -4
  17. secator/configs/workflows/code_scan.yaml +1 -1
  18. secator/configs/workflows/host_recon.yaml +29 -3
  19. secator/configs/workflows/subdomain_recon.yaml +67 -16
  20. secator/configs/workflows/url_crawl.yaml +44 -15
  21. secator/configs/workflows/url_dirsearch.yaml +4 -4
  22. secator/configs/workflows/url_fuzz.yaml +25 -17
  23. secator/configs/workflows/url_params_fuzz.yaml +7 -0
  24. secator/configs/workflows/url_vuln.yaml +33 -8
  25. secator/configs/workflows/user_hunt.yaml +2 -1
  26. secator/configs/workflows/wordpress.yaml +5 -3
  27. secator/cve.py +718 -0
  28. secator/decorators.py +0 -454
  29. secator/definitions.py +49 -30
  30. secator/exporters/_base.py +2 -2
  31. secator/exporters/console.py +2 -2
  32. secator/exporters/table.py +4 -3
  33. secator/exporters/txt.py +1 -1
  34. secator/hooks/mongodb.py +2 -4
  35. secator/installer.py +77 -49
  36. secator/loader.py +116 -0
  37. secator/output_types/_base.py +3 -0
  38. secator/output_types/certificate.py +63 -63
  39. secator/output_types/error.py +4 -5
  40. secator/output_types/info.py +2 -2
  41. secator/output_types/ip.py +3 -1
  42. secator/output_types/progress.py +5 -9
  43. secator/output_types/state.py +17 -17
  44. secator/output_types/tag.py +3 -0
  45. secator/output_types/target.py +10 -2
  46. secator/output_types/url.py +19 -7
  47. secator/output_types/vulnerability.py +11 -7
  48. secator/output_types/warning.py +2 -2
  49. secator/report.py +27 -15
  50. secator/rich.py +18 -10
  51. secator/runners/_base.py +446 -233
  52. secator/runners/_helpers.py +133 -24
  53. secator/runners/command.py +182 -102
  54. secator/runners/scan.py +33 -5
  55. secator/runners/task.py +13 -7
  56. secator/runners/workflow.py +105 -72
  57. secator/scans/__init__.py +2 -2
  58. secator/serializers/dataclass.py +20 -20
  59. secator/tasks/__init__.py +4 -4
  60. secator/tasks/_categories.py +39 -27
  61. secator/tasks/arjun.py +9 -5
  62. secator/tasks/bbot.py +53 -21
  63. secator/tasks/bup.py +19 -5
  64. secator/tasks/cariddi.py +24 -3
  65. secator/tasks/dalfox.py +26 -7
  66. secator/tasks/dirsearch.py +10 -4
  67. secator/tasks/dnsx.py +70 -25
  68. secator/tasks/feroxbuster.py +11 -3
  69. secator/tasks/ffuf.py +42 -6
  70. secator/tasks/fping.py +20 -8
  71. secator/tasks/gau.py +3 -1
  72. secator/tasks/gf.py +3 -3
  73. secator/tasks/gitleaks.py +2 -2
  74. secator/tasks/gospider.py +7 -1
  75. secator/tasks/grype.py +5 -4
  76. secator/tasks/h8mail.py +2 -1
  77. secator/tasks/httpx.py +18 -5
  78. secator/tasks/katana.py +35 -15
  79. secator/tasks/maigret.py +4 -4
  80. secator/tasks/mapcidr.py +3 -3
  81. secator/tasks/msfconsole.py +4 -4
  82. secator/tasks/naabu.py +2 -2
  83. secator/tasks/nmap.py +12 -14
  84. secator/tasks/nuclei.py +3 -3
  85. secator/tasks/searchsploit.py +4 -5
  86. secator/tasks/subfinder.py +2 -2
  87. secator/tasks/testssl.py +264 -263
  88. secator/tasks/trivy.py +5 -5
  89. secator/tasks/wafw00f.py +21 -3
  90. secator/tasks/wpprobe.py +90 -83
  91. secator/tasks/wpscan.py +6 -5
  92. secator/template.py +218 -104
  93. secator/thread.py +15 -15
  94. secator/tree.py +196 -0
  95. secator/utils.py +131 -123
  96. secator/utils_test.py +60 -19
  97. secator/workflows/__init__.py +2 -2
  98. {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/METADATA +36 -36
  99. secator-0.16.0.dist-info/RECORD +132 -0
  100. secator/configs/profiles/default.yaml +0 -8
  101. secator/configs/workflows/url_nuclei.yaml +0 -11
  102. secator/tasks/dnsxbrute.py +0 -42
  103. secator-0.15.1.dist-info/RECORD +0 -128
  104. {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/WHEEL +0 -0
  105. {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/entry_points.txt +0 -0
  106. {secator-0.15.1.dist-info → secator-0.16.0.dist-info}/licenses/LICENSE +0 -0
secator/configs/profiles/http_headless.yaml
@@ -0,0 +1,6 @@
+type: profile
+name: http_headless
+description: "Headless HTTP requests"
+opts:
+  headless: true
+  system_chrome: true
secator/configs/profiles/http_record.yaml
@@ -0,0 +1,6 @@
+type: profile
+name: http_record
+description: "Record HTTP requests / responses and take screenshots"
+opts:
+  screenshot: true
+  store_responses: true
secator/configs/profiles/tor.yaml
@@ -2,4 +2,4 @@ type: profile
 name: tor
 description: "Anonymous scan using Tor network"
 opts:
-  proxy: tor
+  proxy: auto
secator/configs/scans/domain.yaml
@@ -8,11 +8,13 @@ workflows:
   subdomain_recon:
   host_recon:
     targets_:
-    - target.name
+    - type: target
+      field: name
+      condition: target.type == 'host'
     - subdomain.host
   url_crawl:
     targets_:
     - url.url
   url_vuln:
     targets_:
-    - url.url
+    - url.url
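Note that the scan templates above replace the shorthand type.field target references with a structured entry that can carry a per-item condition. A minimal sketch contrasting the two forms, reusing only the keys visible in this diff (the condition is assumed to act as a per-result filter):

workflows:
  host_recon:
    targets_:
    # shorthand form: feed the host field of every subdomain result
    - subdomain.host
    # structured form: feed target.name, but only for targets whose type is 'host'
    - type: target
      field: name
      condition: target.type == 'host'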
secator/configs/scans/host.yaml
@@ -11,4 +11,4 @@ workflows:
     - url.url
   url_vuln:
     targets_:
-    - url.url
+    - url.url
secator/configs/scans/network.yaml
@@ -6,12 +6,9 @@ input_types:
 - cidr_range
 workflows:
   cidr_recon:
-  url_nuclei:
-    targets_:
-    - url.url
   url_crawl:
     targets_:
     - url.url
   url_vuln:
     targets_:
-    - url.url
+    - url.url
secator/configs/scans/subdomain.yaml
@@ -5,4 +5,16 @@ profile: default
 input_types:
 - host
 workflows:
-  subdomain_recon:
+  subdomain_recon:
+  host_recon:
+    targets_:
+    - type: target
+      field: name
+      condition: target.type == 'host'
+    - subdomain.host
+  url_crawl:
+    targets_:
+    - url.url
+  url_vuln:
+    targets_:
+    - url.url
secator/configs/scans/url.yaml
@@ -6,7 +6,6 @@ input_types:
 - url
 workflows:
   url_crawl:
-  url_nuclei:
   url_vuln:
     targets_:
-    - url.url
+    - url.url
secator/configs/workflows/cidr_recon.yaml
@@ -5,24 +5,26 @@ description: Local network recon
 tags: [recon, cidr, network]
 input_types:
 - cidr_range
+- ip
+
 tasks:
   mapcidr:
     description: Find CIDR range IPs
+
   fping:
     description: Check for alive IPs
     targets_: ip.ip
+
   nmap:
     description: Scan alive IPs' ports
     targets_:
     - type: ip
       field: ip
       condition: item.alive
+
   httpx:
     description: Probe HTTP services on open ports
+    tech_detect: True
     targets_:
     - type: port
       field: '{ip}:{port}'
-    results:
-    - type: ip
-      condition: item.alive
-    - type: url
secator/configs/workflows/code_scan.yaml
@@ -8,4 +8,4 @@ input_types:
 - docker_image_name
 tasks:
   grype:
-    description: Run code vulnerability scan
+    description: Run code vulnerability scan
secator/configs/workflows/host_recon.yaml
@@ -6,36 +6,61 @@ tags: [recon, network, http]
 input_types:
 - host
 - cidr_range
+
+options:
+  nuclei:
+    is_flag: True
+    default: False
+    help: Run nuclei scans (slow)
+
+  full:
+    is_flag: True
+    default: False
+    help: "Run full port scan (default: top 100 ports)"
+
 tasks:
   naabu:
+    description: Find open ports
+    if: opts.ports or not opts.full
+
+  naabu/full:
     description: Find open ports
     ports: "-" # scan all ports
+    if: opts.full and not opts.ports
+
   nmap:
     description: Search for vulnerabilities on open ports
     version_detection: True
     script: vulners
     targets_:
     - port.host
-    - target.name
     ports_:
-    - port.port
-    ports: "-" # default if no port found by naabu
+    - type: port
+      field: port
+      condition: port.host in targets
+
   _group/1:
     httpx:
       description: Probe HTTP services on open ports
+      tech_detect: True
       targets_:
       - type: port
         field: '{host}:{port}'
+
     searchsploit:
       description: Search for related exploits
       targets_:
       - type: port
         field: service_name
         condition: len(item.service_name.split('/')) > 1
+
   _group/2:
     nuclei/network:
       description: Scan network and SSL vulnerabilities
       tags: [network, ssl]
+      exclude_tags: []
+      if: opts.nuclei
+
     nuclei/url:
       description: Search for vulnerabilities on alive HTTP services
       exclude_tags: [network, ssl, file, dns, osint, token-spray, headers]
@@ -43,3 +68,4 @@ tasks:
       - type: url
         field: url
         condition: item.status_code != 0
+      if: opts.nuclei
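host_recon now declares workflow-level options and gates tasks with if: expressions over opts, using naabu/full as a variant of the naabu task with different settings. A minimal sketch of that pattern in a hypothetical workflow (the my_recon name and the deep option are illustrative; only keys that appear in this diff are used, and it is assumed that workflow options are exposed to if: expressions as opts.<name>):

type: workflow
name: my_recon                   # hypothetical workflow name
options:
  deep:
    is_flag: True
    default: False
    help: Run the slower full port scan
tasks:
  naabu:                         # quick scan unless the deep flag is set
    description: Find open ports
    if: not opts.deep
  naabu/full:                    # variant of the same task, scanning all ports
    description: Find open ports
    ports: "-"
    if: opts.deep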
secator/configs/workflows/subdomain_recon.yaml
@@ -5,29 +5,80 @@ description: Subdomain discovery
 tags: [recon, dns, takeovers]
 input_types:
 - host
+
+options:
+  probe_http:
+    is_flag: True
+    help: Probe domain and subdomains (HTTP)
+    default: True
+
+  probe_dns:
+    is_flag: True
+    help: Probe domain and subdomains (DNS)
+    default: False
+
+  brute_http:
+    is_flag: True
+    help: Bruteforce subdomains with HTTP Host header (ffuf)
+    short: bhttp
+    default: False
+
+  brute_dns:
+    is_flag: True
+    help: Bruteforce subdomains with DNS queries (dnsx)
+    short: bdns
+    default: False
+
 tasks:
-  subfinder:
-    description: List subdomains (passive)
-  # TODO: add subdomain bruteforcers
-  # gobuster:
-  #   input: vhost
-  #   domain_:
-  #   - target.name
-  #   wordlist: combined_subdomains
-  # gobuster:
-  #   input: dns
-  #   domain_:
-  #   - target.name
-  #   wordlist: combined_subdomains
-  _group:
+  _group/1:
+    subfinder:
+      description: List subdomains (passive)
+
+    dnsx/brute:
+      description: Bruteforce subdomains (DNS)
+      subdomains_only: True
+      wordlist: combined_subdomains
+      if: opts.brute_dns
+
+    httpx:
+      description: Run HTTP probe on domain
+      tech_detect: True
+      targets_:
+      - target.name
+      if: opts.probe_http or opts.brute_http
+
+  _group/2:
+    dnsx/probe:
+      description: Probe DNS records on subdomains
+      subdomains_only: True
+      wordlist: False
+      targets_:
+      - subdomain.host
+      if: opts.probe_dns
+
     nuclei:
       description: Check for subdomain takeovers
       targets_:
      - target.name
      - subdomain.host
-      tags: [takeover, dns]
+      tags: [takeover]
+
+    ffuf:
+      description: Bruteforce subdomains (Host header)
+      header: "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.0.0 Safari/537.36"
+      fuzz_host_header: True
+      auto_calibration: True
+      wordlist: combined_subdomains
+      stop_on_error: True
+      targets_:
+      - type: url
+        field: url
+      if: opts.brute_http
+
     httpx:
       description: Run HTTP probes on subdomains
+      tech_detect: True
       targets_:
      - target.name
-      - subdomain.host
+      - subdomain.host
+      if: opts.probe_http
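subdomain_recon reorganizes its tasks into named groups (_group/1, _group/2) and adds boolean options with short aliases (bhttp, bdns). A sketch of the grouping pattern, under the assumption suggested by the layout above that tasks inside a group are scheduled as one batch while groups keep their relative order (group and option names here are illustrative):

tasks:
  _group/enumerate:              # hypothetical group name; members form one batch
    subfinder:
      description: List subdomains (passive)
    dnsx/brute:
      wordlist: combined_subdomains
      if: opts.brute_dns         # only when the brute_dns / bdns flag is set
  _group/probe:                  # second group (assumed to run after the first)
    httpx:
      targets_:
      - subdomain.host
      if: opts.probe_http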
secator/configs/workflows/url_crawl.yaml
@@ -3,27 +3,56 @@ name: url_crawl
 alias: urlcrawl
 description: URL crawl (fast)
 tags: [http, crawl]
-options:
-  match_codes: 200,204,301,302,307,401,403,405,500
 input_types:
 - url
+
+options:
+  crawlers:
+    type: list
+    help: Crawlers to use (katana, gospider)
+    default: ['gau', 'katana']
+    internal: True
+
+  hunt_patterns:
+    is_flag: True
+    help: Hunt patterns in HTTP responses (cariddi)
+    default: True
+    short: hp
+
+default_options:
+  match_codes: 200,204,301,302,307,401,403,405,500
+
 tasks:
-  _group:
-    # gau:
-    #   description: Search for passive URLs
-    # gospider:
-    #   description: Crawl URLs
-    cariddi:
-      description: Hunt URLs patterns
+  _group/crawl:
+    gau:
+      description: Search for passive URLs
+      if: "'gau' in opts.crawlers"
+
     katana:
       description: Crawl URLs
+      if: "'katana' in opts.crawlers"
+
+    gospider:
+      description: Crawl URLs
+      if: "'gospider' in opts.crawlers"
+
+    cariddi:
+      description: Hunt URLs patterns
+      info: True
+      secrets: True
+      errors: True
+      juicy_extensions: 1
+      juicy_endpoints: True
+      targets_:
+      - target.name
+      - url.url
+      if: opts.hunt_patterns
+
   httpx:
     description: Run HTTP probes on crawled URLs
+    tech_detect: True
     targets_:
-    - type: url
+    - target.name
+    - type: url
       field: url
-    results:
-    - type: url
-      condition: item._source == 'httpx'
-
-    - type: tag
+      condition: url.status_code != 0
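url_crawl replaces the commented-out crawler tasks with tasks selected through a list-valued crawlers option (marked internal: True) and membership tests in if: expressions. A sketch of the selection pattern, with an illustrative option name and only keys taken from this diff:

options:
  engines:                       # hypothetical list-valued option
    type: list
    default: ['katana']
    help: Crawlers to enable (gau, katana, gospider)
tasks:
  _group/crawl:
    gau:
      if: "'gau' in opts.engines"
    katana:
      if: "'katana' in opts.engines"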
secator/configs/workflows/url_dirsearch.yaml
@@ -5,6 +5,7 @@ description: URL directory search
 tags: [http, dir]
 input_types:
 - url
+
 tasks:
   ffuf:
     description: Search for HTTP directories
@@ -12,6 +13,7 @@ tasks:
     targets_:
     - type: target
       field: '{name}/FUZZ'
+
   cariddi:
     description: Crawl HTTP directories for content
     info: True
@@ -22,13 +24,11 @@ tasks:
     targets_:
     - target.name
     - url.url
+
   httpx:
     description: Run HTTP probes on crawled URLs
-    follow_redirects: True
+    tech_detect: True
     targets_:
     - type: url
       field: url
       condition: item.status_code == 0
-    results:
-    - type: url
-      condition: item.status_code != 0
secator/configs/workflows/url_fuzz.yaml
@@ -5,31 +5,39 @@ description: URL fuzz (slow)
 tags: [http, fuzz]
 input_types:
 - url
-# options:
-#   match_codes: 200,204,301,302,307,401,403,405,500
+
+default_options:
+  match_codes: 200,204,301,302,307,401,403,405,500
+
+options:
+  fuzzers:
+    type: list
+    required: True
+    help: Fuzzers to use (dirsearch, feroxbuster, ffuf)
+    default: ['ffuf']
+
 tasks:
-  _group:
-    # dirsearch:
-    #   description: Fuzz URLs
-    # feroxbuster:
-    #   description: Fuzz URLs
+  _group/fuzz:
+    dirsearch:
+      description: Fuzz URLs
+      if: "'dirsearch' in opts.fuzzers"
+
+    feroxbuster:
+      description: Fuzz URLs
+      if: "'feroxbuster' in opts.fuzzers"
+
     ffuf:
       description: Fuzz URLs
+      if: "'ffuf' in opts.fuzzers"
       targets_:
      - type: target
        field: '{name}/FUZZ'
+
  httpx:
    description: Run HTTP probes on crawled URLs
+    tech_detect: True
    targets_:
      type: url
      field: url
-  katana:
-    description: Run crawler on found directories
-    targets_:
-      type: url
-      field: url
-      condition: "'Index of' in item.title"
-    results:
-    - type: url
-      condition: item._source == 'httpx'
-  # TODO: add deduplication based on the 'url' field
+      condition: url.status_code != 0 or opts.screenshot or opts.headless
+    # enrich: true # TODO: add enrich capabilities
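url_fuzz keeps the old match_codes value but moves it from options: into a new default_options: block, alongside a required list option for choosing fuzzers. The split presumably separates pre-set task option values from user-facing workflow inputs; a sketch under that assumption:

default_options:
  match_codes: 200,204,301,302   # pre-set task option (assumed overridable at run time)
options:
  fuzzers:                       # user-facing workflow input
    type: list
    required: True
    default: ['ffuf']
    help: Fuzzers to use (dirsearch, feroxbuster, ffuf)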
secator/configs/workflows/url_params_fuzz.yaml
@@ -5,9 +5,15 @@ description: Extract parameters from an URL and fuzz them
 tags: [http, fuzz]
 input_types:
 - url
+
 tasks:
   arjun:
     description: Extract parameters from URLs
+    targets_:
+    - type: target
+      field: name
+      condition: "'?' not in target.name"
+
   ffuf:
     description: Fuzz URL params
     wordlist: https://raw.githubusercontent.com/danielmiessler/SecLists/refs/heads/master/Discovery/Web-Content/burp-parameter-names.txt
@@ -17,6 +23,7 @@ tasks:
     - type: url
       field: url
       condition: item._source.startswith('arjun')
+
   httpx:
     description: Probe fuzzed URLs
     targets_:
secator/configs/workflows/url_vuln.yaml
@@ -5,36 +5,61 @@ description: URL vulnerability scan (gf, dalfox)
 tags: [http, vulnerability]
 input_types:
 - url
+
+options:
+  nuclei:
+    is_flag: True
+    default: False
+    help: Run nuclei on tagged URLs (slow)
+
 tasks:
-  _group:
+  _group/pattern_analysis:
    gf/xss:
      description: Hunt XSS params
      pattern: xss
+
    gf/lfi:
      description: Hunt LFI params
      pattern: lfi
+
    gf/ssrf:
      description: Hunt SSRF params
      pattern: ssrf
+
    gf/rce:
      description: Hunt RCE params
      pattern: rce
+
    gf/interestingparams:
      description: Hunt interest params
      pattern: interestingparams
+
    gf/idor:
      description: Hunt Idor params
      pattern: idor
+
    gf/debug_logic:
      description: Hunt debug params
      pattern: debug_logic
 
-  dalfox:
-    description: Attack XSS vulnerabilities
-    targets_:
-    - type: tag
-      field: match
-      condition: item._source.startswith("gf")
+  _group/vuln_scan:
+    dalfox:
+      description: Attack XSS vulnerabilities
+      targets_:
+      - type: tag
+        field: match
+        condition: item._source.startswith("gf")
+
+    nuclei:
+      description: Search for HTTP vulns
+      exclude_tags: [network, ssl, file, dns, osint, token-spray, headers]
+      targets_:
+      - type: target
+        field: name
+      - type: tag
+        field: match
+        condition: item._source.startswith("gf")
+      if: opts.nuclei
 
 # TODO: Add support for SQLMap
 # sqlmap:
@@ -52,4 +77,4 @@ tasks:
 #   field: match
 #   transform:
 #     qsreplace: FUZZ
-#     condition: item.name in ['lfi']
+#     condition: item.name in ['lfi']
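url_vuln now feeds both dalfox and the optional nuclei task from the tag results emitted by the gf pattern tasks, filtered on the producing task through item._source. A sketch of chaining one task's tagged matches into another, using only keys shown above:

tasks:
  gf/xss:
    pattern: xss                               # emits tag results sourced from gf
  dalfox:
    targets_:
    - type: tag                                # consume tag results ...
      field: match                             # ... taking their match field as targets
      condition: item._source.startswith("gf") # ... only tags produced by the gf tasks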
secator/configs/workflows/user_hunt.yaml
@@ -5,6 +5,7 @@ description: User account search
 tags: [user_account]
 input_types:
 - username
+
 tasks:
   maigret:
-    description: Hunt user accounts
+    description: Hunt user accounts
secator/configs/workflows/wordpress.yaml
@@ -5,13 +5,15 @@ description: Wordpress vulnerability scan
 tags: [http, wordpress, vulnerability]
 input_types:
 - url
+
 tasks:
-  _group:
+  _group/hunt_wordpress:
    nuclei:
      description: Nuclei Wordpress scan
-      tags: wordpress
+      tags: [wordpress]
+
    wpscan:
      description: WPScan
+
    wpprobe:
      description: WPProbe
-      tags: wordpress