nexpose_servicenow 0.7.3 → 0.8.0

@@ -1,164 +0,0 @@
- require 'nexpose'
- require 'fileutils'
- require_relative './connection_helper'
- require_relative '../queries/nexpose_queries'
- require_relative '../nx_logger'
- require_relative '../csv_compare'
-
- module NexposeServiceNow
- class NexposeConsoleHelper < ConnectionHelper
- def initialize(url, port, username, password, silo='')
- super(url, port, username, password, silo)
-
- @nsc = connect
- end
-
- def generate_report(query_name, ids, id_type, output_dir, query_options={})
- output_dir = File.expand_path(output_dir.to_s)
-
- #A single report doesn't use site filters
- ids = [-1] if NexposeQueries.single_report?(query_name)
-
- #If running latest_scans, send up information
- if query_name == 'latest_scans'
- @log.on_connect(@url, @port, @nsc.session_id, '{}')
- end
-
- query_options[:cvss_v] = get_cvss_version_strings(query_options[:cvss_v3])
-
- ids.each do |id|
- report_name = self.class.get_report_name(query_name, id)
- clean_up_reports(report_name)
-
- delta_options = create_query_options(query_options, id)
-
- query = NexposeQueries.send(query_name, delta_options)
-
- report_id = generate_config(query, report_name, [id], id_type)
-
- run_report(report_id, report_name)
- local_report_name = save_report(report_name, report_id, output_dir)
-
- if NexposeQueries.csv_diff_required?(query_name)
- @log.log_message "Calculating diff for #{local_report_name}..."
- CsvCompare.update_report_with_diff(local_report_name,
- NexposeQueries.query_keys(query_name))
- end
- end
-
- nil
- end
-
- def clean_up_reports(report_name)
- reports = @nsc.list_reports
- reports.select! { |r| r.name.start_with? report_name }
- reports.each { |r| @nsc.delete_report_config(r.config_id) }
- end
-
- def generate_config(query, report_name, ids, id_type)
- @nsc = connect
- @log.log_message "Generating report config with name #{report_name}..."
- report_config = Nexpose::ReportConfig.new(report_name, nil, 'sql')
- report_config.add_filter('version', '2.0.1')
- report_config.add_filter('query', query)
-
- id_type = id_type.to_s.split('_').last
- ids.each { |id| report_config.add_filter(id_type, id) unless id == -1 }
-
-
- @log.log_message "Saving report config #{report_name}..."
- report_id = report_config.save(@nsc, false)
- @log.log_message "Report #{report_name} saved"
-
- report_id
- end
-
- def run_report(report_id, report_name)
- @log.log_message "Running Report #{report_name}, ID: #{report_id}..."
- @nsc.generate_report(report_id, false)
- wait_for_report(report_id)
- end
-
- def wait_for_report(id)
- wait_until(:fail_on_exceptions => true, :on_timeout => "Report generation timed out. Status: #{r = @nsc.last_report(id); r ? r.status : 'unknown'}") {
- if %w(Failed Aborted Unknown).include?(@nsc.last_report(id).status)
- raise "Report failed to generate! Status <#{@nsc.last_report(id).status}>"
- end
- @nsc.last_report(id).status == 'Generated'
- }
- end
-
- def wait_until(options = {})
- polling_interval = 15
- time_limit = Time.now + @timeout
- loop do
- begin
- val = yield
- return val if val
- rescue Exception => error
- @log.log_error_message("Error during wait_until: #{error}")
- raise error if options[:fail_on_exceptions]
- end
- if Time.now >= time_limit
- if options[:on_timeout]
- @log.log_error_message("#{options[:on_timeout]}. Exiting...")
- raise options[:on_timeout]
- end
- error ||= 'Timed out waiting for condition.'
- @log.log_error_message("#{error}. Exiting...")
- raise error
- end
- sleep polling_interval
- end
- end
-
- def save_report(report_name, report_id, output_dir)
- @log.log_message "Saving report - Name: #{report_name}, ID: #{report_id}..."
- local_file_name = self.class.get_filepath(report_name, output_dir)
- File.delete(local_file_name) if File.exists? local_file_name
-
- #log 'Downloading report...'
- report_details = @nsc.last_report(report_id)
- File.open(local_file_name, 'wb') do |f|
- f.write(@nsc.download(report_details.uri))
- end
-
- begin
- # Refresh the connection
- @nsc = connect
-
- # Got the report, cleanup server-side
- @nsc.delete_report_config(report_id)
- rescue
- @log.log_error_message 'Error deleting report'
- end
-
- local_file_name
- end
-
- def connect
- begin
- connection = Nexpose::Connection.new(@url, @username, @password, @port)
- connection.login
- @log.log_message 'Logged in.'
- rescue Exception => e
- msg = "ERROR: Could not log in. Check log and settings.\n#{e}"
- @log.log_error_message msg
- $stderr.puts msg
- exit -1
- end
-
- connection
- end
-
- # Pulls the collection IDs from Nexpose (e.g. asset groups, sites)
- def collection_ids(collection_type)
- @nsc.send("#{collection_type}s").map { |s| s.id }.sort
- end
-
- def get_cvss_version_strings(use_v3)
- return { choice: '_v3', vector: '_v3', fallback: '' } if use_v3
- { choice: '_v2', vector: '', fallback: '' }
- end
- end
- end
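
Usage sketch (illustrative only, not part of the diff): how the removed NexposeConsoleHelper was typically driven. The host, credentials, query name, ID list, ID-type string, and output directory below are assumed values, and `ConnectionHelper` (not shown in this diff) is assumed to supply `connect`, `@timeout`, `get_report_name`, and `create_query_options`.

```ruby
require 'nexpose_servicenow'

helper = NexposeServiceNow::NexposeConsoleHelper.new(
  'nexpose.example.com', 3780, 'nxadmin', 'secret')

# Writes one CSV per ID into ./output. For 'latest_scans' the ID list is
# ignored (it is a single report) and session details are pushed through
# NxLogger#on_connect before the report runs.
helper.generate_report('latest_scans', [-1], 'site', './output',
                       cvss_v3: false)
```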
@@ -1,102 +0,0 @@
- require_relative './nx_logger'
-
- module NexposeServiceNow
- class HistoricalData
-
- SITE_IDENTIFIER = 'site_id'
- SITE_DELTA_VALUE = 'last_scan_id'
- SITE_TIMESTAMP_VALUE = 'finished'
-
- DEFAULT_TIMESTAMP_VALUE = '1985-01-01 12:00:00'
-
- GROUP_FILE_PATTERN = 'Nexpose-ServiceNow-asset_group*.csv'
-
- def initialize(output_dir)
- local_dir = File.expand_path(output_dir)
- @remote_file = File.join(local_dir, 'Nexpose-ServiceNow-latest_scans.csv')
- @log = NexposeServiceNow::NxLogger.instance
- end
-
- # Filters out irrelevant site IDs
- def filter_ids(site_ids)
- original_site_ids = site_ids.keys
- default_values = ['0', "\"#{DEFAULT_TIMESTAMP_VALUE}\""]
-
- # Reject if the delta value matches a default value
- site_ids.reject! do |site, delta|
- default_values.any? { |v| delta == v }
- end
-
- # Get a list of excluded sites
- excluded = original_site_ids - site_ids.keys
-
- if excluded.count > 0
- @log.log_message("Excluding #{excluded} from old vulns query.")
- end
-
- site_ids
- end
-
- # Replaces scan IDs of sites that aren't to be imported with 0
- def filter_report(site_ids=[])
- @log.log_message 'Filtering report down to sites which will be queried'
-
- remote_csv = load_scan_id_report
- nexpose_ids = site_ids.map(&:to_s)
-
- return remote_csv if nexpose_ids.first == '0'
-
- remote_csv.each do |row|
- unless nexpose_ids.include?(row[SITE_IDENTIFIER])
- row[SITE_DELTA_VALUE] = '0'
- row[SITE_TIMESTAMP_VALUE] = DEFAULT_TIMESTAMP_VALUE
- end
- end
-
- remote_csv
- end
-
- # Reads the downloaded report containing LATEST scan IDs
- def load_scan_id_report
- @log.log_message 'Loading scan data report'
- unless File.exists? @remote_file
- error = 'Latest scan report file could not be found.'
- @log.log_message error
- raise error
- end
- CSV.read(@remote_file, headers: true)
- end
-
- #########################################################
- # Experimental #
- #########################################################
-
- # These should probably return strings that can be mlog'd
- def log_and_print(message)
- puts message
- @log.log_message message unless @log.nil?
- end
-
- def log_and_error(message)
- $stderr.puts "ERROR: #{message}"
- @log.log_error_message message unless @log.nil?
- end
-
- # Deletes all of the CSV files matching the pattern
- def remove_diff_files(output_dir)
- local_path = File.expand_path(output_dir)
- group_csv_files = Dir.glob(File.join(local_path, GROUP_FILE_PATTERN))
-
- group_csv_files.each do |file|
- begin
- File.delete file
- log_and_print "File #{file} deleted."
- rescue Exception => e
- log_and_error "Error removing file:\n#{e}"
- end
- end
- end
- end
- end
-
-
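
Illustrative sketch (not part of the diff) of how the removed HistoricalData class filters the previously downloaded latest-scans CSV; the output directory, site IDs, and output filename are assumed values.

```ruby
require 'csv'
require 'nexpose_servicenow'

historical = NexposeServiceNow::HistoricalData.new('./output')

# Zero out last_scan_id/finished for every site not in the import list,
# then write the filtered table back out for the delta queries to use.
filtered = historical.filter_report([1, 5, 12])
File.write('./output/Nexpose-ServiceNow-latest_scans-filtered.csv',
           filtered.to_csv)
```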
@@ -1,166 +0,0 @@
- require 'fileutils'
- require 'json'
- require 'net/http'
- require 'singleton'
-
- module NexposeServiceNow
- class NxLogger
- include Singleton
- LOG_PATH = "./logs/rapid7_%s.log"
- KEY_FORMAT = "external.integration.%s"
- PRODUCT_FORMAT = "%s_%s"
-
- DEFAULT_LOG = 'integration'
- PRODUCT_RANGE = 4..30
- KEY_RANGE = 3..15
-
- ENDPOINT = '/data/external/statistic/'
-
- def initialize()
- create_calls
- @logger_file = get_log_path @product
- setup_logging(true, 'info')
- end
-
- def setup_statistics_collection(vendor, product_name, gem_version)
- begin
- @statistic_key = get_statistic_key vendor
- @product = get_product product_name, gem_version
- rescue => e
- #Continue
- end
- end
-
- def setup_logging(enabled, log_level = 'info', stdout=false)
- @stdout = stdout
-
- log_message('Logging disabled.') unless enabled || @log.nil?
- @enabled = enabled
- return unless @enabled
-
- @logger_file = get_log_path @product
-
- require 'logger'
- directory = File.dirname(@logger_file)
- FileUtils.mkdir_p(directory) unless File.directory?(directory)
- io = IO.for_fd(IO.sysopen(@logger_file, 'a'), 'a')
- io.autoclose = false
- io.sync = true
- @log = Logger.new(io, 'weekly')
- @log.level = if log_level.to_s.casecmp('info') == 0
- Logger::INFO
- else
- Logger::DEBUG
- end
- log_message("Logging enabled at level <#{log_level}>")
- end
-
- def create_calls
- levels = [:info, :debug, :error, :warn]
- levels.each do |level|
- method_name =
- define_singleton_method("log_#{level.to_s}_message") do |message|
- puts message if @stdout
- @log.send(level, message) unless !@enabled || @log.nil?
- end
- end
- end
-
- def log_message(message)
- log_info_message message
- end
-
- def log_stat_message(message)
- end
-
- def get_log_path(product)
- product.downcase! unless product.nil?
- File.join(File.dirname(__FILE__), LOG_PATH % (product || DEFAULT_LOG))
- end
-
- def get_statistic_key(vendor)
- if vendor.nil? || vendor.length < KEY_RANGE.min
- log_stat_message("Vendor length is below minimum of <#{KEY_RANGE}>")
- return nil
- end
-
- vendor.gsub!('-', '_')
- vendor.slice! vendor.rindex('_') until vendor.count('_') <= 1
-
- vendor.delete! "^A-Za-z0-9\_"
-
- KEY_FORMAT % vendor[0...KEY_RANGE.max].downcase
- end
-
- def get_product(product, version)
- return nil if ((product.nil? || product.empty?) ||
- (version.nil? || version.empty?))
-
- product.gsub!('-', '_')
- product.slice! product.rindex('_') until product.count('_') <= 1
-
- product.delete! "^A-Za-z0-9\_"
- version.delete! "^A-Za-z0-9\.\-"
-
- product = (PRODUCT_FORMAT % [product, version])[0...PRODUCT_RANGE.max]
-
- product.slice! product.rindex(/[A-Z0-9]/i)+1..-1
-
- if product.length < PRODUCT_RANGE.min
- log_stat_message("Product length below minimum <#{PRODUCT_RANGE.min}>.")
- return nil
- end
- product.downcase
- end
-
- def generate_payload(statistic_value='')
- product_name, separator, version = @product.to_s.rpartition('_')
- payload_value = {'version' => version}.to_json
-
- payload = {'statistic-key' => @statistic_key.to_s,
- 'statistic-value' => payload_value,
- 'product' => product_name}
- JSON.generate(payload)
- end
-
- def send(nexpose_address, nexpose_port, session_id, payload)
- header = {'Content-Type' => 'application/json',
- 'nexposeCCSessionID' => session_id,
- 'Cookie' => "nexposeCCSessionID=#{session_id}"}
- req = Net::HTTP::Put.new(ENDPOINT, header)
- req.body = payload
- http_instance = Net::HTTP.new(nexpose_address, nexpose_port)
- http_instance.use_ssl = true
- http_instance.verify_mode = OpenSSL::SSL::VERIFY_NONE
- response = http_instance.start { |http| http.request(req) }
- log_stat_message "Received code #{response.code} from Nexpose console."
- log_stat_message "Received message #{response.msg} from Nexpose console."
- log_stat_message 'Finished sending statistics data to Nexpose.'
-
- response.code
- end
-
- def on_connect(nexpose_address, nexpose_port, session_id, value)
- log_stat_message 'Sending statistics data to Nexpose'
-
- if @product.nil? || @statistic_key.nil?
- log_stat_message('Invalid product name and/or statistics key.')
- log_stat_message('Statistics collection not enabled.')
- return
- end
-
- begin
- payload = generate_payload value
- send(nexpose_address, nexpose_port, session_id, payload)
- rescue => e
- #Let the program continue
- end
- end
-
- #Used by net library for debugging
- def <<(value)
- log_debug_message(value)
- end
-
- end
- end
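
Illustrative sketch of the removed NxLogger singleton, used the way the other removed files call it; the vendor, product, and version strings below are assumed values.

```ruby
require 'nexpose_servicenow'

log = NexposeServiceNow::NxLogger.instance

# Optional: register product details so on_connect can report usage
# statistics back to the Nexpose console.
log.setup_statistics_collection('Rapid7', 'nexpose_servicenow', '0.8.0')

# Re-open the weekly-rotated log file at debug level.
log.setup_logging(true, 'debug')

log.log_message 'Export started'          # info-level shortcut
log.log_error_message 'Report timed out'  # defined dynamically by create_calls
```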
@@ -1,459 +0,0 @@
- require_relative './queries_base'
-
- module NexposeServiceNow
- class NexposeQueries < QueriesBase
- def self.vulnerabilities(options={})
- "SELECT
- concat('R7_', vulnerability_id) as ID,
- cve.ref as CVE,
- cwe.ref as CWE,
- concat('Rapid7 Nexpose') as Source,
- to_char(date_published, 'yyyy-MM-dd hh:mm:ss') as date_published,
- to_char(date_modified, 'yyyy-MM-dd hh:mm:ss') as Last_Modified,
- dvc.category,
- severity as Severity_Rating,
- severity_score as Severity,
- pci_status,
- pci_adjusted_cvss_score as PCI_Severity,
- title as Summary,
- proofAsText(description) as Threat,
- ROUND(riskscore::numeric, 2) as Riskscore,
- coalesce(cvss#{options[:cvss_v][:vector]}_vector,
- cvss#{options[:cvss_v][:fallback]}_vector) as cvss_vector,
- ROUND(coalesce(cvss#{options[:cvss_v][:choice]}_impact_score::numeric,
- cvss#{options[:cvss_v][:fallback]}_impact_score::numeric),
- 2) as Impact_Score,
- ROUND(coalesce(cvss#{options[:cvss_v][:choice]}_exploit_score::numeric,
- cvss#{options[:cvss_v][:fallback]}_exploit_score::numeric),
- 2) as Exploit_Score,
- cvss_access_complexity_id as Access_Complexity,
- cvss_access_vector_id as Access_Vector,
- cvss_authentication_id as Authentication,
- ROUND(coalesce(cvss#{options[:cvss_v][:choice]}_score::numeric,
- cvss#{options[:cvss_v][:fallback]}_score::numeric),
- 2) as Vulnerability_Score,
- cvss_integrity_impact_id as Integrity_Impact,
- cvss_confidentiality_impact_id as Confidentiality_Impact,
- cvss_availability_impact_id as Availability_Impact,
- CAST(CASE
- WHEN exploits > 0
- THEN 1
- ELSE 0
- END AS bit) as Exploitability,
- CAST(CASE
- WHEN malware_kits > 0
- THEN 1
- ELSE 0
- END AS bit) as Malware_Kits,
- array_to_string(sol.solutions, ',', '') as Solution
-
- FROM
- dim_vulnerability
-
- LEFT OUTER JOIN
- (SELECT DISTINCT on(vulnerability_id)
- vulnerability_id,
- dvr.reference as ref
- FROM dim_vulnerability_reference dvr
- WHERE source='CWE'
- GROUP BY dvr.vulnerability_id, dvr.reference
- ) cwe USING (vulnerability_id)
-
- LEFT OUTER JOIN
- (SELECT DISTINCT on (vulnerability_id)
- vulnerability_id,
- dvr.reference as ref
- FROM dim_vulnerability_reference dvr
- WHERE source='CVE'
- GROUP BY dvr.vulnerability_id, dvr.reference
- ) cve USING (vulnerability_id)
-
- LEFT OUTER JOIN(SELECT DISTINCT on (dvc.vulnerability_id) dvc.vulnerability_id, dvc.category_name as category
- FROM dim_vulnerability_category dvc
- GROUP BY dvc.vulnerability_id, dvc.category_name) dvc USING (vulnerability_id)
-
- LEFT OUTER JOIN(SELECT dvr.vulnerability_id, string_agg(dvr.source || ': ' || dvr.reference, '|') as references
- FROM dim_vulnerability_reference dvr
- GROUP BY dvr.vulnerability_id) ref USING (vulnerability_id)
-
- LEFT OUTER JOIN(SELECT vulnerability_id,
- array_agg(solution_id) as solutions
- FROM dim_vulnerability_solution
- GROUP BY vulnerability_id) sol USING (vulnerability_id)
- WHERE date_modified >= '#{options[:vuln_query_date]}'"
- end
-
-
- def self.vulnerability_references(options={})
- "SELECT concat('R7_', vulnerability_id) as ID, dvr.Source, dvr.Reference
- FROM dim_vulnerability
- JOIN
- (SELECT vulnerability_id, dvr.Source, dvr.Reference
- FROM dim_vulnerability_reference dvr) dvr USING (vulnerability_id)
- WHERE date_modified >= '#{options[:vuln_query_date]}'"
- end
-
- def self.vulnerability_category(options={})
- "SELECT concat('R7_', vulnerability_id) as ID, dvc.Category
- FROM dim_vulnerability
- LEFT OUTER JOIN
- (SELECT vulnerability_id, category_name as Category
- FROM dim_vulnerability_category dvc) dvc USING (vulnerability_id)
- WHERE date_modified >= '#{options[:vuln_query_date]}'"
- end
-
- # Filter by site.
- def self.assets(options={})
- "SELECT coalesce(host_name, CAST(dim_asset.asset_id as text)) as Name,
- dim_asset.ip_address,
- dim_asset.mac_address,
- concat('Rapid7 Nexpose') as Discovery_Source,
- CAST(CASE
- WHEN dim_host_type.description = 'Virtual Machine' or dim_host_type.description = 'Hypervisor'
- THEN 1
- ELSE 0
- END AS bit) as Is_Virtual,
- dim_operating_system.description as Operating_System,
- fact_asset.scan_finished as Most_Recent_Discovery,
- dim_asset.asset_id as Nexpose_ID,
- fact_asset.pci_status,
- concat(fact_asset.aggregated_credential_status_id, ' - ', aggregated_credential_status_description) as Credential_Status
-
- FROM dim_asset
- JOIN fact_asset USING (asset_id)
- JOIN dim_aggregated_credential_status USING (aggregated_credential_status_id)
- LEFT OUTER JOIN dim_operating_system on dim_asset.operating_system_id = dim_operating_system.operating_system_id
- LEFT OUTER JOIN dim_host_type USING (host_type_id)"
- end
-
- def self.software_instance(options={})
- "SELECT asset_id as Nexpose_ID, CAST(da.asset_id as text) as Installed_On, ds.name, ds.vendor, ds.family, ds.version, ds.cpe
- FROM fact_asset_scan_software
- LEFT OUTER JOIN (SELECT software_id, name, vendor, family, version, cpe FROM dim_software) ds USING (software_id)
- LEFT OUTER JOIN (SELECT asset_id, host_name FROM dim_asset) da USING (asset_id)
- WHERE scan_id = lastScan(asset_id)"
- end
-
- def self.service_definition(options={})
- "SELECT DISTINCT on(dsf.name, ds.name, dp.name, port)
- dsf.name, ds.name as service_name, dp.name as protocol, port
-
- FROM (SELECT service_id, protocol_id, port, service_fingerprint_id
- FROM fact_asset_scan_service
- GROUP BY asset_id, service_id, protocol_id, port, service_fingerprint_id
- HAVING min(scan_id) > #{options[:delta]} and max(scan_id) = lastScan(asset_id)) fass
- JOIN dim_service ds USING (service_id)
- JOIN dim_protocol dp USING (protocol_id)
- JOIN dim_service_fingerprint dsf USING (service_fingerprint_id)"
- end
-
- # When a service only appears in either the old or latest import,
- # then it needs to be deleted or inserted, respectively.
- def self.service_instance(options={})
- "SELECT asset_id, dsf.name as name,
- ds.name as service_name,
- dp.name as protocol, port,
- CASE
- WHEN scan_id = #{options[:delta]}
- THEN 'old'
- WHEN scan_id = lastScan(asset_id)
- THEN 'new'
- ELSE 'current'
- END as status
-
- FROM (SELECT asset_id, service_id, protocol_id, port, min(scan_id) as scan_id, service_fingerprint_id
- FROM fact_asset_scan_service
- WHERE scan_id = lastScan(asset_id) OR scan_id = #{options[:delta]}
- GROUP BY asset_id, service_id, protocol_id, port, service_fingerprint_id
- HAVING min(scan_id) = max(scan_id)) fass
- JOIN dim_service ds USING (service_id)
- JOIN dim_protocol dp USING (protocol_id)
- JOIN dim_service_fingerprint dsf USING (service_fingerprint_id)
- GROUP BY asset_id, dsf.name, ds.name, dp.name, port, scan_id"
- end
-
- # Need to wipe table each time
- def self.group_accounts(options={})
- 'SELECT asset_id as Nexpose_ID, daga.name as Group_Account_Name
- FROM dim_asset
- JOIN dim_asset_group_account daga USING (asset_id)'
- end
-
- # Need to wipe table each time
- def self.user_accounts(options={})
- 'SELECT da.asset_id as Nexpose_ID, daua.name as User_Account_Name,
- daua.full_name as User_Account_Full_Name
-
- FROM dim_asset da
- JOIN dim_asset_user_account daua USING (asset_id)'
- end
-
- def self.asset_groups(options={})
- 'SELECT asset_group_id, name as asset_group_name, description, dynamic_membership
- FROM dim_asset_group'
- end
-
- def self.asset_group_memberships(options={})
- 'select * from dim_asset_group_asset'
- end
-
- # Need to wipe table each time
- def self.sites(options={})
- 'SELECT asset_id as Nexpose_ID, ds.name as site_name
- FROM dim_asset
- JOIN dim_site_asset dsa USING (asset_id)
- JOIN dim_site ds on dsa.site_id = ds.site_id
- ORDER BY ip_address'
- end
-
- # Need to wipe table each time
- def self.tags(options={})
- 'SELECT asset_id as Nexpose_ID, dt.tag_name
- FROM dim_tag_asset dta
- JOIN dim_tag dt on dta.tag_id = dt.tag_id'
- end
-
- def self.generate_cve_filter(cves)
- return '' if cves == nil || cves.empty?
-
- cves = cves.map { |c| "reference='#{c}'" }.join(' OR ')
-
- "JOIN (SELECT vulnerability_id, reference
- FROM dim_vulnerability_reference
- WHERE #{cves}) dvr USING (vulnerability_id)"
- end
-
- #TODO make sure that for max date first_discovered < date2
- def self.generate_date_filter(dates, table_join=true)
- return '' if dates == nil
-
- # No filters applied, so no need to filter
- return '' if dates.all? { |d| d == nil || d == '' }
-
- min_date = dates.first
- max_date = dates.last
-
- # Version of vulnerable new items
- unless table_join
- filters = []
- filters << "first_discovered >= '#{min_date}'" unless min_date.nil?
- filters << "first_discovered <= '#{max_date}'" unless max_date.nil?
- filters = filters.join(' AND ')
- filters = "WHERE #{filters}"
-
- return filters
- end
-
- date_filters = []
-
- unless min_date.nil?
- date_filters << "scan_finished > '#{min_date}'"
- end
- unless max_date.nil?
- date_filters << "scan_started < '#{max_date}'"
- end
-
- condition = date_filters.join(' AND ')
- "JOIN (SELECT scan_id, asset_id
- FROM fact_asset_scan
- WHERE #{condition}) fas
- ON fas.asset_id = da.asset_ID AND
- fas.scan_id = first_found"
- end
-
- def self.generate_cvss_filter(cvss_range, cvss_strings)
- return '' if cvss_range.nil? || cvss_range.last.nil?
-
- cvss_min = cvss_range.first
- cvss_max = cvss_range.last
-
- # No need to join if not applying a filter
- return '' if cvss_min.to_s == '0' && cvss_max.to_s == '10'
-
- cvss_score = "(coalesce(cvss#{cvss_strings[:choice]}_score,
- cvss#{cvss_strings[:fallback]}_score))"
-
- "JOIN (SELECT vulnerability_id
- FROM dim_vulnerability
- WHERE #{cvss_score} >= #{cvss_min} AND #{cvss_score} <= #{cvss_max}) dv
- USING (vulnerability_id)"
- end
-
- def self.generate_cvss_table(cvss_range, cvss_strings)
- return '' if cvss_range.nil? || cvss_range.last.nil?
-
- cvss_min = cvss_range.first
- cvss_max = cvss_range.last
-
- return '' if cvss_min.to_s == '0' && cvss_max.to_s == '10'
-
- cvss_score = "(coalesce(cvss#{cvss_strings[:choice]}_score,
- cvss#{cvss_strings[:fallback]}_score))"
-
- "vulns_cvss AS (
- SELECT vulnerability_id FROM dim_vulnerability
- WHERE #{cvss_score} >= #{cvss_min} AND #{cvss_score} <= #{cvss_max})"
- end
-
- def self.vulnerable_new_items(options={})
- date_filter = self.generate_date_filter(options[:filters][:date], false)
-
- cvss_table = self.generate_cvss_table(options[:filters][:cvss],
- options[:cvss_v])
- cvss_filter = ''
- if cvss_table != ''
- cvss_table = ",#{cvss_table}"
- cvss_filter = 'WHERE EXISTS (
- SELECT 1 FROM vulns_cvss vc
- WHERE nv.vulnerability_id = vc.vulnerability_id)'
- end
-
- "WITH assets AS (
- SELECT * FROM dim_site_asset
- WHERE site_id=#{options[:site_id]}
- ), previous_scan AS (
- SELECT asset_id, vulnerability_id
- FROM fact_asset_scan_vulnerability_finding
- WHERE scan_id=#{options[:delta]}
- ), new_vulns AS (
- SELECT asset_id, vulnerability_id, vulnerability_instances
- FROM assets a
- JOIN fact_asset_vulnerability_finding favf USING (asset_id)
- WHERE NOT EXISTS (
- SELECT 1
- FROM previous_scan p
- WHERE favf.asset_id = p.asset_id and favf.vulnerability_id = p.vulnerability_id)
- )#{cvss_table}
- ,vuln_instances AS (
- SELECT asset_id, vulnerability_id, vulnerability_instances,
- status_id, proofAsText(proof) as proof, port, protocol_id
- FROM new_vulns nv
- JOIN fact_asset_vulnerability_instance USING (asset_id, vulnerability_id)
- #{cvss_filter}
- )
-
- SELECT asset_id as Configuration_Item,
- TRUE as Active,
- concat('R7_', vulnerability_id) as Vulnerability,
- ip_address as IP_Address,
- first_discovered as First_Found,
- most_recently_discovered as Last_Found,
- vulnerability_instances as Times_Found,
- string_agg(CONCAT('\"', proof ,'\"'), ',') as proof,
- string_agg(DISTINCT port::character, ',') as ports,
- string_agg(DISTINCT dp.description, ',') as protocol,
- array_to_string(dvsol.solution_ids, ',', '') as Solutions,
- string_agg(DISTINCT dvs.description, ',') as Status
- FROM vuln_instances
- JOIN dim_protocol dp USING (protocol_id)
- JOIN dim_asset USING (asset_id)
- JOIN dim_vulnerability_status dvs USING (status_id)
- JOIN (SELECT asset_id, vulnerability_id,
- first_discovered, most_recently_discovered
- FROM fact_asset_vulnerability_age #{date_filter}) fasva USING (asset_id, vulnerability_id)
- LEFT JOIN (SELECT asset_id, vulnerability_id,
- array_agg(DISTINCT solution_id) as solution_ids
- FROM dim_asset_vulnerability_solution
- GROUP BY asset_id, vulnerability_id) dvsol USING (asset_id, vulnerability_id)
- GROUP by asset_id, vulnerability_id, first_discovered, ip_address, most_recently_discovered, vulnerability_instances, dvsol.solution_ids
- "
- end
-
- def self.vulnerable_old_items(options={})
- standard_filter = if options[:id_type] == 'site'
- "MAX(fasv.scan_id) >= #{options[:delta]}"
- else
- "MAX(fasv.scan_id) >= scanAsOf(fasv.asset_id, '#{options[:delta]}') AND
- lastScan(fasv.asset_id) > scanAsOf(fasv.asset_id, '#{options[:delta]}')"
- end
-
- cve_filter = self.generate_cve_filter(options[:filters][:cve])
- date_filter = self.generate_date_filter(options[:filters][:date])
- cvss_filter = self.generate_cvss_filter(options[:filters][:cvss],
- options[:cvss_v])
-
- # Only perform this operation if necessary
- date_field = if date_filter.nil? || date_filter == ''
- ''
- else
- 'MIN(fasv.scan_id) as first_found,'
- end
-
- "SELECT
- CAST(da.asset_id as text) Configuration_Item,
- FALSE as Active,
- concat('R7_', subq.vulnerability_id) as Vulnerability
- FROM (
- SELECT fasv.asset_id, fasv.vulnerability_id,
- #{date_field}
- MAX(fasv.scan_id) as latest_found,
- s.current_scan
- FROM fact_asset_scan_vulnerability_finding fasv
- #{cve_filter}
- #{cvss_filter}
- JOIN (
- SELECT asset_id, lastScan(asset_id) AS current_scan FROM dim_asset
- ) s ON s.asset_id = fasv.asset_id
- GROUP BY fasv.asset_id, fasv.vulnerability_id, s.current_scan
- HAVING MAX(fasv.scan_id) < current_scan
- AND #{standard_filter}
- ) subq
- JOIN dim_asset da ON subq.asset_id = da.asset_id
- #{date_filter}"
-
- end
-
- def self.vulnerability_solutions(options={})
- "SELECT DISTINCT (solution_id)
- solution_id,
- nexpose_id,
- coalesce(NULLIF(CONCAT('\"', proofAsText(fix),'\"'), '\"\"'), 'None') as fix,
- estimate,
- summary,
- solution_type,
- applies_to,
- url,
- coalesce(NULLIF(CONCAT('\"',proofAsText(additional_data),'\"'), '\"\"'), 'None') as additional_data,
- array_to_string(req_solutions, ',', '') as required_solutions,
- array_to_string(super_solutions, ',', '') as superceding_solutions
- FROM dim_solution
- RIGHT OUTER JOIN (
- SELECT DISTINCT (solution_id) solution_id
- FROM (
- SELECT solution_id, vulnerability_id, date_modified
- FROM dim_vulnerability
- LEFT JOIN dim_vulnerability_solution idvs USING (vulnerability_id)
- ) dvs
- WHERE date_modified >= '#{options[:vuln_query_date]}'
- UNION
- SELECT DISTINCT (solution_id) solution_id
- FROM dim_solution
- LEFT JOIN (
- SELECT solution_id, vulnerability_id, date_modified
- FROM dim_vulnerability
- LEFT JOIN dim_vulnerability_solution idvs USING (vulnerability_id)
- ) ndvs USING (solution_id)
- WHERE vulnerability_id IS NULL
- ) dvs USING (solution_id)
- LEFT JOIN (
- SELECT DISTINCT (solution_id) solution_id,
- array_agg(required_solution_id) as req_solutions
- FROM dim_solution_prerequisite
- GROUP BY solution_id
- ) dsp USING (solution_id)
- JOIN (
- SELECT DISTINCT (solution_id) solution_id,
- array_agg(superceding_solution_id) as super_solutions
- FROM dim_solution_highest_supercedence
- GROUP BY solution_id
- ) dshs USING (solution_id)
- ORDER BY solution_id"
- end
-
- def self.latest_scans(options={})
- 'SELECT ds.site_id, ds.name, ds.last_scan_id, dsc.finished
- FROM dim_site ds
- JOIN dim_scan dsc ON ds.last_scan_id = dsc.scan_id'
- end
- end
- end
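
Illustrative sketch of the removed query builders: each is a class method that returns a SQL string for Nexpose's reporting data model. The option keys mirror what the removed console helper passes in; the date value is assumed, and the CVSS hash matches what get_cvss_version_strings(false) produced.

```ruby
require 'nexpose_servicenow'

# CVSS v2 column suffixes, as returned by
# NexposeConsoleHelper#get_cvss_version_strings(false).
cvss = { choice: '_v2', vector: '', fallback: '' }

sql = NexposeServiceNow::NexposeQueries.vulnerabilities(
  vuln_query_date: '2016-01-01 00:00:00',
  cvss_v: cvss)

puts sql  # paste into a Nexpose SQL export report to inspect the output
```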