nexpose_servicenow 0.6.2 → 0.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -63,7 +63,6 @@ module NexposeServiceNow
       else
         chunks << chunk
 
-        # TODO: Make generic?
         #Initialise chunk with this row information
         chunk = { site_id: site_id,
                   start: position,
@@ -3,6 +3,23 @@ require 'tempfile'
 require 'csv-diff'
 require 'csv'
 
+
+class CSVDiff
+  class CSVSource
+    def find_field_indexes(key_fields, field_names)
+      key_fields.map do |field|
+        if field.is_a?(Integer)
+          field
+        else
+          field_names.index{ |field_name| field.to_s.downcase == field_name.downcase } or
+            raise ArgumentError, "Could not locate field '#{field}' in source field names: #{
+              field_names.join(', ')}"
+        end
+      end
+    end
+  end
+end
+
 module NexposeServiceNow
   class CsvCompare
     def self.get_columns(csv_file)
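
The block above monkey-patches the csv-diff gem so that key-field lookup is case-insensitive and a missing field fails with a descriptive ArgumentError instead of a nil index. A minimal standalone sketch of the patched lookup (the header row below is invented for illustration):

    # 'field_names' stands in for a CSV header row.
    field_names = ['IP Address', 'Site_ID', 'Vulnerability']

    # Integer keys pass through; string keys match case-insensitively.
    indexes = ['site_id', 2].map do |field|
      if field.is_a?(Integer)
        field
      else
        field_names.index { |name| field.to_s.downcase == name.downcase } or
          raise ArgumentError, "Could not locate field '#{field}'"
      end
    end
    p indexes  # => [1, 2]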
@@ -0,0 +1,79 @@
+require_relative '../queries/queries_base'
+
+module NexposeServiceNow
+  class ConnectionHelper
+    NOT_IMPL = 'Error: Method not implemented.'
+
+    def initialize(url, port, username, password, silo='')
+      @log = NexposeServiceNow::NxLogger.instance
+      @url = url
+      @port = port
+      @username = username
+      @password = password
+      @silo = silo
+
+      @timeout = 21600
+    end
+
+    def self.get_report_names(query_name, ids)
+      if QueriesBase.single_report?(query_name)
+        return [ id: -1, report_name: get_report_name(query_name) ]
+      end
+
+      ids.map do |id|
+        { id: id.first, report_name: get_report_name(query_name, id.first) }
+      end
+    end
+
+    def self.get_report_name(query_name, id=nil)
+      name = "Nexpose-ServiceNow-#{query_name}"
+      name += "-#{id}" unless QueriesBase.single_report?(query_name) || id.nil?
+      name
+    end
+
+    def create_query_options(query_options, nexpose_id=nil)
+      options = {}
+      options[:vuln_query_date] = query_options[:vuln_query_date]
+      options[:site_id] = nexpose_id
+      options[:id_type] = 'site'
+      options[:filters] = query_options[:filters] || {}
+
+      # Without a nexpose ID, we don't have a specific delta
+      return options if [nil, -1].include? nexpose_id
+
+      if query_options[:delta_values].empty?
+        error_msg = 'No delta values provided. Exiting...'
+        @log.log_error_message error_msg
+        raise error_msg
+      end
+
+      options[:delta] = "#{query_options[:delta_values][nexpose_id] || 0}"
+
+      @log.log_message("Query options: #{options}")
+
+      options
+    end
+
+    def self.get_filepath(report_name, output_dir)
+      path = File.join output_dir, "#{report_name}.csv"
+      File.expand_path path
+    end
+
+    def connect(username, password)
+      raise NOT_IMPL
+    end
+
+    def generate_report(query_name, ids, id_type, output_dir, query_options={})
+      raise NOT_IMPL
+    end
+
+    # Pulls the collection IDs from Nexpose (e.g. asset groups, sites)
+    def collection_ids(collection_type)
+      raise NOT_IMPL
+    end
+
+    def save_report(report_name, report_id, output_dir)
+      raise NOT_IMPL
+    end
+  end
+end
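
This new file introduces an abstract base class: shared naming and option-building logic lives here, while connection-specific hooks (connect, generate_report, collection_ids, save_report) raise NOT_IMPL until a subclass overrides them. A standalone sketch of the pattern (class names below are illustrative, not part of the gem):

    # The base class owns shared logic and raises on unimplemented hooks.
    class HelperSketch
      NOT_IMPL = 'Error: Method not implemented.'

      def self.get_report_name(query_name, id = nil)
        name = "Nexpose-ServiceNow-#{query_name}"
        name += "-#{id}" unless id.nil?
        name
      end

      def connect
        raise NOT_IMPL
      end
    end

    # A concrete helper only has to supply the connection details.
    class ConsoleSketch < HelperSketch
      def connect
        :console_session  # a real subclass returns a live connection
      end
    end

    puts HelperSketch.get_report_name('vulnerabilities', 3)
    # => Nexpose-ServiceNow-vulnerabilities-3
    puts ConsoleSketch.new.connect  # => console_session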
@@ -0,0 +1,134 @@
+require 'pg'
+require_relative './connection_helper'
+require_relative '../queries/warehouse_queries'
+
+module NexposeServiceNow
+  class DataWarehouseHelper < ConnectionHelper
+
+    SSL_MODE = 'prefer'
+    GRANULARITY = 500
+    CHUNK_LIMIT = 4_500_000
+
+    def initialize(url, port, username, password, silo='')
+      super(url, port, username, password, silo)
+    end
+
+    def connect
+      @log.log_message 'Creating DWH connection'
+      PG::Connection.open(:host => @url,
+                          :dbname => @silo,
+                          :port => @port,
+                          :user => @username,
+                          :password => @password,
+                          :sslmode => SSL_MODE)
+    end
+
+    def generate_report(query_name, ids, id_type, output_dir, query_options={})
+      output_dir = File.expand_path(output_dir.to_s)
+
+      #A single report doesn't use site filters
+      ids = [-1] if WarehouseQueries.single_report?(query_name)
+
+      page_size = query_options[:page_size]
+      row_limit = query_options[:row_limit]
+
+      chunks = []
+
+      base_name = "query_#{query_name}"
+
+      ids.each do |id|
+        delta_options = create_query_options(query_options, id)
+        query = WarehouseQueries.send(query_name, delta_options)
+
+        # Open the CSV file to write as pages are retrieved
+        report_name = self.class.get_report_name(query_name, id)
+        @log.log_message "Running query for #{report_name}"
+
+        local_file_name = self.class.get_filepath(report_name, output_dir)
+        csvFile = File.open(local_file_name, 'wb')
+
+        conn = connect
+        conn.transaction do
+          table_name = "query_#{query_name}"
+          table_name = "#{base_name}_#{id}" if id && id.to_i > 0
+
+          @log.log_message "Creating cursor: #{table_name}"
+          conn.exec("DECLARE #{table_name} CURSOR FOR #{query}")
+          res = conn.exec("FETCH #{page_size} FROM #{table_name}")
+
+          # Headers
+          headers = res.fields.join(',')
+          csvFile.puts(headers)
+
+          # Declare the initial chunk
+          chunk = { start: csvFile.pos, length: 0, row_count: 0 }
+          chunk[:site_id] = id unless id.nil? || id == -1
+
+          # Should we overwrite 'res' to release the memory?
+          all_lines = res.values.map { |r| r.join(',') }
+
+          # Table declared, so keep reading pages until it's consumed
+          data_left = true
+          while(data_left)
+            # Lift out a number of lines in a chunk
+            text = all_lines.slice!(0, GRANULARITY)
+
+            # Try to get the next page
+            if text.nil? || text.count == 0
+              res = conn.exec("FETCH #{page_size} FROM #{table_name}")
+              if res.values.count == 0
+                chunks << chunk
+                break
+              end
+              all_lines = res.values.map { |r| r.join(',') }
+              next
+            end
+
+            # Work out the details for this chunk
+            line_count = text.count
+            text = text.join("\n")
+            text << "\n"
+            byte_size = text.bytesize
+
+            # Test whether limits would be exceeded
+            below_row_limit = chunk[:row_count] + line_count <= row_limit
+            below_size_limit = chunk[:length] + byte_size < CHUNK_LIMIT
+
+            if below_size_limit && below_row_limit
+              chunk[:length] += byte_size
+              chunk[:row_count] += line_count
+            else
+              chunks << chunk.dup
+              # Store the current pos since the next chunk isn't written
+              chunk[:start] = csvFile.pos
+              chunk[:length] = byte_size
+              chunk[:row_count] = line_count
+            end
+
+            csvFile.write(text)
+          end
+        end
+
+        conn.finish
+
+        # Close the file for this specific report
+        csvFile.close
+        @log.log_message "Report generated: #{report_name}"
+      end
+
+      chunks.each { |c| c.delete :row_count }
+
+      @log.log_message "Finished running query: #{query_name}"
+      chunks.to_json
+    end
+
+    def collection_ids
+      @log.log_message 'Retrieving list of site IDs'
+      connection = connect
+      query = 'select site_id from dim_site'
+      result = connection.query(query)
+      result.map { |r| r['site_id'] }
+      connection.finish
+    end
+  end
+end
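
The warehouse helper streams query results through a PostgreSQL cursor and records each chunk as a byte range (start/length) into the CSV it writes, so a later consumer can read one chunk at a time without loading the whole file. A standalone sketch of consuming that metadata (file contents and offsets below are invented):

    require 'tempfile'

    # Build a tiny CSV standing in for a generated report.
    csv = Tempfile.new('report')
    csv.write("site_id,ip\n")
    header_end = csv.pos                      # chunks start after the header
    csv.write("3,10.0.0.1\n3,10.0.0.2\n")
    csv.close

    # One entry of the chunk metadata generate_report emits.
    chunk = { start: header_end, length: 11 } # first data row only

    data = File.open(csv.path, 'rb') do |f|
      f.seek(chunk[:start])                   # jump to the chunk's first byte
      f.read(chunk[:length])                  # read exactly one chunk
    end
    puts data  # => "3,10.0.0.1\n"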
@@ -1,108 +1,50 @@
 require 'nexpose'
 require 'fileutils'
-require_relative './queries'
-require_relative './nx_logger'
-require_relative './csv_compare'
+require_relative './connection_helper'
+require_relative '../queries/nexpose_queries'
+require_relative '../nx_logger'
+require_relative '../csv_compare'
 
 module NexposeServiceNow
-  class NexposeHelper
-    def initialize(url, port, username, password)
-      @log = NexposeServiceNow::NxLogger.instance
-      @url = url
-      @port = port
-      @username = username
-      @password = password
+  class NexposeConsoleHelper < ConnectionHelper
+    def initialize(url, port, username, password, silo='')
+      super(url, port, username, password, silo)
 
-      @nsc = connect(username, password)
-
-      @timeout = 21600
-    end
-
-    def self.get_report_names(query_name, ids)
-      unless Queries.multiple_reports?(query_name)
-        return [ id: -1, report_name: get_report_name(query_name) ]
-      end
-
-      ids.map { |id| { id: id, report_name: get_report_name(query_name, id) } }
-    end
-
-    def self.get_report_name(query_name, id=nil)
-      name = "Nexpose-ServiceNow-#{query_name}"
-      name += "-#{id}" if Queries.multiple_reports?(query_name) && !id.nil?
-      name
-    end
-
-    def self.get_filepath(report_name, output_dir)
-      path = File.join output_dir, "#{report_name}.csv"
-      File.expand_path path
+      @nsc = connect
     end
 
-    def create_report(query_name, ids, id_type, output_dir, query_options={})
+    def generate_report(query_name, ids, id_type, output_dir, query_options={})
       output_dir = File.expand_path(output_dir.to_s)
 
       #A single report doesn't use site filters
-      ids = [-1] unless Queries.multiple_reports?(query_name)
+      ids = [-1] if NexposeQueries.single_report?(query_name)
 
       #If running latest_scans, send up information
       if query_name == 'latest_scans'
         @log.on_connect(@url, @port, @nsc.session_id, '{}')
       end
 
-      reports = []
       ids.each do |id|
         report_name = self.class.get_report_name(query_name, id)
         clean_up_reports(report_name)
 
-        # Should use the value from historical_data.rb
-        default_delta = if id_type.to_s == 'asset_group'
-                          '1985-01-01 00:00:00'
-                        elsif id_type.to_s == 'site'
-                          0
-                        end
+        delta_options = create_query_options(query_options, id)
 
-        delta_options = create_query_options(query_options, id, default_delta)
+        query = NexposeQueries.send(query_name, delta_options)
 
-        # TODO: Refactor this into 'create_query_options'
-        delta_options[:site_id] = id
-        delta_options[:id_type] = id_type.to_s
-        delta_options[:filters] = query_options[:filters] || {}
+        report_id = generate_config(query, report_name, [id], id_type)
 
-        min_cvss = if delta_options[:filters][:cvss].nil?
-                     0
-                   else
-                     delta_options[:filters][:cvss].first
-                   end
-
-        query = Queries.send(query_name, delta_options)
-
-        report_id = generate_config(query, report_name, [id], id_type, min_cvss)
-
-        run_report(report_id)
+        run_report(report_id, report_name)
         local_report_name = save_report(report_name, report_id, output_dir)
-        reports << local_report_name
 
-        if Queries.csv_diff_required?(query_name)
+        if NexposeQueries.csv_diff_required?(query_name)
           @log.log_message "Calculating diff for #{local_report_name}..."
           CsvCompare.update_report_with_diff(local_report_name,
-                                             Queries.query_keys(query_name))
+                                             NexposeQueries.query_keys(query_name))
         end
       end
 
-      reports
-    end
-
-    def create_query_options(query_options, nexpose_id=nil, default=0)
-      options = {}
-      options[:vuln_query_date] = query_options[:vuln_query_date]
-
-      return options if nexpose_id.nil? || nexpose_id == -1
-      return 0 if query_options[:delta_values].empty?
-
-      options[:delta] = "#{query_options[:delta_values][nexpose_id] || default}"
-
-      @log.log_message("Query options: #{options}")
-
-      options
+      nil
     end
 
     def clean_up_reports(report_name)
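
With this rewrite the console helper shares its entry point with the warehouse helper: both expose generate_report, and the per-run options (site_id, id_type, filters, delta) are now built centrally by ConnectionHelper#create_query_options. A hedged sketch of the calling convention (host, credentials, query name, and delta values below are all invented):

    helper = NexposeServiceNow::NexposeConsoleHelper.new(
      'nexpose.example.com', 3780, 'user', 'secret')

    helper.generate_report('vulnerabilities',   # query name
                           [3, 5],              # collection IDs
                           :site,               # ID type
                           '/tmp/reports',      # output directory
                           delta_values: { 3 => '120', 5 => '98' })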
@@ -111,8 +53,8 @@ module NexposeServiceNow
       reports.each { |r| @nsc.delete_report_config(r.config_id) }
     end
 
-    def generate_config(query, report_name, ids, id_type, min_severity=0)
-      @nsc = connect(@username, @password)
+    def generate_config(query, report_name, ids, id_type)
+      @nsc = connect
       @log.log_message "Generating report config with name #{report_name}..."
       report_config = Nexpose::ReportConfig.new(report_name, nil, 'sql')
       report_config.add_filter('version', '2.0.1')
@@ -129,8 +71,8 @@ module NexposeServiceNow
       report_id
     end
 
-    def run_report(report_id)
-      @log.log_message "Running report #{report_id}..."
+    def run_report(report_id, report_name)
+      @log.log_message "Running Report #{report_name}, ID: #{report_id}..."
       @nsc.generate_report(report_id, false)
       wait_for_report(report_id)
     end
@@ -152,11 +94,16 @@ module NexposeServiceNow
         val = yield
         return val if val
       rescue Exception => error
+        @log.log_error_message("Error during wait_until: #{error}")
         raise error if options[:fail_on_exceptions]
       end
       if Time.now >= time_limit
-        raise options[:on_timeout] if options[:on_timeout]
+        if options[:on_timeout]
+          @log.log_error_message("#{options[:on_timeout]}. Exiting...")
+          raise options[:on_timeout]
+        end
         error ||= 'Timed out waiting for condition.'
+        @log.log_error_message("#{error}. Exiting...")
         raise error
       end
       sleep polling_interval
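
The change above adds error logging to the polling loop before each raise. A generic, standalone sketch of the pattern this method follows (names, logging, and intervals are illustrative, not the gem's exact implementation):

    # Retry a block until it returns a truthy value or time runs out.
    def poll_until(timeout: 5, interval: 1, fail_on_exceptions: false)
      time_limit = Time.now + timeout
      loop do
        begin
          val = yield
          return val if val
        rescue StandardError => error
          warn "Error during poll: #{error}"  # stand-in for @log
          raise if fail_on_exceptions         # otherwise keep polling
        end
        raise 'Timed out waiting for condition.' if Time.now >= time_limit
        sleep interval
      end
    end

    poll_until(timeout: 3) { Time.now.sec.even? }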
@@ -164,7 +111,7 @@ module NexposeServiceNow
     end
 
     def save_report(report_name, report_id, output_dir)
-      @log.log_message 'Saving report...'
+      @log.log_message "Saving report - Name: #{report_name}, ID: #{report_id}..."
       local_file_name = self.class.get_filepath(report_name, output_dir)
       File.delete(local_file_name) if File.exists? local_file_name
 
@@ -176,9 +123,9 @@ module NexposeServiceNow
 
       begin
         # Refresh the connection
-        @nsc = connect(@username, @password)
+        @nsc = connect
 
-        #Got the report, cleanup server-side
+        # Got the report, cleanup server-side
         @nsc.delete_report_config(report_id)
       rescue
         @log.log_error_message 'Error deleting report'
@@ -187,9 +134,9 @@ module NexposeServiceNow
       local_file_name
     end
 
-    def connect(username, password)
+    def connect
       begin
-        connection = Nexpose::Connection.new(@url, username, password, @port)
+        connection = Nexpose::Connection.new(@url, @username, @password, @port)
         connection.login
         @log.log_message 'Logged in.'
       rescue Exception => e
@@ -1,325 +1,72 @@
-require_relative './nexpose_helper'
 require_relative './nx_logger'
 
 module NexposeServiceNow
   class HistoricalData
 
-    REPORT_FILE = 'Nexpose-ServiceNow-latest_scans.csv'
-    STORED_FILE = 'last_scan_data.csv'
-    TIMESTAMP_FILE = 'last_vuln_run.csv'
-    NEW_TIMESTAMP_FILE = 'new_vuln_timestamp.csv'
-
-    DAG_TIMESTAMP_FILE = 'last_scan_data_dag.csv'
-    NEW_DAG_TIMESTAMP_FILE = 'new_dag_timestamp.csv'
-
-    ASSET_GROUP_FILE = 'Nexpose-ServiceNow-asset_groups-old.csv'
-    DIFFERENTIAL_FILE_REGEX = 'Nexpose-ServiceNow-*-old.csv'
-
     SITE_IDENTIFIER = 'site_id'
     SITE_DELTA_VALUE = 'last_scan_id'
-    SITE_BASE_VALUE = 0
+    SITE_TIMESTAMP_VALUE = 'finished'
+
+    DEFAULT_TIMESTAMP_VALUE = '1985-01-01 12:00:00'
 
-    DAG_IDENTIFIER = 'asset_group_id'
-    DAG_DELTA_VALUE = 'last_import'
-    DAG_BASE_VALUE = '1985-01-01 00:00:00'
+    GROUP_FILE_PATTERN = 'Nexpose-ServiceNow-asset_group*.csv'
 
-    def initialize(output_dir, nexpose_ids, id_type, start_time)
+    def initialize(output_dir)
       local_dir = File.expand_path(output_dir)
-      @ids = nexpose_ids
-      @id_type = id_type
+      @remote_file = File.join(local_dir, 'Nexpose-ServiceNow-latest_scans.csv')
+      @log = NexposeServiceNow::NxLogger.instance
+    end
 
-      if @id_type == :site
-        current_file = STORED_FILE
-        new_file = REPORT_FILE
-      elsif @id_type == :asset_group
-        current_file = DAG_TIMESTAMP_FILE
-        new_file = NEW_DAG_TIMESTAMP_FILE
-      end
+    # Filters out irrelevant site IDs
+    def filter_ids(site_ids)
+      original_site_ids = site_ids.keys
+      default_values = ['0', "\"#{DEFAULT_TIMESTAMP_VALUE}\""]
 
-      @start_time = start_time
+      # Reject if the delta value matches a default value
+      site_ids.reject! do |site, delta|
+        default_values.any? { |v| delta == v }
+      end
 
-      @local_file = File.join(local_dir, current_file)
-      @remote_file = File.join(local_dir, new_file)
+      # Get a list of excluded sites
+      excluded = original_site_ids - site_ids.keys
 
-      # File containing the timestamp used in vulnerability queries
-      @timestamp_file = File.join(local_dir, TIMESTAMP_FILE)
-      @prev_timestamp_file = File.join(local_dir, NEW_TIMESTAMP_FILE)
+      if excluded.count > 0
+        @log.log_message("Excluding #{excluded} from old vulns query.")
+      end
 
-      @log = NexposeServiceNow::NxLogger.instance
+      site_ids
     end
 
-    # TODO: Remove site references here? Will there be remote CSV here if we're using DAGs? No scan IDs.
-    # Filters the saved report down to the sites being queried
-    # This can then be used as a basis to update last_scan_data
-    def filter_report
-      # Create a full last_scan_data if it doesn't already exist
-      create_base_delta_file unless File.exist? @local_file
-
+    # Replaces scan IDs of sites that aren't to be imported with 0
+    def filter_report(site_ids=[])
       @log.log_message 'Filtering report down to sites which will be queried'
 
       remote_csv = load_scan_id_report
-      nexpose_ids = @ids.map(&:to_s)
-      identifier = if @id_type == :site
-                     SITE_IDENTIFIER
-                   elsif @id_type ==:asset_group
-                     DAG_IDENTIFIER
-                   end
+      nexpose_ids = site_ids.map(&:to_s)
 
-      if @id_type == :asset_group
-        header = [DAG_IDENTIFIER, DAG_DELTA_VALUE]
-        rows = []
+      return remote_csv if nexpose_ids.first == '0'
 
-        @ids.each do |i|
-          rows << CSV::Row.new(header, [i, @start_time])
+      remote_csv.each do |row|
+        unless nexpose_ids.include?(row[SITE_IDENTIFIER])
+          row[SITE_DELTA_VALUE] = '0'
+          row[SITE_TIMESTAMP_VALUE] = DEFAULT_TIMESTAMP_VALUE
         end
-
-        remote_csv = CSV::Table.new(rows)
-      end
-
-      # TODO: Why is this done? Aren't these already filtered?
-      filtered_csv = remote_csv.delete_if do |r|
-        !nexpose_ids.include?(r[identifier])
-      end
-
-      File.open(@remote_file, 'w') do |f|
-        f.write(remote_csv.to_csv)
       end
 
-      puts filtered_csv
+      remote_csv
     end
 
     # Reads the downloaded report containing LATEST scan IDs
     def load_scan_id_report
       @log.log_message 'Loading scan data report'
       unless File.exists? @remote_file
-        @log.log_message 'No existing report file found.'
-        return nil
+        error = 'Latest scan report file could not be found.'
+        @log.log_message error
+        raise error
       end
       CSV.read(@remote_file, headers: true)
     end
 
-    # Loads the last scan data file as CSV.
-    # It may be necessary to create one first.
-    def load_last_scan_data
-      @log.log_message 'Loading last scan data.'
-
-      create_base_delta_file unless File.exist? @local_file
-      CSV.read(@local_file, headers: true)
-    end
-
-    def stored_delta_values(nexpose_ids)
-      return [] if !File.exist? @local_file
-
-      if @id_type == :site
-        identifier = SITE_IDENTIFIER
-        delta_column = SITE_DELTA_VALUE
-        base_value = SITE_BASE_VALUE
-      elsif @id_type == :asset_group
-        identifier = DAG_IDENTIFIER
-        delta_column = DAG_DELTA_VALUE
-        base_value = DAG_BASE_VALUE
-      end
-
-      csv = load_last_scan_data
-      delta_values = {}
-      nexpose_ids.each do |id|
-        row = csv.find { |r| r[identifier] == id.to_s }
-        row ||= { delta_column => base_value }
-        delta_values[id.to_s] = row[delta_column]
-      end
-
-      delta_values
-    end
-
-    # Compares stored scan IDs versus remote scan IDs.
-    # This determines which scans are included as filters.
-    def collections_to_import(previously_imported_only=false)
-      return @ids unless File.exist? @remote_file
-      @log.log_message "Filtering for #{@id_type}s with new scans"
-      self.send("#{@id_type}s_to_import", previously_imported_only)
-    end
-
-    def sites_to_import(previously_imported_only=false)
-      remote_csv = CSV.read(@remote_file, headers: true)
-      local_csv = load_last_scan_data
-
-      filtered_sites = []
-
-      @ids.each do |id|
-        remote_scan_id = remote_csv.find { |r| r['site_id'] == id.to_s } || {}
-        remote_scan_id = remote_scan_id['last_scan_id'] || 1
-
-        local_scan_id = local_csv.find { |r| r['site_id'] == id.to_s } || {}
-        local_scan_id = local_scan_id['last_scan_id'] || 0
-
-        # Check if only allowing sites which were previously imported
-        next if local_scan_id.to_s == '0' && previously_imported_only
-
-        filtered_sites << id if local_scan_id.to_i < remote_scan_id.to_i
-      end
-
-      @ids = filtered_sites
-    end
-
-    def asset_groups_to_import(previously_imported_only=false)
-      filtered_asset_groups = []
-      local_csv = load_last_scan_data
-
-      @ids.each do |id|
-        local_id = local_csv.find { |r| r[DAG_IDENTIFIER] == id.to_s } || {}
-        local_id = local_id[DAG_DELTA_VALUE] || DAG_BASE_VALUE
-
-        next if local_id == DAG_BASE_VALUE && previously_imported_only
-
-        filtered_asset_groups << id
-      end
-
-      @ids = filtered_asset_groups
-    end
-
-    # Creates a base last scan data file from a downloaded report
-    def create_base_delta_file
-      @log.log_message 'Creating base delta file'
-      self.send("create_#{@id_type}_base_file")
-    end
-
-    def create_site_base_file
-      csv = load_scan_id_report
-      csv.delete('finished')
-      csv.each { |l| l['last_scan_id'] = SITE_BASE_VALUE }
-
-      save_last_scan_data(csv)
-    end
-
-    def create_asset_group_base_file
-      CSV.open(@local_file, 'w') do |csv|
-        csv << %w(asset_group_id last_import)
-        @ids.each do |n|
-          csv << [n, DAG_BASE_VALUE]
-        end
-      end
-    end
-
-    # Updates only the rows that were affected by this scan
-    def update_delta_file
-      @log.log_message 'Updating last scan data'
-
-      if !(File.exist? @local_file) && !(File.exist? @remote_file)
-        @log.log_message 'Last scan data does not exist yet.'
-        return
-      end
-
-      updated_csv = load_last_scan_data
-      remote_csv = load_scan_id_report
-
-      method = "update_#{@id_type}_delta_file"
-      updated_csv = self.send(method, updated_csv, remote_csv)
-
-      save_last_scan_data(updated_csv)
-    end
-
-    def update_site_delta_file(updated_csv, remote_csv)
-      #merge changes in from remote_csv
-      remote_csv.each do |row|
-        updated_row = updated_csv.find { |r| r['site_id'] == row['site_id'] }
-        if updated_row.nil?
-          row.delete 'finished'
-          updated_csv << row
-        else
-          updated_row['last_scan_id'] = row['last_scan_id']
-        end
-      end
-
-      updated_csv
-    end
-
-    def update_asset_group_delta_file(updated_csv, remote_csv)
-      #merge changes in from remote_csv
-      remote_csv.each do |row|
-        updated_row = updated_csv.find do |r|
-          r['asset_group_id'] == row['asset_group_id']
-        end
-
-        if updated_row.nil?
-          updated_csv << row
-        else
-          updated_row['last_import'] = row['last_import']
-        end
-      end
-
-      # Insert any NEW IDs with baseline time
-      @ids.each do |i|
-        row = updated_csv.find { |r| r[DAG_IDENTIFIER] == i }
-        updated_csv << [i, DAG_BASE_VALUE] if row.nil?
-      end
-
-      updated_csv
-    end
-
-    # Overwrite the last scan data file with new csv
-    def save_last_scan_data(csv)
-      @log.log_message 'Saving last scan data'
-      File.open(@local_file, 'w') do |f|
-        f.write(csv.to_csv)
-      end
-    end
-
-    def save_vuln_timestamp(nexpose_ids=[])
-      start_time = Time.new
-
-      #Read timestamp from new timestamp file (substitute base time)
-      if File.exist? @prev_timestamp_file
-        file = File.expand_path(@prev_timestamp_file)
-        csv = CSV.open(file, headers:true)
-        line = csv.readline
-        last_run = line['Last Scan Time']
-        last_sites = line['Sites']
-        csv.close
-
-        File.delete(file)
-      end
-
-      last_run ||= Time.new(1985)
-      last_sites ||= []
-      last_run = last_run.strftime('%Y-%m-%d') if last_run.class.to_s == 'Time'
-      create_last_vuln_data(last_run, last_sites)
-
-      file = File.expand_path(@prev_timestamp_file)
-      CSV.open(file, 'w') do |csv|
-        csv << ['Last Scan Time', 'Sites']
-        csv << [start_time.strftime('%Y-%m-%d'), nexpose_ids.join(',')]
-      end
-    end
-
-    def create_last_vuln_data(time=nil, nexpose_ids=[])
-      @log.log_message 'Creating last vulnerability scan time file.'
-
-      time ||= Time.new(1985)
-      time = time.strftime('%Y-%m-%d') if time.class.to_s == 'Time'
-      nexpose_ids = nexpose_ids.join(',') if nexpose_ids.class.to_s == 'Array'
-
-      file = File.expand_path(@timestamp_file)
-
-      CSV.open(file, 'w') do |csv|
-        csv << ['Last Scan Time', 'Sites']
-        csv << [time, nexpose_ids]
-      end
-    end
-
-    # Current IDs are inserted into the updated CSV file.
-    def last_vuln_run
-      @log.log_message 'Retrieving the last vulnerability timestamp'
-
-      create_last_vuln_data if !File.exist? @timestamp_file
-
-      file = File.expand_path(@timestamp_file)
-      csv = CSV.open(file, headers:true)
-      last_run = csv.readline['Last Scan Time']
-      csv.close
-
-      last_run
-    end
-
     #########################################################
     #                      Experimental                     #
     #########################################################
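
The HistoricalData rewrite drops the local last-scan-data bookkeeping: delta values now arrive from the caller, filter_ids trims sites whose delta is still a default value, and filter_report zeroes out rows for sites that aren't being imported. An illustrative run of the new filtering logic (hash contents invented):

    # Site IDs mapped to their delta values, as filter_ids receives them.
    site_ids = { '1' => '120', '2' => '0', '3' => "\"1985-01-01 12:00:00\"" }
    default_values = ['0', "\"1985-01-01 12:00:00\""]

    # Sites still on a default delta are dropped from the old-vulns query.
    site_ids.reject! { |_site, delta| default_values.any? { |v| delta == v } }
    p site_ids  # => {"1"=>"120"}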
@@ -335,77 +82,21 @@ module NexposeServiceNow
       @log.log_error_message message unless @log.nil?
     end
 
-    def set_last_scan(nexpose_id, scan_id)
-      unless File.exist? @local_file
-        log_and_error 'Last scan data does not exist.'
-        log_and_error 'Can\'t set last scan data without existing file.'
-        exit -1
-      end
-
-      csv = load_last_scan_data
-      row = csv.find { |r| r['site_id'] == nexpose_id }
-
-      if row.nil?
-        csv << [nexpose_id, scan_id]
-      else
-        row['last_scan_id'] = scan_id
-      end
-
-      save_last_scan_data csv
-
-      log_and_print 'Last scan data updated.'
-    end
-
-    def set_last_vuln(date, sites=nil)
-      create_last_vuln_data(date, sites)
-      log_and_print 'Last vuln data updated.'
-    end
-
-    def remove_local_file(filename, action)
-      unless File.exist? filename
-        log_and_error 'Can\'t remove file.'
-        log_and_error "File #{filename} cannot be located."
-        return
-      end
-
-      begin
-        self.send("by_#{action}", filename)
-      rescue Exception => e
-        log_and_error "Error removing file:\n#{e}"
-        return
-      end
-
-      log_and_print "File #{filename} removed"
-    end
-
-    def by_rename(filename)
-      new_name = "#{filename}.#{Time.new.strftime('%Y-%m-%d_%H-%M-%S')}"
-      File.delete new_name if File.exist? new_name
-      File.rename(filename, new_name)
-    end
-
-    def by_delete(filename)
-      File.delete filename
-    end
-
-    def remove_last_scan_data
-      remove_local_file @local_file, 'rename'
-      remove_local_file @remote_file, 'delete'
-    end
-
-    def remove_last_vuln_data
-      remove_local_file @timestamp_file, 'rename'
-      remove_local_file @prev_timestamp_file, 'delete'
-    end
-
-    def remove_last_diff_comparison_data(output_dir)
+    # Deletes all of the CSV files matching the pattern
+    def remove_diff_files(output_dir)
       local_path = File.expand_path(output_dir)
-      remove_local_file (local_path + ASSET_GROUP_FILE), 'rename'
-
-      Dir[local_path + DIFFERENTIAL_FILE_REGEX].each do |file|
-        remove_local_file file, 'delete'
+      group_csv_files = Dir.glob(File.join(local_path, GROUP_FILE_PATTERN))
+
+      group_csv_files.each do |file|
+        begin
+          File.delete file
+          log_and_print "File #{file} deleted."
+        rescue Exception => e
+          log_and_error "Error removing file:\n#{e}"
+        end
      end
    end
  end
 end
 
+
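
The old per-file rename/delete plumbing is replaced by a single glob-and-delete pass over the asset-group CSVs. A hedged, standalone sketch of the same behaviour (directory and file names below are invented):

    require 'fileutils'
    require 'tmpdir'

    dir = Dir.mktmpdir
    FileUtils.touch(File.join(dir, 'Nexpose-ServiceNow-asset_groups.csv'))

    # Delete every CSV matching the asset-group pattern, logging each one.
    pattern = 'Nexpose-ServiceNow-asset_group*.csv'
    Dir.glob(File.join(dir, pattern)).each do |file|
      File.delete(file)
      puts "File #{file} deleted."
    end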