nexpose_servicenow 0.4.24 → 0.5.1

Sign up to get free protection for your applications and to get access to all the features.
@@ -1,3 +1,4 @@
1
+ #TODO: Check if using site_id is OK. If the object is changed, the code on SN will likely need to be changed.
1
2
 
2
3
  module NexposeServiceNow
3
4
  class Chunker
@@ -12,28 +13,28 @@ module NexposeServiceNow
12
13
 
13
14
  def setup_logging
14
15
  @log = NexposeServiceNow::NxLogger.instance
15
- @log.log_message("Chunker File Limit: #{@size_limit}MB");
16
- @log.log_message("Chunk Row Limit: #{@row_limit}")
16
+ @log.log_message("Chunk File Limit:\t#{@size_limit}MB")
17
+ @log.log_message("Chunk Row Limit:\t#{@row_limit}")
17
18
  end
18
19
 
19
- #Grab the header from the first file
20
+ # Grab the header from the first file
20
21
  def get_header
21
- file = File.open(@report_details.first[:report_name], "r")
22
+ file = File.open(@report_details.first[:report_name], 'r')
22
23
  header = file.readline
23
24
  file.close
24
25
 
25
26
  header
26
27
  end
27
28
 
28
- def preprocess(nexpose_ids=nil)
29
+ def preprocess
29
30
  all_chunks = []
30
31
  @report_details.each do |report|
31
- @log.log_message("Breaking file #{report[:report_name]} down into chunks.")
32
+ @log.log_message("Dividing file #{report[:report_name]} into chunks.")
32
33
  chunks = process_file(report[:report_name], report[:id])
33
34
  all_chunks.concat chunks
34
35
  end
35
36
 
36
- @log.log_message("Files broken down into #{all_chunks.count} chunks")
37
+ @log.log_message("Files divided into #{all_chunks.count} chunks")
37
38
 
38
39
  puts all_chunks.to_json
39
40
  end
@@ -41,12 +42,12 @@ module NexposeServiceNow
41
42
  def process_file(file_path, site_id=nil)
42
43
  relative_size_limit = @size_limit - @header.bytesize
43
44
  chunk = { site_id: site_id,
44
- start: @header.bytesize,
45
+ start: @header.bytesize,
45
46
  length: 0,
46
47
  row_count: 0 }
47
48
 
48
49
  chunks = []
49
- csv_file = CSV.open(file_path, "r", headers: true)
50
+ csv_file = CSV.open(file_path, 'r', headers: true)
50
51
  while(true)
51
52
  position = csv_file.pos
52
53
  line = csv_file.shift
@@ -62,12 +63,13 @@ module NexposeServiceNow
62
63
  else
63
64
  chunks << chunk
64
65
 
66
+ # TODO: Make generic?
65
67
  #Initialise chunk with this row information
66
68
  chunk = { site_id: site_id,
67
69
  start: position,
68
70
  length: row_length,
69
71
  row_count: 1 }
70
- end
72
+ end
71
73
  end
72
74
  csv_file.close
73
75
 
@@ -83,7 +85,7 @@ module NexposeServiceNow
83
85
  end
84
86
 
85
87
  def get_file(site_id=nil)
86
- #-1 indicates a single query report
88
+ # -1 indicates a single query report
87
89
  return @report_details.first[:report_name] if site_id.to_i <= 0
88
90
 
89
91
  report = @report_details.find { |r| r[:id].to_s == site_id.to_s }
@@ -91,12 +93,15 @@ module NexposeServiceNow
91
93
  end
92
94
 
93
95
  def read_chunk(start, length, site_id=nil)
94
- @log.log_message("Returning chunk. Start: #{start}, Length: #{length}, File: #{@file_path}")
96
+ file_path = get_file(site_id)
97
+ msg = "Returning chunk. Start: #{start}, " \
98
+ "Length: #{length}, File: #{file_path}"
99
+ @log.log_message(msg)
95
100
 
96
101
  #If the header isn't in the chunk, prepend it
97
- header = start == 0 ? "" : @header
102
+ header = start == 0 ? '' : @header
98
103
 
99
- file = File.open(get_file(site_id), "rb")
104
+ file = File.open(file_path, 'rb')
100
105
  file.seek(start)
101
106
  puts header + file.read(length)
102
107
  file.close
@@ -3,37 +3,81 @@ require_relative './nx_logger'
3
3
 
4
4
  module NexposeServiceNow
5
5
  class HistoricalData
6
- REPORT_FILE = "Nexpose-ServiceNow-latest_scans.csv"
7
- STORED_FILE = "last_scan_data.csv"
8
- TIMESTAMP_FILE = "last_vuln_run.csv"
9
- NEW_TIMESTAMP_FILE = "new_vuln_timestamp.csv"
10
6
 
11
- def initialize(options)
12
- local_dir = File.expand_path(options[:output_dir])
13
- @ids = options[:nexpose_ids]
7
+ REPORT_FILE = 'Nexpose-ServiceNow-latest_scans.csv'
8
+ STORED_FILE = 'last_scan_data.csv'
9
+ TIMESTAMP_FILE = 'last_vuln_run.csv'
10
+ NEW_TIMESTAMP_FILE = 'new_vuln_timestamp.csv'
11
+
12
+ DAG_TIMESTAMP_FILE = 'last_scan_data_dag.csv'
13
+ NEW_DAG_TIMESTAMP_FILE = 'new_dag_timestamp.csv'
14
+
15
+ SITE_IDENTIFIER = 'site_id'
16
+ SITE_DELTA_VALUE = 'last_scan_id'
17
+ SITE_BASE_VALUE = 0
18
+
19
+ DAG_IDENTIFIER = 'asset_group_id'
20
+ DAG_DELTA_VALUE = 'last_import'
21
+ DAG_BASE_VALUE = '1985-01-01 00:00:00'
22
+
23
+ def initialize(output_dir, nexpose_ids, id_type, start_time)
24
+ local_dir = File.expand_path(output_dir)
25
+ @ids = nexpose_ids
26
+ @id_type = id_type
27
+
28
+ if @id_type == :site
29
+ current_file = STORED_FILE
30
+ new_file = REPORT_FILE
31
+ elsif @id_type == :asset_group
32
+ current_file = DAG_TIMESTAMP_FILE
33
+ new_file = NEW_DAG_TIMESTAMP_FILE
34
+ end
35
+
36
+ @start_time = start_time
14
37
 
15
- @local_file = File.join(local_dir, STORED_FILE)
16
- @remote_file = File.join(local_dir, REPORT_FILE)
38
+ @local_file = File.join(local_dir, current_file)
39
+ @remote_file = File.join(local_dir, new_file)
17
40
 
18
41
  # File containing the timestamp used in vulnerability queries
19
42
  @timestamp_file = File.join(local_dir, TIMESTAMP_FILE)
20
43
  @prev_timestamp_file = File.join(local_dir, NEW_TIMESTAMP_FILE)
21
44
 
22
45
  @log = NexposeServiceNow::NxLogger.instance
23
- @log.log_message "Retrieving environment variables."
24
46
  end
25
47
 
26
- #Filters the saved report down to the sites being queried
27
- #This can then be used as a basis to update last_scan_data
48
+ # TODO: Remove site references here? Will there be remote CSV here if we're using DAGs? No scan IDs.
49
+ # Filters the saved report down to the sites being queried
50
+ # This can then be used as a basis to update last_scan_data
28
51
  def filter_report
29
- #Create a full last_scan_data if it doesn't already exist
30
- create_last_scan_data unless File.exist? @local_file
52
+ # Create a full last_scan_data if it doesn't already exist
53
+ create_base_delta_file unless File.exist? @local_file
31
54
 
32
- @log.log_message 'Filtering report down sites which will be queried'
55
+ @log.log_message 'Filtering report down to sites which will be queried'
33
56
 
34
57
  remote_csv = load_scan_id_report
35
- site_ids = @ids.map(&:to_s)
36
- filtered_csv = remote_csv.delete_if { |r| !site_ids.include?(r['site_id']) }
58
+ nexpose_ids = @ids.map(&:to_s)
59
+ identifier = if @id_type == :site
60
+ SITE_IDENTIFIER
61
+ elsif @id_type == :asset_group
62
+ DAG_IDENTIFIER
63
+ end
64
+
65
+ if @id_type == :asset_group
66
+ header = [DAG_IDENTIFIER, DAG_DELTA_VALUE]
67
+ rows = []
68
+
69
+ @ids.each do |i|
70
+ rows << CSV::Row.new(header, [i, @start_time])
71
+ end
72
+
73
+ remote_csv = CSV::Table.new(rows)
74
+ end
75
+
76
+ # TODO: Why is this done? Aren't these already filtered?
77
+ filtered_csv = remote_csv.delete_if do |r|
78
+ !nexpose_ids.include?(r[identifier])
79
+ end
80
+
37
81
  File.open(@remote_file, 'w') do |f|
38
82
  f.write(remote_csv.to_csv)
39
83
  end
@@ -41,45 +85,58 @@ module NexposeServiceNow
41
85
  puts filtered_csv
42
86
  end
43
87
 
44
- #Reads the downloaded report containing LATEST scan IDs
88
+ # Reads the downloaded report containing LATEST scan IDs
45
89
  def load_scan_id_report
46
- @log.log_message "Loading scan data report"
90
+ @log.log_message 'Loading scan data report'
47
91
  unless File.exists? @remote_file
48
- @log.log_message "No existing report file found."
92
+ @log.log_message 'No existing report file found.'
49
93
  return nil
50
94
  end
51
95
  CSV.read(@remote_file, headers: true)
52
96
  end
53
97
 
54
- #Loads the last scan data file as CSV.
55
- #It may be necessary to create one first.
98
+ # Loads the last scan data file as CSV.
99
+ # It may be necessary to create one first.
56
100
  def load_last_scan_data
57
- @log.log_message "Loading last scan data"
101
+ @log.log_message 'Loading last scan data.'
58
102
 
59
- create_last_scan_data unless File.exist? @local_file
103
+ create_base_delta_file unless File.exist? @local_file
60
104
  CSV.read(@local_file, headers: true)
61
105
  end
62
106
 
63
- def last_scan_ids(sites)
107
+ def stored_delta_values(nexpose_ids)
64
108
  return [] if !File.exist? @local_file
65
109
 
110
+ if @id_type == :site
111
+ identifier = SITE_IDENTIFIER
112
+ delta_column = SITE_DELTA_VALUE
113
+ base_value = SITE_BASE_VALUE
114
+ elsif @id_type == :asset_group
115
+ identifier = DAG_IDENTIFIER
116
+ delta_column = DAG_DELTA_VALUE
117
+ base_value = DAG_BASE_VALUE
118
+ end
119
+
66
120
  csv = load_last_scan_data
67
- last_scan_ids = {}
68
- sites.each do |id|
69
- row = csv.find { |r| r['site_id'] == id.to_s } || { 'last_scan_id' => '0' }
70
- last_scan_ids[id.to_s] = row['last_scan_id']
121
+ delta_values = {}
122
+ nexpose_ids.each do |id|
123
+ row = csv.find { |r| r[identifier] == id.to_s }
124
+ row ||= { delta_column => base_value }
125
+ delta_values[id.to_s] = row[delta_column]
71
126
  end
72
127
 
73
- last_scan_ids
128
+ delta_values
74
129
  end
75
130
 
76
- #Compares stored scan IDs versus remote scan IDs.
77
- #This determines which scans are included as filters.
78
- def sites_to_scan(imported_sites_only=false)
131
+ # Compares stored scan IDs versus remote scan IDs.
132
+ # This determines which scans are included as filters.
133
+ def collections_to_import(previously_imported_only=false)
79
134
  return @ids unless File.exist? @remote_file
135
+ @log.log_message "Filtering for #{@id_type}s with new scans"
136
+ self.send("#{@id_type}s_to_import", previously_imported_only)
137
+ end
80
138
 
81
- @log.log_message 'Filtering for sites with new scans'
82
-
139
+ def sites_to_import(previously_imported_only=false)
83
140
  remote_csv = CSV.read(@remote_file, headers: true)
84
141
  local_csv = load_last_scan_data
85
142
 
@@ -93,7 +150,7 @@ module NexposeServiceNow
93
150
  local_scan_id = local_scan_id['last_scan_id'] || 0
94
151
 
95
152
  # Check if only allowing sites which were previously imported
96
- next if local_scan_id.to_s == "0" && imported_sites_only
153
+ next if local_scan_id.to_s == '0' && previously_imported_only
97
154
 
98
155
  filtered_sites << id if local_scan_id.to_i < remote_scan_id.to_i
99
156
  end
@@ -101,22 +158,50 @@ module NexposeServiceNow
101
158
  @ids = filtered_sites
102
159
  end
103
160
 
104
- #Creates a base last scan data file from a downloaded report
105
- def create_last_scan_data
106
- @log.log_message 'Creating base last scan data file'
161
+ def asset_groups_to_import(previously_imported_only=false)
162
+ filtered_asset_groups = []
163
+ local_csv = load_last_scan_data
164
+
165
+ @ids.each do |id|
166
+ local_id = local_csv.find { |r| r[DAG_IDENTIFIER] == id.to_s } || {}
167
+ local_id = local_id[DAG_DELTA_VALUE] || DAG_BASE_VALUE
168
+
169
+ next if local_id == DAG_BASE_VALUE && previously_imported_only
170
+
171
+ filtered_asset_groups << id
172
+ end
107
173
 
174
+ @ids = filtered_asset_groups
175
+ end
176
+
177
+ # Creates a base last scan data file from a downloaded report
178
+ def create_base_delta_file
179
+ @log.log_message 'Creating base delta file'
180
+ self.send("create_#{@id_type}_base_file")
181
+ end
182
+
183
+ def create_site_base_file
108
184
  csv = load_scan_id_report
109
185
  csv.delete('finished')
110
- csv.each { |l| l['last_scan_id'] = 0 }
186
+ csv.each { |l| l['last_scan_id'] = SITE_BASE_VALUE }
111
187
 
112
188
  save_last_scan_data(csv)
113
189
  end
114
190
 
115
- #Updates only the rows that were affected by this scan
116
- def update_last_scan_data
117
- @log.log_message "Updating last scan data"
191
+ def create_asset_group_base_file
192
+ CSV.open(@local_file, 'w') do |csv|
193
+ csv << %w(asset_group_id last_import)
194
+ @ids.each do |n|
195
+ csv << [n, DAG_BASE_VALUE]
196
+ end
197
+ end
198
+ end
199
+
200
+ # Updates only the rows that were affected by this scan
201
+ def update_delta_file
202
+ @log.log_message 'Updating last scan data'
118
203
 
119
- if !(File.exist? @local_file) && !(File.exist? @remote_file)
204
+ if !(File.exist? @local_file) && !(File.exist? @remote_file)
120
205
  @log.log_message 'Last scan data does not exist yet.'
121
206
  return
122
207
  end
@@ -124,23 +209,51 @@ module NexposeServiceNow
124
209
  updated_csv = load_last_scan_data
125
210
  remote_csv = load_scan_id_report
126
211
 
212
+ method = "update_#{@id_type}_delta_file"
213
+ updated_csv = self.send(method, updated_csv, remote_csv)
214
+
215
+ save_last_scan_data(updated_csv)
216
+ end
217
+
218
+ def update_site_delta_file(updated_csv, remote_csv)
127
219
  #merge changes in from remote_csv
128
220
  remote_csv.each do |row|
129
221
  updated_row = updated_csv.find { |r| r['site_id'] == row['site_id'] }
130
222
  if updated_row.nil?
131
223
  row.delete 'finished'
132
- updated_csv << row
224
+ updated_csv << row
133
225
  else
134
- updated_row['last_scan_id'] = row['last_scan_id']
226
+ updated_row['last_scan_id'] = row['last_scan_id']
135
227
  end
136
228
  end
137
229
 
138
- save_last_scan_data(updated_csv)
230
+ updated_csv
231
+ end
232
+
233
+ def update_asset_group_delta_file(updated_csv, remote_csv)
234
+ #merge changes in from remote_csv
235
+ remote_csv.each do |row|
236
+ updated_row = updated_csv.find do |r|
237
+ r['asset_group_id'] == row['asset_group_id']
238
+ end
139
239
 
140
- #puts updated_csv
240
+ if updated_row.nil?
241
+ updated_csv << row
242
+ else
243
+ updated_row['last_import'] = row['last_import']
244
+ end
245
+ end
246
+
247
+ # Insert any NEW IDs with baseline time
248
+ @ids.each do |i|
249
+ row = updated_csv.find { |r| r[DAG_IDENTIFIER] == i }
250
+ updated_csv << [i, DAG_BASE_VALUE] if row.nil?
251
+ end
252
+
253
+ updated_csv
141
254
  end
142
255
 
143
- #Overwrite the last scan data file with new csv
256
+ # Overwrite the last scan data file with new csv
144
257
  def save_last_scan_data(csv)
145
258
  @log.log_message 'Saving last scan data'
146
259
  File.open(@local_file, 'w') do |f|
@@ -148,9 +261,7 @@ module NexposeServiceNow
148
261
  end
149
262
  end
150
263
 
151
-
152
- #insert sites?
153
- def save_vuln_timestamp(sites=[])
264
+ def save_vuln_timestamp(nexpose_ids=[])
154
265
  start_time = Time.new
155
266
 
156
267
  #Read timestamp from new timestamp file (substitute base time)
@@ -167,32 +278,32 @@ module NexposeServiceNow
167
278
 
168
279
  last_run ||= Time.new(1985)
169
280
  last_sites ||= []
170
- last_run = last_run.strftime("%Y-%m-%d") if last_run.class.to_s == 'Time'
281
+ last_run = last_run.strftime('%Y-%m-%d') if last_run.class.to_s == 'Time'
171
282
  create_last_vuln_data(last_run, last_sites)
172
283
 
173
284
  file = File.expand_path(@prev_timestamp_file)
174
285
  CSV.open(file, 'w') do |csv|
175
286
  csv << ['Last Scan Time', 'Sites']
176
- csv << [start_time.strftime("%Y-%m-%d"), sites.join(',')]
287
+ csv << [start_time.strftime('%Y-%m-%d'), nexpose_ids.join(',')]
177
288
  end
178
289
  end
179
290
 
180
- def create_last_vuln_data(time=nil, sites=[])
291
+ def create_last_vuln_data(time=nil, nexpose_ids=[])
181
292
  @log.log_message 'Creating last vulnerability scan time file.'
182
293
 
183
294
  time ||= Time.new(1985)
184
- time = time.strftime("%Y-%m-%d") if time.class.to_s == 'Time'
185
- sites = sites.join(',') if sites.class.to_s == 'Array'
295
+ time = time.strftime('%Y-%m-%d') if time.class.to_s == 'Time'
296
+ nexpose_ids = nexpose_ids.join(',') if nexpose_ids.class.to_s == 'Array'
186
297
 
187
298
  file = File.expand_path(@timestamp_file)
188
299
 
189
300
  CSV.open(file, 'w') do |csv|
190
301
  csv << ['Last Scan Time', 'Sites']
191
- csv << [time, sites]
302
+ csv << [time, nexpose_ids]
192
303
  end
193
304
  end
194
305
 
195
- #Current IDs are inserted into the updated CSV file.
306
+ # Current IDs are inserted into the updated CSV file.
196
307
  def last_vuln_run
197
308
  @log.log_message 'Retrieving the last vulnerability timestamp'
198
309
 
@@ -206,8 +317,11 @@ module NexposeServiceNow
206
317
  last_run
207
318
  end
208
319
 
209
- #Experimental
210
- #These should probably return strings that can be mlog'd
320
+ #########################################################
321
+ # Experimental #
322
+ #########################################################
323
+
324
+ # These should probably return strings that can be mlog'd
211
325
  def log_and_print(message)
212
326
  puts message
213
327
  @log.log_message message unless @log.nil?
@@ -227,15 +341,15 @@ module NexposeServiceNow
227
341
 
228
342
  csv = load_last_scan_data
229
343
  row = csv.find { |r| r['site_id'] == nexpose_id }
230
-
344
+
231
345
  if row.nil?
232
346
  csv << [nexpose_id, scan_id]
233
347
  else
234
348
  row['last_scan_id'] = scan_id
235
349
  end
236
-
350
+
237
351
  save_last_scan_data csv
238
-
352
+
239
353
  log_and_print 'Last scan data updated.'
240
354
  end
241
355
 
@@ -253,7 +367,7 @@ module NexposeServiceNow
253
367
 
254
368
  new_name = "#{filename}.#{Time.new.strftime('%Y-%m-%d.%H:%M:%S')}"
255
369
  begin
256
- #Delete existing file with same name
370
+ # Delete existing file with same name
257
371
  File.delete new_name if File.exist? new_name
258
372
  File.rename(filename, new_name)
259
373
  rescue Exception => e