heimdall_tools 1.3.38 → 1.3.42

@@ -54,11 +54,10 @@ module HeimdallTools
  hdfs = HeimdallTools::NessusMapper.new(File.read(options[:xml])).to_hdf

  puts "\nHDF Generated:"
- hdfs.keys.each do | host |
+ hdfs.each_key do |host|
  File.write("#{options[:output_prefix]}-#{host}.json", hdfs[host])
  puts "#{options[:output_prefix]}-#{host}.json"
  end
-
  end

  desc 'snyk_mapper', 'snyk_mapper translates Snyk results Json to HDF format Json be viewed on Heimdall'
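Note: the `hdfs.keys.each` to `hdfs.each_key` changes in this file are behavior-preserving; Hash#each_key iterates the keys directly instead of first materializing them into an intermediate Array. A minimal standalone sketch (hypothetical hash, not from the gem):

    hdfs = { 'host-a' => '{"profiles":[]}', 'host-b' => '{"profiles":[]}' }
    hdfs.each_key { |host| puts "#{host}.json" }  # prints host-a.json and host-b.json without building hdfs.keys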
@@ -69,7 +68,7 @@ module HeimdallTools
  def snyk_mapper
  hdfs = HeimdallTools::SnykMapper.new(File.read(options[:json]), options[:name]).to_hdf
  puts "\r\HDF Generated:\n"
- hdfs.keys.each do | host |
+ hdfs.each_key do |host|
  File.write("#{options[:output_prefix]}-#{host}.json", hdfs[host])
  puts "#{options[:output_prefix]}-#{host}.json"
  end
@@ -84,7 +83,7 @@ module HeimdallTools
  hdf = HeimdallTools::NiktoMapper.new(File.read(options[:json])).to_hdf
  File.write(options[:output], hdf)
  puts "\r\HDF Generated:\n"
- puts "#{options[:output]}"
+ puts options[:output].to_s
  end

  desc 'jfrog_xray_mapper', 'jfrog_xray_mapper translates Jfrog Xray results Json to HDF format Json be viewed on Heimdall'
@@ -96,9 +95,9 @@ module HeimdallTools
  hdf = HeimdallTools::JfrogXrayMapper.new(File.read(options[:json])).to_hdf
  File.write(options[:output], hdf)
  puts "\r\HDF Generated:\n"
- puts "#{options[:output]}"
+ puts options[:output].to_s
  end
-
+
  desc 'dbprotect_mapper', 'dbprotect_mapper translates dbprotect results xml to HDF format Json be viewed on Heimdall'
  long_desc Help.text(:dbprotect_mapper)
  option :xml, required: true, aliases: '-x'
@@ -108,7 +107,7 @@ module HeimdallTools
  hdf = HeimdallTools::DBProtectMapper.new(File.read(options[:xml])).to_hdf
  File.write(options[:output], hdf)
  puts "\r\HDF Generated:\n"
- puts "#{options[:output]}"
+ puts options[:output].to_s
  end

  desc 'aws_config_mapper', 'aws_config_mapper pulls Ruby AWS SDK data to translate AWS Config Rule results into HDF format Json to be viewable in Heimdall'
@@ -120,7 +119,19 @@ module HeimdallTools
  hdf = HeimdallTools::AwsConfigMapper.new(options[:custom_mapping]).to_hdf
  File.write(options[:output], hdf)
  puts "\r\HDF Generated:\n"
- puts "#{options[:output]}"
+ puts options[:output].to_s
+ end
+
+ desc 'netsparker_mapper', 'netsparker_mapper translates netsparker enterprise results xml to HDF format Json be viewed on Heimdall'
+ long_desc Help.text(:netsparker_mapper)
+ option :xml, required: true, aliases: '-x'
+ option :output, required: true, aliases: '-o'
+ option :verbose, type: :boolean, aliases: '-V'
+ def netsparker_mapper
+ hdf = HeimdallTools::NetsparkerMapper.new(File.read(options[:xml])).to_hdf
+ File.write(options[:output], hdf)
+ puts "\r\HDF Generated:\n"
+ puts options[:output].to_s
  end

  desc 'version', 'prints version'
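Note: the new netsparker_mapper subcommand added above follows the same pattern as the existing mappers: read the scan file, convert, write the result. A minimal sketch of equivalent programmatic use, assuming the gem's top-level require and with placeholder file names:

    require 'heimdall_tools'

    # Read the Netsparker Enterprise XML, convert to HDF JSON, write it out
    hdf = HeimdallTools::NetsparkerMapper.new(File.read('netsparker_results.xml')).to_hdf
    File.write('netsparker_hdf.json', hdf)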
@@ -3,8 +3,6 @@ require 'thor'
  # Override thor's long_desc identation behavior
  # https://github.com/erikhuda/thor/issues/398

- # rubocop:disable Naming/UncommunicativeMethodParamName
-
  class Thor
  module Shell
  class Basic
@@ -10,21 +10,17 @@ IMPACT_MAPPING = {
  Informational: 0.0
  }.freeze

- # rubocop:disable Metrics/AbcSize
-
  module HeimdallTools
  class DBProtectMapper
- def initialize(xml, name=nil, verbose = false)
+ def initialize(xml, _name = nil, verbose = false)
  @verbose = verbose

  begin
  dataset = xml_to_hash(xml)
  @entries = compile_findings(dataset['dataset'])
-
  rescue StandardError => e
  raise "Invalid DBProtect XML file provided Exception: #{e};\nNote that XML must be of kind `Check Results Details`."
  end
-
  end

  def to_hdf
@@ -46,7 +42,7 @@ module HeimdallTools
  end
  controls = collapse_duplicates(controls)
  results = HeimdallDataFormat.new(profile_name: @entries.first['Policy'],
- version: "",
+ version: '',
  title: @entries.first['Job Name'],
  summary: format_summary(@entries.first),
  controls: controls)
@@ -56,16 +52,15 @@ module HeimdallTools
  private

  def compile_findings(dataset)
- keys = dataset['metadata']['item'].map{ |e| e['name']}
- findings = dataset['data']['row'].map { |e| Hash[keys.zip(e['value'])] }
- findings
+ keys = dataset['metadata']['item'].map { |e| e['name'] }
+ dataset['data']['row'].map { |e| keys.zip(e['value']).to_h }
  end

  def format_desc(entry)
  text = []
  text << "Task : #{entry['Task']}"
  text << "Check Category : #{entry['Check Category']}"
- text.join("; ")
+ text.join('; ')
  end

  def format_summary(entry)
@@ -90,14 +85,12 @@ module HeimdallTools
  finding['status'] = 'skipped'
  when 'Failed'
  finding['status'] = 'failed'
- finding['backtrace'] = ["DB Protect Failed Check"]
+ finding['backtrace'] = ['DB Protect Failed Check']
  when 'Finding'
  finding['status'] = 'failed'
  when 'Not A Finding'
  finding['status'] = 'passed'
- when 'Skipped'
- finding['status'] = 'skipped'
- else
+ else
  finding['status'] = 'skipped'
  end
  [finding]
@@ -108,20 +101,18 @@ module HeimdallTools
  end

  # DBProtect report could have multiple issue entries for multiple findings of same issue type.
- # The meta data is identical across entries
+ # The meta data is identical across entries
  # method collapse_duplicates return unique controls with applicable findings collapsed into it.
  def collapse_duplicates(controls)
  unique_controls = []

  controls.map { |x| x['id'] }.uniq.each do |id|
- collapsed_results = controls.select { |x| x['id'].eql?(id) }.map {|x| x['results']}
+ collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
  unique_control = controls.find { |x| x['id'].eql?(id) }
  unique_control['results'] = collapsed_results.flatten
  unique_controls << unique_control
  end
  unique_controls
  end
-
-
  end
  end
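Note: collapse_duplicates, shown above, merges the results of controls that share an id into the first control carrying that id. A minimal illustration with hypothetical data:

    controls = [
      { 'id' => 'AC-1', 'results' => ['finding 1'] },
      { 'id' => 'AC-1', 'results' => ['finding 2'] },
      { 'id' => 'AC-2', 'results' => ['finding 3'] }
    ]
    collapse_duplicates(controls)
    # => [{ 'id' => 'AC-1', 'results' => ['finding 1', 'finding 2'] },
    #     { 'id' => 'AC-2', 'results' => ['finding 3'] }]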
@@ -3,7 +3,7 @@ require 'heimdall_tools/hdf'
  require 'utilities/xml_to_hash'

  NIST_REFERENCE_NAME = 'Standards Mapping - NIST Special Publication 800-53 Revision 4'.freeze
- DEFAULT_NIST_TAG = ["SA-11", "RA-5"].freeze
+ DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze

  module HeimdallTools
  class FortifyMapper
@@ -19,7 +19,6 @@ module HeimdallTools
  @rules = data['FVDL']['Description']
  @uuid = data['FVDL']['UUID']
  @fortify_version = data['FVDL']['EngineData']['EngineVersion']
-
  rescue StandardError => e
  raise "Invalid Fortify FVDL file provided Exception: #{e}"
  end
@@ -2,15 +2,14 @@ require 'json'
  require 'heimdall_tools/version'
  require 'openssl'

- NA_STRING = "".freeze
- NA_TAG = nil.freeze
+ NA_STRING = ''.freeze
+ NA_TAG = nil
  NA_ARRAY = [].freeze
  NA_HASH = {}.freeze
- NA_FLOAT = 0.0.freeze
+ NA_FLOAT = 0.0

  PLATFORM_NAME = 'Heimdall Tools'.freeze

-
  module HeimdallTools
  class HeimdallDataFormat
  def initialize(profile_name: NA_TAG,
@@ -60,7 +59,7 @@ module HeimdallTools
  profile_block['groups'] = groups
  profile_block['status'] = status
  profile_block['controls'] = controls
- profile_block['sha256'] = OpenSSL::Digest::SHA256.digest(profile_block.to_s).unpack("H*")[0]
+ profile_block['sha256'] = OpenSSL::Digest::SHA256.digest(profile_block.to_s).unpack1('H*')
  @results_json['profiles'] << profile_block
  end
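Note: String#unpack1('H*') (Ruby 2.4+) returns the first element of unpack('H*') directly, so the sha256 line above is a behavior-preserving refactor. A quick check:

    require 'openssl'

    digest = OpenSSL::Digest::SHA256.digest('example')
    digest.unpack('H*')[0] == digest.unpack1('H*')  # => true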
@@ -0,0 +1,7 @@
+ netsparker_mapper translates an Netsparker XML results file into HDF format json to be viewable in Heimdall
+
+ The current iteration only works with Netsparker Enterprise Vulnerabilities Scan.
+
+ Examples:
+
+ heimdall_tools netsparker_mapper -x netsparker_results.xml -o netsparker_hdf.json
@@ -10,10 +10,10 @@ CWE_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'cwe-nist-mapping.csv')
  IMPACT_MAPPING = {
  high: 0.7,
  medium: 0.5,
- low: 0.3,
+ low: 0.3
  }.freeze

- DEFAULT_NIST_TAG = ["SA-11", "RA-5"].freeze
+ DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze

  # Loading spinner sign
  $spinner = Enumerator.new do |e|
@@ -27,14 +27,13 @@ end

  module HeimdallTools
  class JfrogXrayMapper
- def initialize(xray_json, name=nil, verbose = false)
+ def initialize(xray_json, _name = nil, verbose = false)
  @xray_json = xray_json
  @verbose = verbose

  begin
  @cwe_nist_mapping = parse_mapper
  @project = JSON.parse(xray_json)
-
  rescue StandardError => e
  raise "Invalid JFrog Xray JSON file provided Exception: #{e}"
  end
@@ -44,11 +43,11 @@ module HeimdallTools
  finding = {}
  finding['status'] = 'failed'
  finding['code_desc'] = []
- finding['code_desc'] << "source_comp_id : #{vulnerability['source_comp_id'].to_s }"
- finding['code_desc'] << "vulnerable_versions : #{vulnerability['component_versions']['vulnerable_versions'].to_s }"
- finding['code_desc'] << "fixed_versions : #{vulnerability['component_versions']['fixed_versions'].to_s }"
- finding['code_desc'] << "issue_type : #{vulnerability['issue_type'].to_s }"
- finding['code_desc'] << "provider : #{vulnerability['provider'].to_s }"
+ finding['code_desc'] << "source_comp_id : #{vulnerability['source_comp_id']}"
+ finding['code_desc'] << "vulnerable_versions : #{vulnerability['component_versions']['vulnerable_versions']}"
+ finding['code_desc'] << "fixed_versions : #{vulnerability['component_versions']['fixed_versions']}"
+ finding['code_desc'] << "issue_type : #{vulnerability['issue_type']}"
+ finding['code_desc'] << "provider : #{vulnerability['provider']}"
  finding['code_desc'] = finding['code_desc'].join("\n")
  finding['run_time'] = NA_FLOAT

@@ -57,17 +56,25 @@ module HeimdallTools
  [finding]
  end

+ def format_control_desc(vulnerability)
+ text = []
+ info = vulnerability['component_versions']['more_details']
+ text << info['description'].to_s
+ text << "cves: #{info['cves']}" unless info['cves'].nil?
+ text.join('<br>')
+ end
+
  def nist_tag(cweid)
- entries = @cwe_nist_mapping.select { |x| cweid.include? x[:cweid].to_s }
+ entries = @cwe_nist_mapping.select { |x| cweid.include?(x[:cweid].to_s) && !x[:nistid].nil? }
  tags = entries.map { |x| x[:nistid] }
  tags.empty? ? DEFAULT_NIST_TAG : tags.flatten.uniq
  end

  def parse_identifiers(vulnerability, ref)
  # Extracting id number from reference style CWE-297
- vulnerability['component_versions']['more_details']['cves'][0][ref.downcase].map { |e| e.split("#{ref}-")[1] }
- rescue
- return []
+ vulnerability['component_versions']['more_details']['cves'][0][ref.downcase].map { |e| e.split("#{ref}-")[1] }
+ rescue StandardError
+ []
  end

  def impact(severity)
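Note: the added `&& !x[:nistid].nil?` guard in nist_tag above means mapping rows without a NIST id no longer contribute nil tags; when nothing usable matches, the method falls back to DEFAULT_NIST_TAG. A standalone sketch with hypothetical mapping rows (the gem loads the real rows from cwe-nist-mapping.csv; the signature is adapted here to take the mapping as an argument):

    DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze

    def nist_tag(cwe_nist_mapping, cweid)
      entries = cwe_nist_mapping.select { |x| cweid.include?(x[:cweid].to_s) && !x[:nistid].nil? }
      tags = entries.map { |x| x[:nistid] }
      tags.empty? ? DEFAULT_NIST_TAG : tags.flatten.uniq
    end

    mapping = [{ cweid: 79, nistid: ['SI-10'] }, { cweid: 89, nistid: nil }]
    nist_tag(mapping, ['79'])  # => ["SI-10"]
    nist_tag(mapping, ['89'])  # => ["SA-11", "RA-5"]  (nil row filtered out, default applies)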
@@ -83,17 +90,17 @@ module HeimdallTools
  end

  def desc_tags(data, label)
- { "data": data || NA_STRING, "label": label || NA_STRING }
+ { data: data || NA_STRING, label: label || NA_STRING }
  end

  # Xray report could have multiple vulnerability entries for multiple findings of same issue type.
- # The meta data is identical across entries
+ # The meta data is identical across entries
  # method collapse_duplicates return unique controls with applicable findings collapsed into it.
  def collapse_duplicates(controls)
  unique_controls = []

  controls.map { |x| x['id'] }.uniq.each do |id|
- collapsed_results = controls.select { |x| x['id'].eql?(id) }.map {|x| x['results']}
+ collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
  unique_control = controls.find { |x| x['id'].eql?(id) }
  unique_control['results'] = collapsed_results.flatten
  unique_controls << unique_control
@@ -104,9 +111,9 @@ module HeimdallTools
  def to_hdf
  controls = []
  vulnerability_count = 0
- @project['data'].uniq.each do | vulnerability |
+ @project['data'].uniq.each do |vulnerability|
  printf("\rProcessing: %s", $spinner.next)
-
+
  vulnerability_count +=1
  item = {}
  item['tags'] = {}
@@ -115,26 +122,26 @@ module HeimdallTools
  item['source_location'] = NA_HASH
  item['descriptions'] = NA_ARRAY

- # Xray JSONs might note have `id` fields populated.
+ # Xray JSONs might note have `id` fields populated.
  # If thats a case MD5 hash is used to collapse vulnerability findings of the same type.
- item['id'] = vulnerability['id'].empty? ? OpenSSL::Digest::MD5.digest(vulnerability['summary'].to_s).unpack("H*")[0].to_s : vulnerability['id']
+ item['id'] = vulnerability['id'].empty? ? OpenSSL::Digest::MD5.digest(vulnerability['summary'].to_s).unpack1('H*').to_s : vulnerability['id']
  item['title'] = vulnerability['summary'].to_s
- item['desc'] = vulnerability['component_versions']['more_details']['description'].to_s
- item['impact'] = impact(vulnerability['severity'].to_s)
+ item['desc'] = format_control_desc(vulnerability)
+ item['impact'] = impact(vulnerability['severity'].to_s)
  item['code'] = NA_STRING
  item['results'] = finding(vulnerability)

- item['tags']['nist'] = nist_tag( parse_identifiers( vulnerability, 'CWE') )
- item['tags']['cweid'] = parse_identifiers( vulnerability, 'CWE')
+ item['tags']['nist'] = nist_tag(parse_identifiers(vulnerability, 'CWE'))
+ item['tags']['cweid'] = parse_identifiers(vulnerability, 'CWE')

  controls << item
  end

  controls = collapse_duplicates(controls)
- results = HeimdallDataFormat.new(profile_name: "JFrog Xray Scan",
+ results = HeimdallDataFormat.new(profile_name: 'JFrog Xray Scan',
  version: NA_STRING,
- title: "JFrog Xray Scan",
- summary: "Continuous Security and Universal Artifact Analysis",
+ title: 'JFrog Xray Scan',
+ summary: 'Continuous Security and Universal Artifact Analysis',
  controls: controls)
  results.to_hdf
  end
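Note: when an Xray entry has an empty id, the to_hdf loop above derives a stable control id from the summary via MD5, so repeated findings of the same issue still collapse together. A minimal sketch (the summary string is hypothetical):

    require 'openssl'

    summary = 'Arbitrary file write in libfoo'  # hypothetical vulnerability summary
    OpenSSL::Digest::MD5.digest(summary).unpack1('H*')
    # => 32-character hex string used as item['id']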
@@ -6,7 +6,7 @@ require 'nokogiri'

  RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')

- NESSUS_PLUGINS_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'nessus-plugins-nist-mapping.csv')
+ NESSUS_PLUGINS_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'nessus-plugins-nist-mapping.csv')
  U_CCI_LIST = File.join(RESOURCE_DIR, 'U_CCI_List.xml')

  IMPACT_MAPPING = {
@@ -14,16 +14,16 @@ IMPACT_MAPPING = {
  Low: 0.3,
  Medium: 0.5,
  High: 0.7,
- Critical: 0.9,
+ Critical: 0.9
  }.freeze

- DEFAULT_NIST_TAG = ["unmapped"].freeze
+ DEFAULT_NIST_TAG = ['unmapped'].freeze

  # Nessus results file 800-53 refs does not contain Nist rev version. Using this default
  # version in that case
  DEFAULT_NIST_REV = 'Rev_4'.freeze

- NA_PLUGIN_OUTPUT = "This Nessus Plugin does not provide output message.".freeze
+ NA_PLUGIN_OUTPUT = 'This Nessus Plugin does not provide output message.'.freeze

  # rubocop:disable Metrics/AbcSize

@@ -51,19 +51,16 @@ module HeimdallTools
  rescue StandardError => e
  raise "Invalid Nessus XML file provided Exception: #{e}"
  end
-
  end

  def extract_report
- begin
- # When there are multiple hosts in the nessus report ReportHost field is an array
- # When there is only one host in the nessus report ReportHost field is a hash
- # Array() converts ReportHost to array in case there is only one host
- reports = @data['NessusClientData_v2']['Report']['ReportHost']
- reports.kind_of?(Array) ? reports : [reports]
- rescue StandardError => e
- raise "Invalid Nessus XML file provided Exception: #{e}"
- end
+ # When there are multiple hosts in the nessus report ReportHost field is an array
+ # When there is only one host in the nessus report ReportHost field is a hash
+ # Array() converts ReportHost to array in case there is only one host
+ reports = @data['NessusClientData_v2']['Report']['ReportHost']
+ reports.is_a?(Array) ? reports : [reports]
+ rescue StandardError => e
+ raise "Invalid Nessus XML file provided Exception: #{e}"
  end

  def parse_refs(refs, key)
@@ -71,24 +68,20 @@ module HeimdallTools
  end

  def extract_scaninfo
- begin
- policy = @data['NessusClientData_v2']['Policy']
- info = {}
+ policy = @data['NessusClientData_v2']['Policy']
+ info = {}

- info['policyName'] = policy['policyName']
- info['version'] = policy['Preferences']['ServerPreferences']['preference'].select {|x| x['name'].eql? 'sc_version'}.first['value']
- info
- rescue StandardError => e
- raise "Invalid Nessus XML file provided Exception: #{e}"
- end
+ info['policyName'] = policy['policyName']
+ info['version'] = policy['Preferences']['ServerPreferences']['preference'].select { |x| x['name'].eql? 'sc_version' }.first['value']
+ info
+ rescue StandardError => e
+ raise "Invalid Nessus XML file provided Exception: #{e}"
  end

  def extract_timestamp(report)
- begin
- timestamp = report['HostProperties']['tag'].select {|x| x['name'].eql? 'HOST_START'}.first['text']
- rescue StandardError => e
- raise "Invalid Nessus XML file provided Exception: #{e}"
- end
+ report['HostProperties']['tag'].select { |x| x['name'].eql? 'HOST_START' }.first['text']
+ rescue StandardError => e
+ raise "Invalid Nessus XML file provided Exception: #{e}"
  end

  def format_desc(issue)
@@ -129,7 +122,7 @@

  def cci_nist_tag(cci_refs)
  nist_tags = []
- cci_refs.each do | cci_ref |
+ cci_refs.each do |cci_ref|
  item_node = @cci_xml.xpath("//cci_list/cci_items/cci_item[@id='#{cci_ref}']")[0] unless @cci_xml.nil?
  unless item_node.nil?
  nist_ref = item_node.xpath('./references/reference[not(@version <= preceding-sibling::reference/@version) and not(@version <=following-sibling::reference/@version)]/@index').text
@@ -140,7 +133,7 @@
  end
  end

  def plugin_nist_tag(pluginfamily, pluginid)
- entries = @cwe_nist_mapping.select { |x| (x[:pluginfamily].eql?(pluginfamily) && (x[:pluginid].eql?('*') || x[:pluginid].eql?(pluginid.to_i)) ) }
+ entries = @cwe_nist_mapping.select { |x| (x[:pluginfamily].eql?(pluginfamily) && (x[:pluginid].eql?('*') || x[:pluginid].eql?(pluginid.to_i))) && !x[:nistid].nil? }
  tags = entries.map { |x| [x[:nistid].split('|'), "Rev_#{x[:rev]}"] }
  tags.empty? ? DEFAULT_NIST_TAG : tags.flatten.uniq
  end
@@ -148,15 +141,15 @@ module HeimdallTools
  def impact(severity)
  # Map CAT levels and Plugin severity to HDF impact levels
  case severity
- when "0"
+ when '0'
  IMPACT_MAPPING[:Info]
- when "1","III"
+ when '1', 'III'
  IMPACT_MAPPING[:Low]
- when "2","II"
+ when '2', 'II'
  IMPACT_MAPPING[:Medium]
- when "3","I"
+ when '3', 'I'
  IMPACT_MAPPING[:High]
- when "4"
+ when '4'
  IMPACT_MAPPING[:Critical]
  else
  -1
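Note: with the IMPACT_MAPPING values shown earlier in this file, the quoted plugin severities and STIG CAT levels above resolve as in this small illustration of the method's return values:

    impact('2')    # => 0.5  (plugin severity 2 / CAT II -> Medium)
    impact('I')    # => 0.7  (CAT I -> High)
    impact('foo')  # => -1   (unrecognized severity)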
@@ -172,17 +165,17 @@ module HeimdallTools
  end

  def desc_tags(data, label)
- { "data": data || NA_STRING, "label": label || NA_STRING }
+ { data: data || NA_STRING, label: label || NA_STRING }
  end

  # Nessus report could have multiple issue entries for multiple findings of same issue type.
- # The meta data is identical across entries
+ # The meta data is identical across entries
  # method collapse_duplicates return unique controls with applicable findings collapsed into it.
  def collapse_duplicates(controls)
  unique_controls = []

  controls.map { |x| x['id'] }.uniq.each do |id|
- collapsed_results = controls.select { |x| x['id'].eql?(id) }.map {|x| x['results']}
+ collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
  unique_control = controls.find { |x| x['id'].eql?(id) }
  unique_control['results'] = collapsed_results.flatten
  unique_controls << unique_control
@@ -192,9 +185,9 @@ module HeimdallTools

  def to_hdf
  host_results = {}
- @reports.each do | report|
+ @reports.each do |report|
  controls = []
- report['ReportItem'].each do | item |
+ report['ReportItem'].each do |item|
  printf("\rProcessing: %s", $spinner.next)
  @item = {}
  @item['tags'] = {}
@@ -207,7 +200,7 @@ module HeimdallTools
  # Current version covers STIG based 'Policy Compliance' results
  # TODO Cover cases for 'Policy Compliance' results based on CIS
  if item['compliance-reference']
- @item['id'] = parse_refs(item['compliance-reference'],'Vuln-ID').join.to_s
+ @item['id'] = parse_refs(item['compliance-reference'], 'Vuln-ID').join.to_s
  else
  @item['id'] = item['pluginID'].to_s
  end
@@ -222,17 +215,17 @@ module HeimdallTools
  @item['desc'] = format_desc(item).to_s
  end
  if item['compliance-reference']
- @item['impact'] = impact(parse_refs(item['compliance-reference'],'CAT').join.to_s)
+ @item['impact'] = impact(parse_refs(item['compliance-reference'], 'CAT').join.to_s)
  else
- @item['impact'] = impact(item['severity'])
+ @item['impact'] = impact(item['severity'])
  end
  if item['compliance-reference']
- @item['tags']['nist'] = cci_nist_tag(parse_refs(item['compliance-reference'],'CCI'))
+ @item['tags']['nist'] = cci_nist_tag(parse_refs(item['compliance-reference'], 'CCI'))
  else
- @item['tags']['nist'] = plugin_nist_tag(item['pluginFamily'],item['pluginID'])
+ @item['tags']['nist'] = plugin_nist_tag(item['pluginFamily'], item['pluginID'])
  end
  if item['compliance-solution']
- @item['descriptions'] << desc_tags(item['compliance-solution'], 'check')
+ @item['descriptions'] << desc_tags(item['compliance-solution'], 'check')
  end

  @item['code'] = ''