heimdall_tools 1.3.40 → 1.3.45
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +15 -0
- data/lib/data/aws-config-mapping.csv +107 -107
- data/lib/heimdall_tools/aws_config_mapper.rb +55 -37
- data/lib/heimdall_tools/burpsuite_mapper.rb +7 -12
- data/lib/heimdall_tools/cli.rb +9 -21
- data/lib/heimdall_tools/command.rb +0 -2
- data/lib/heimdall_tools/dbprotect_mapper.rb +13 -26
- data/lib/heimdall_tools/fortify_mapper.rb +2 -4
- data/lib/heimdall_tools/hdf.rb +4 -5
- data/lib/heimdall_tools/jfrog_xray_mapper.rb +26 -28
- data/lib/heimdall_tools/nessus_mapper.rb +41 -48
- data/lib/heimdall_tools/netsparker_mapper.rb +21 -28
- data/lib/heimdall_tools/nikto_mapper.rb +27 -28
- data/lib/heimdall_tools/snyk_mapper.rb +20 -23
- data/lib/heimdall_tools/sonarqube_mapper.rb +23 -21
- data/lib/heimdall_tools/zap_mapper.rb +4 -6
- data/lib/utilities/xml_to_hash.rb +6 -6
- metadata +39 -25
data/lib/heimdall_tools/cli.rb
CHANGED

@@ -6,7 +6,6 @@ module HeimdallTools
     long_desc Help.text(:fortify_mapper)
     option :fvdl, required: true, aliases: '-f'
     option :output, required: true, aliases: '-o'
-    option :verbose, type: :boolean, aliases: '-V'
     def fortify_mapper
       hdf = HeimdallTools::FortifyMapper.new(File.read(options[:fvdl])).to_hdf
       File.write(options[:output], hdf)

@@ -17,7 +16,6 @@ module HeimdallTools
     option :json, required: true, aliases: '-j'
     option :name, required: true, aliases: '-n'
     option :output, required: true, aliases: '-o'
-    option :verbose, type: :boolean, aliases: '-V'
     def zap_mapper
       hdf = HeimdallTools::ZapMapper.new(File.read(options[:json]), options[:name]).to_hdf
       File.write(options[:output], hdf)

@@ -29,7 +27,6 @@ module HeimdallTools
     option :api_url, required: true, aliases: '-u'
     option :auth, type: :string, required: false
     option :output, required: true, aliases: '-o'
-    option :verbose, type: :boolean, aliases: '-V'
     def sonarqube_mapper
       hdf = HeimdallTools::SonarQubeMapper.new(options[:name], options[:api_url], options[:auth]).to_hdf
       File.write(options[:output], hdf)

@@ -39,7 +36,6 @@ module HeimdallTools
     long_desc Help.text(:burpsuite_mapper)
     option :xml, required: true, aliases: '-x'
     option :output, required: true, aliases: '-o'
-    option :verbose, type: :boolean, aliases: '-V'
     def burpsuite_mapper
       hdf = HeimdallTools::BurpSuiteMapper.new(File.read(options[:xml])).to_hdf
       File.write(options[:output], hdf)

@@ -49,27 +45,24 @@ module HeimdallTools
     long_desc Help.text(:nessus_mapper)
     option :xml, required: true, aliases: '-x'
     option :output_prefix, required: true, aliases: '-o'
-    option :verbose, type: :boolean, aliases: '-V'
     def nessus_mapper
       hdfs = HeimdallTools::NessusMapper.new(File.read(options[:xml])).to_hdf

       puts "\nHDF Generated:"
-      hdfs.
+      hdfs.each_key do |host|
         File.write("#{options[:output_prefix]}-#{host}.json", hdfs[host])
         puts "#{options[:output_prefix]}-#{host}.json"
       end
-
     end

     desc 'snyk_mapper', 'snyk_mapper translates Snyk results Json to HDF format Json be viewed on Heimdall'
     long_desc Help.text(:snyk_mapper)
     option :json, required: true, aliases: '-j'
     option :output_prefix, required: true, aliases: '-o'
-    option :verbose, type: :boolean, aliases: '-V'
     def snyk_mapper
       hdfs = HeimdallTools::SnykMapper.new(File.read(options[:json]), options[:name]).to_hdf
       puts "\r\HDF Generated:\n"
-      hdfs.
+      hdfs.each_key do |host|
         File.write("#{options[:output_prefix]}-#{host}.json", hdfs[host])
         puts "#{options[:output_prefix]}-#{host}.json"
       end

@@ -79,60 +72,55 @@ module HeimdallTools
     long_desc Help.text(:nikto_mapper)
     option :json, required: true, aliases: '-j'
     option :output, required: true, aliases: '-o'
-    option :verbose, type: :boolean, aliases: '-V'
     def nikto_mapper
       hdf = HeimdallTools::NiktoMapper.new(File.read(options[:json])).to_hdf
       File.write(options[:output], hdf)
       puts "\r\HDF Generated:\n"
-      puts
+      puts options[:output].to_s
     end

     desc 'jfrog_xray_mapper', 'jfrog_xray_mapper translates Jfrog Xray results Json to HDF format Json be viewed on Heimdall'
     long_desc Help.text(:jfrog_xray_mapper)
     option :json, required: true, aliases: '-j'
     option :output, required: true, aliases: '-o'
-    option :verbose, type: :boolean, aliases: '-V'
     def jfrog_xray_mapper
       hdf = HeimdallTools::JfrogXrayMapper.new(File.read(options[:json])).to_hdf
       File.write(options[:output], hdf)
       puts "\r\HDF Generated:\n"
-      puts
+      puts options[:output].to_s
     end
-
+
     desc 'dbprotect_mapper', 'dbprotect_mapper translates dbprotect results xml to HDF format Json be viewed on Heimdall'
     long_desc Help.text(:dbprotect_mapper)
     option :xml, required: true, aliases: '-x'
     option :output, required: true, aliases: '-o'
-    option :verbose, type: :boolean, aliases: '-V'
     def dbprotect_mapper
       hdf = HeimdallTools::DBProtectMapper.new(File.read(options[:xml])).to_hdf
       File.write(options[:output], hdf)
       puts "\r\HDF Generated:\n"
-      puts
+      puts options[:output].to_s
     end

     desc 'aws_config_mapper', 'aws_config_mapper pulls Ruby AWS SDK data to translate AWS Config Rule results into HDF format Json to be viewable in Heimdall'
     long_desc Help.text(:aws_config_mapper)
     # option :custom_mapping, required: false, aliases: '-m'
     option :output, required: true, aliases: '-o'
-    option :verbose, type: :boolean, aliases: '-V'
     def aws_config_mapper
       hdf = HeimdallTools::AwsConfigMapper.new(options[:custom_mapping]).to_hdf
       File.write(options[:output], hdf)
       puts "\r\HDF Generated:\n"
-      puts
+      puts options[:output].to_s
     end
-
+
     desc 'netsparker_mapper', 'netsparker_mapper translates netsparker enterprise results xml to HDF format Json be viewed on Heimdall'
     long_desc Help.text(:netsparker_mapper)
     option :xml, required: true, aliases: '-x'
     option :output, required: true, aliases: '-o'
-    option :verbose, type: :boolean, aliases: '-V'
     def netsparker_mapper
       hdf = HeimdallTools::NetsparkerMapper.new(File.read(options[:xml])).to_hdf
       File.write(options[:output], hdf)
       puts "\r\HDF Generated:\n"
-      puts
+      puts options[:output].to_s
     end

     desc 'version', 'prints version'
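For reference, a minimal sketch (not part of the gem) of the per-host write pattern the nessus_mapper and snyk_mapper subcommands now share via Hash#each_key; the sample hash and output prefix below are hypothetical stand-ins for the mapper output and options[:output_prefix]:

    # Hypothetical per-host HDF output, keyed by host name.
    hdfs = {
      'host-a.example.com' => '{"profiles":[]}',
      'host-b.example.com' => '{"profiles":[]}'
    }
    output_prefix = 'nessus-results' # stand-in for options[:output_prefix]

    # Write one JSON file per host and echo its path, as the updated CLI does.
    hdfs.each_key do |host|
      File.write("#{output_prefix}-#{host}.json", hdfs[host])
      puts "#{output_prefix}-#{host}.json"
    end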
data/lib/heimdall_tools/dbprotect_mapper.rb
CHANGED

@@ -10,21 +10,13 @@ IMPACT_MAPPING = {
   Informational: 0.0
 }.freeze

-# rubocop:disable Metrics/AbcSize
-
 module HeimdallTools
   class DBProtectMapper
-    def initialize(xml,
-
-
-
-
-      @entries = compile_findings(dataset['dataset'])
-
-    rescue StandardError => e
-      raise "Invalid DBProtect XML file provided Exception: #{e};\nNote that XML must be of kind `Check Results Details`."
-    end
-
+    def initialize(xml, _name = nil)
+      dataset = xml_to_hash(xml)
+      @entries = compile_findings(dataset['dataset'])
+    rescue StandardError => e
+      raise "Invalid DBProtect XML file provided Exception: #{e};\nNote that XML must be of kind `Check Results Details`."
     end

     def to_hdf

@@ -46,7 +38,7 @@ module HeimdallTools
       end
       controls = collapse_duplicates(controls)
       results = HeimdallDataFormat.new(profile_name: @entries.first['Policy'],
-                                       version:
+                                       version: '',
                                        title: @entries.first['Job Name'],
                                        summary: format_summary(@entries.first),
                                        controls: controls)

@@ -56,16 +48,15 @@ module HeimdallTools
     private

     def compile_findings(dataset)
-      keys = dataset['metadata']['item'].map{ |e| e['name']}
-
-      findings
+      keys = dataset['metadata']['item'].map { |e| e['name'] }
+      dataset['data']['row'].map { |e| keys.zip(e['value']).to_h }
     end

     def format_desc(entry)
       text = []
       text << "Task : #{entry['Task']}"
       text << "Check Category : #{entry['Check Category']}"
-      text.join(
+      text.join('; ')
     end

     def format_summary(entry)

@@ -90,14 +81,12 @@ module HeimdallTools
         finding['status'] = 'skipped'
       when 'Failed'
         finding['status'] = 'failed'
-        finding['backtrace'] = [
+        finding['backtrace'] = ['DB Protect Failed Check']
       when 'Finding'
         finding['status'] = 'failed'
       when 'Not A Finding'
         finding['status'] = 'passed'
-
-        finding['status'] = 'skipped'
-      else
+      else
         finding['status'] = 'skipped'
       end
       [finding]

@@ -108,20 +97,18 @@ module HeimdallTools
     end

     # DBProtect report could have multiple issue entries for multiple findings of same issue type.
-    # The meta data is identical across entries
+    # The meta data is identical across entries
     # method collapse_duplicates return unique controls with applicable findings collapsed into it.
     def collapse_duplicates(controls)
       unique_controls = []

       controls.map { |x| x['id'] }.uniq.each do |id|
-        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map {|x| x['results']}
+        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
         unique_control = controls.find { |x| x['id'].eql?(id) }
         unique_control['results'] = collapsed_results.flatten
         unique_controls << unique_control
       end
       unique_controls
     end
-
-
   end
 end
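A small sketch (not part of the gem) of the compile_findings rewrite above, using a hypothetical two-column dataset to show how the metadata item names are zipped with each row's values into one hash per finding:

    # Hypothetical dataset shape, mirroring the hash produced by xml_to_hash.
    dataset = {
      'metadata' => { 'item' => [{ 'name' => 'Check' }, { 'name' => 'Result' }] },
      'data' => { 'row' => [{ 'value' => ['Audit logging enabled', 'Failed'] }] }
    }

    keys = dataset['metadata']['item'].map { |e| e['name'] }
    findings = dataset['data']['row'].map { |e| keys.zip(e['value']).to_h }
    # => [{ 'Check' => 'Audit logging enabled', 'Result' => 'Failed' }]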
data/lib/heimdall_tools/fortify_mapper.rb
CHANGED

@@ -3,13 +3,12 @@ require 'heimdall_tools/hdf'
 require 'utilities/xml_to_hash'

 NIST_REFERENCE_NAME = 'Standards Mapping - NIST Special Publication 800-53 Revision 4'.freeze
-DEFAULT_NIST_TAG =
+DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze

 module HeimdallTools
   class FortifyMapper
-    def initialize(fvdl
+    def initialize(fvdl)
       @fvdl = fvdl
-      @verbose = verbose

       begin
         data = xml_to_hash(fvdl)

@@ -19,7 +18,6 @@ module HeimdallTools
         @rules = data['FVDL']['Description']
         @uuid = data['FVDL']['UUID']
         @fortify_version = data['FVDL']['EngineData']['EngineVersion']
-
       rescue StandardError => e
         raise "Invalid Fortify FVDL file provided Exception: #{e}"
       end
data/lib/heimdall_tools/hdf.rb
CHANGED

@@ -2,15 +2,14 @@ require 'json'
 require 'heimdall_tools/version'
 require 'openssl'

-NA_STRING =
-NA_TAG = nil
+NA_STRING = ''.freeze
+NA_TAG = nil
 NA_ARRAY = [].freeze
 NA_HASH = {}.freeze
-NA_FLOAT = 0.0
+NA_FLOAT = 0.0

 PLATFORM_NAME = 'Heimdall Tools'.freeze

-
 module HeimdallTools
   class HeimdallDataFormat
     def initialize(profile_name: NA_TAG,

@@ -60,7 +59,7 @@ module HeimdallTools
       profile_block['groups'] = groups
       profile_block['status'] = status
       profile_block['controls'] = controls
-      profile_block['sha256'] = OpenSSL::Digest::SHA256.digest(profile_block.to_s).
+      profile_block['sha256'] = OpenSSL::Digest::SHA256.digest(profile_block.to_s).unpack1('H*')
       @results_json['profiles'] << profile_block
     end

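The sha256 line above completes a previously truncated call: OpenSSL::Digest::SHA256.digest returns a binary string, and String#unpack1('H*') renders it as hex. A minimal standalone sketch, assuming nothing beyond the Ruby standard library (the profile hash is hypothetical):

    require 'openssl'

    profile_block = { 'name' => 'example-profile' } # hypothetical profile data
    sha256_hex = OpenSSL::Digest::SHA256.digest(profile_block.to_s).unpack1('H*')
    puts sha256_hex # => 64-character lowercase hex string

OpenSSL::Digest::SHA256.hexdigest(profile_block.to_s) would produce the same hex string in a single call; the diff keeps digest plus unpack1.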
data/lib/heimdall_tools/jfrog_xray_mapper.rb
CHANGED

@@ -10,10 +10,10 @@ CWE_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'cwe-nist-mapping.csv')
 IMPACT_MAPPING = {
   high: 0.7,
   medium: 0.5,
-  low: 0.3
+  low: 0.3
 }.freeze

-DEFAULT_NIST_TAG =
+DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze

 # Loading spinner sign
 $spinner = Enumerator.new do |e|

@@ -27,14 +27,12 @@ end

 module HeimdallTools
   class JfrogXrayMapper
-    def initialize(xray_json,
+    def initialize(xray_json, _name = nil)
       @xray_json = xray_json
-      @verbose = verbose

       begin
         @cwe_nist_mapping = parse_mapper
         @project = JSON.parse(xray_json)
-
       rescue StandardError => e
         raise "Invalid JFrog Xray JSON file provided Exception: #{e}"
       end

@@ -44,11 +42,11 @@ module HeimdallTools
       finding = {}
       finding['status'] = 'failed'
       finding['code_desc'] = []
-      finding['code_desc'] << "source_comp_id : #{vulnerability['source_comp_id']
-      finding['code_desc'] << "vulnerable_versions : #{vulnerability['component_versions']['vulnerable_versions']
-      finding['code_desc'] << "fixed_versions : #{vulnerability['component_versions']['fixed_versions']
-      finding['code_desc'] << "issue_type : #{vulnerability['issue_type']
-      finding['code_desc'] << "provider : #{vulnerability['provider']
+      finding['code_desc'] << "source_comp_id : #{vulnerability['source_comp_id']}"
+      finding['code_desc'] << "vulnerable_versions : #{vulnerability['component_versions']['vulnerable_versions']}"
+      finding['code_desc'] << "fixed_versions : #{vulnerability['component_versions']['fixed_versions']}"
+      finding['code_desc'] << "issue_type : #{vulnerability['issue_type']}"
+      finding['code_desc'] << "provider : #{vulnerability['provider']}"
       finding['code_desc'] = finding['code_desc'].join("\n")
       finding['run_time'] = NA_FLOAT

@@ -61,8 +59,8 @@ module HeimdallTools
       text = []
       info = vulnerability['component_versions']['more_details']
       text << info['description'].to_s
-      text << "cves: #{info['cves']
-      text.join(
+      text << "cves: #{info['cves']}" unless info['cves'].nil?
+      text.join('<br>')
     end

     def nist_tag(cweid)

@@ -73,9 +71,9 @@ module HeimdallTools

     def parse_identifiers(vulnerability, ref)
       # Extracting id number from reference style CWE-297
-      vulnerability['component_versions']['more_details']['cves'][0][ref.downcase].map { |e| e.split("#{ref}-")[1]
-
-
+      vulnerability['component_versions']['more_details']['cves'][0][ref.downcase].map { |e| e.split("#{ref}-")[1] }
+    rescue StandardError
+      []
     end

     def impact(severity)

@@ -91,17 +89,17 @@ module HeimdallTools
     end

     def desc_tags(data, label)
-      {
+      { data: data || NA_STRING, label: label || NA_STRING }
     end

     # Xray report could have multiple vulnerability entries for multiple findings of same issue type.
-    # The meta data is identical across entries
+    # The meta data is identical across entries
     # method collapse_duplicates return unique controls with applicable findings collapsed into it.
     def collapse_duplicates(controls)
       unique_controls = []

       controls.map { |x| x['id'] }.uniq.each do |id|
-        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map {|x| x['results']}
+        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
         unique_control = controls.find { |x| x['id'].eql?(id) }
         unique_control['results'] = collapsed_results.flatten
         unique_controls << unique_control

@@ -112,9 +110,9 @@ module HeimdallTools
     def to_hdf
       controls = []
       vulnerability_count = 0
-      @project['data'].uniq.each do |
+      @project['data'].uniq.each do |vulnerability|
         printf("\rProcessing: %s", $spinner.next)
-
+
         vulnerability_count +=1
         item = {}
         item['tags'] = {}

@@ -123,26 +121,26 @@ module HeimdallTools
         item['source_location'] = NA_HASH
         item['descriptions'] = NA_ARRAY

-        # Xray JSONs might note have `id` fields populated.
+        # Xray JSONs might note have `id` fields populated.
         # If thats a case MD5 hash is used to collapse vulnerability findings of the same type.
-        item['id'] = vulnerability['id'].empty? ? OpenSSL::Digest::MD5.digest(vulnerability['summary'].to_s).
+        item['id'] = vulnerability['id'].empty? ? OpenSSL::Digest::MD5.digest(vulnerability['summary'].to_s).unpack1('H*').to_s : vulnerability['id']
         item['title'] = vulnerability['summary'].to_s
         item['desc'] = format_control_desc(vulnerability)
-        item['impact'] = impact(vulnerability['severity'].to_s)
+        item['impact'] = impact(vulnerability['severity'].to_s)
         item['code'] = NA_STRING
         item['results'] = finding(vulnerability)

-        item['tags']['nist'] = nist_tag(
-        item['tags']['cweid'] = parse_identifiers(
+        item['tags']['nist'] = nist_tag(parse_identifiers(vulnerability, 'CWE'))
+        item['tags']['cweid'] = parse_identifiers(vulnerability, 'CWE')

         controls << item
       end

       controls = collapse_duplicates(controls)
-      results = HeimdallDataFormat.new(profile_name:
+      results = HeimdallDataFormat.new(profile_name: 'JFrog Xray Scan',
                                        version: NA_STRING,
-                                       title:
-                                       summary:
+                                       title: 'JFrog Xray Scan',
+                                       summary: 'Continuous Security and Universal Artifact Analysis',
                                        controls: controls)
       results.to_hdf
     end
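A sketch of the collapse_duplicates behavior shown in this mapper (and in the DBProtect and Nessus mappers): duplicates sharing an id collapse into one control whose results array carries every finding. The sample controls below are hypothetical:

    controls = [
      { 'id' => 'XRAY-0001', 'results' => [{ 'code_desc' => 'finding 1' }] },
      { 'id' => 'XRAY-0001', 'results' => [{ 'code_desc' => 'finding 2' }] },
      { 'id' => 'XRAY-0002', 'results' => [{ 'code_desc' => 'finding 3' }] }
    ]

    unique_controls = []
    controls.map { |x| x['id'] }.uniq.each do |id|
      collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
      unique_control = controls.find { |x| x['id'].eql?(id) }
      unique_control['results'] = collapsed_results.flatten
      unique_controls << unique_control
    end
    # unique_controls has two entries; 'XRAY-0001' now carries both findings.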
data/lib/heimdall_tools/nessus_mapper.rb
CHANGED

@@ -6,7 +6,7 @@ require 'nokogiri'

 RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')

-NESSUS_PLUGINS_NIST_MAPPING_FILE =
+NESSUS_PLUGINS_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'nessus-plugins-nist-mapping.csv')
 U_CCI_LIST = File.join(RESOURCE_DIR, 'U_CCI_List.xml')

 IMPACT_MAPPING = {

@@ -14,16 +14,16 @@ IMPACT_MAPPING = {
   Low: 0.3,
   Medium: 0.5,
   High: 0.7,
-  Critical: 0.9
+  Critical: 0.9
 }.freeze

-DEFAULT_NIST_TAG = [
+DEFAULT_NIST_TAG = ['unmapped'].freeze

 # Nessus results file 800-53 refs does not contain Nist rev version. Using this default
 # version in that case
 DEFAULT_NIST_REV = 'Rev_4'.freeze

-NA_PLUGIN_OUTPUT =
+NA_PLUGIN_OUTPUT = 'This Nessus Plugin does not provide output message.'.freeze

 # rubocop:disable Metrics/AbcSize

@@ -39,9 +39,8 @@ end

 module HeimdallTools
   class NessusMapper
-    def initialize(nessus_xml
+    def initialize(nessus_xml)
       @nessus_xml = nessus_xml
-      @verbose = verbose
       read_cci_xml
       begin
         @cwe_nist_mapping = parse_mapper

@@ -51,19 +50,16 @@ module HeimdallTools
       rescue StandardError => e
         raise "Invalid Nessus XML file provided Exception: #{e}"
       end
-
     end

     def extract_report
-
-
-
-
-
-
-
-      raise "Invalid Nessus XML file provided Exception: #{e}"
-    end
+      # When there are multiple hosts in the nessus report ReportHost field is an array
+      # When there is only one host in the nessus report ReportHost field is a hash
+      # Array() converts ReportHost to array in case there is only one host
+      reports = @data['NessusClientData_v2']['Report']['ReportHost']
+      reports.is_a?(Array) ? reports : [reports]
+    rescue StandardError => e
+      raise "Invalid Nessus XML file provided Exception: #{e}"
     end

     def parse_refs(refs, key)

@@ -71,24 +67,21 @@ module HeimdallTools
     end

     def extract_scaninfo
-
-
-      info = {}
+      policy = @data['NessusClientData_v2']['Policy']
+      info = {}

-
-
-
-
-
-
+      info['policyName'] = policy['policyName']
+      scanner_version = policy['Preferences']['ServerPreferences']['preference'].select { |x| x['name'].eql? 'sc_version' }
+      info['version'] = scanner_version.empty? ? NA_STRING : scanner_version.first['value']
+      info
+    rescue StandardError => e
+      raise "Invalid Nessus XML file provided Exception: #{e}"
     end

     def extract_timestamp(report)
-
-
-
-      raise "Invalid Nessus XML file provided Exception: #{e}"
-    end
+      report['HostProperties']['tag'].select { |x| x['name'].eql? 'HOST_START' }.first['text']
+    rescue StandardError => e
+      raise "Invalid Nessus XML file provided Exception: #{e}"
     end

     def format_desc(issue)

@@ -129,7 +122,7 @@ module HeimdallTools

     def cci_nist_tag(cci_refs)
       nist_tags = []
-      cci_refs.each do |
+      cci_refs.each do |cci_ref|
         item_node = @cci_xml.xpath("//cci_list/cci_items/cci_item[@id='#{cci_ref}']")[0] unless @cci_xml.nil?
         unless item_node.nil?
           nist_ref = item_node.xpath('./references/reference[not(@version <= preceding-sibling::reference/@version) and not(@version <=following-sibling::reference/@version)]/@index').text

@@ -140,7 +133,7 @@ module HeimdallTools
     end

     def plugin_nist_tag(pluginfamily, pluginid)
-      entries = @cwe_nist_mapping.select { |x| (x[:pluginfamily].eql?(pluginfamily) && (x[:pluginid].eql?('*') || x[:pluginid].eql?(pluginid.to_i))
+      entries = @cwe_nist_mapping.select { |x| (x[:pluginfamily].eql?(pluginfamily) && (x[:pluginid].eql?('*') || x[:pluginid].eql?(pluginid.to_i))) && !x[:nistid].nil? }
       tags = entries.map { |x| [x[:nistid].split('|'), "Rev_#{x[:rev]}"] }
       tags.empty? ? DEFAULT_NIST_TAG : tags.flatten.uniq
     end

@@ -148,15 +141,15 @@ module HeimdallTools
     def impact(severity)
       # Map CAT levels and Plugin severity to HDF impact levels
       case severity
-      when
+      when '0'
         IMPACT_MAPPING[:Info]
-      when
+      when '1', 'III'
         IMPACT_MAPPING[:Low]
-      when
+      when '2', 'II'
         IMPACT_MAPPING[:Medium]
-      when
+      when '3', 'I'
         IMPACT_MAPPING[:High]
-      when
+      when '4'
         IMPACT_MAPPING[:Critical]
       else
         -1

@@ -172,17 +165,17 @@ module HeimdallTools
     end

     def desc_tags(data, label)
-      {
+      { data: data || NA_STRING, label: label || NA_STRING }
     end

     # Nessus report could have multiple issue entries for multiple findings of same issue type.
-    # The meta data is identical across entries
+    # The meta data is identical across entries
     # method collapse_duplicates return unique controls with applicable findings collapsed into it.
     def collapse_duplicates(controls)
       unique_controls = []

       controls.map { |x| x['id'] }.uniq.each do |id|
-        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map {|x| x['results']}
+        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
         unique_control = controls.find { |x| x['id'].eql?(id) }
         unique_control['results'] = collapsed_results.flatten
         unique_controls << unique_control

@@ -192,9 +185,9 @@ module HeimdallTools

     def to_hdf
       host_results = {}
-      @reports.each do |
+      @reports.each do |report|
         controls = []
-        report['ReportItem'].each do |
+        report['ReportItem'].each do |item|
           printf("\rProcessing: %s", $spinner.next)
           @item = {}
           @item['tags'] = {}

@@ -207,7 +200,7 @@ module HeimdallTools
         # Current version covers STIG based 'Policy Compliance' results
         # TODO Cover cases for 'Policy Compliance' results based on CIS
         if item['compliance-reference']
-          @item['id'] = parse_refs(item['compliance-reference'],'Vuln-ID').join.to_s
+          @item['id'] = parse_refs(item['compliance-reference'], 'Vuln-ID').join.to_s
         else
           @item['id'] = item['pluginID'].to_s
         end

@@ -222,17 +215,17 @@ module HeimdallTools
           @item['desc'] = format_desc(item).to_s
         end
         if item['compliance-reference']
-          @item['impact'] = impact(parse_refs(item['compliance-reference'],'CAT').join.to_s)
+          @item['impact'] = impact(parse_refs(item['compliance-reference'], 'CAT').join.to_s)
         else
-          @item['impact'] = impact(item['severity'])
+          @item['impact'] = impact(item['severity'])
         end
         if item['compliance-reference']
-          @item['tags']['nist'] = cci_nist_tag(parse_refs(item['compliance-reference'],'CCI'))
+          @item['tags']['nist'] = cci_nist_tag(parse_refs(item['compliance-reference'], 'CCI'))
         else
-          @item['tags']['nist'] = plugin_nist_tag(item['pluginFamily'],item['pluginID'])
+          @item['tags']['nist'] = plugin_nist_tag(item['pluginFamily'], item['pluginID'])
         end
         if item['compliance-solution']
-          @item['descriptions'] <<
+          @item['descriptions'] << desc_tags(item['compliance-solution'], 'check')
         end

         @item['code'] = ''
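The rewritten extract_report above normalizes ReportHost, which the XML-to-hash conversion yields as a Hash for a single-host scan and as an Array for a multi-host scan. A standalone sketch of that normalization, using hypothetical sample hashes rather than real Nessus data:

    single_host = { 'name' => 'host-a' }
    multi_host  = [{ 'name' => 'host-a' }, { 'name' => 'host-b' }]

    def normalize_report_hosts(report_host)
      # Wrap a lone Hash so callers can always iterate over an Array of hosts.
      report_host.is_a?(Array) ? report_host : [report_host]
    end

    normalize_report_hosts(single_host) # => [{"name"=>"host-a"}]
    normalize_report_hosts(multi_host)  # => [{"name"=>"host-a"}, {"name"=>"host-b"}]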