heimdall_tools 1.3.46 → 1.3.50
- checksums.yaml +4 -4
- data/README.md +209 -217
- data/lib/heimdall_tools/asff_compatible_products/firewall_manager.rb +11 -0
- data/lib/heimdall_tools/asff_compatible_products/prowler.rb +19 -0
- data/lib/heimdall_tools/asff_compatible_products/securityhub.rb +89 -0
- data/lib/heimdall_tools/asff_mapper.rb +232 -0
- data/lib/heimdall_tools/aws_config_mapper.rb +1 -1
- data/lib/heimdall_tools/cli.rb +39 -7
- data/lib/heimdall_tools/fortify_mapper.rb +3 -3
- data/lib/heimdall_tools/help/asff_mapper.md +6 -0
- data/lib/heimdall_tools/help/prowler_mapper.md +5 -0
- data/lib/heimdall_tools/nessus_mapper.rb +14 -6
- data/lib/heimdall_tools/prowler_mapper.rb +8 -0
- data/lib/heimdall_tools/sonarqube_mapper.rb +5 -1
- data/lib/heimdall_tools/xccdf_results_mapper.rb +161 -0
- data/lib/heimdall_tools.rb +4 -0
- metadata +41 -4
data/lib/heimdall_tools/asff_compatible_products/securityhub.rb
ADDED
@@ -0,0 +1,89 @@
+require 'csv'
+require 'json'
+
+module HeimdallTools
+  class SecurityHub
+    private_class_method def self.corresponding_control(controls, finding)
+      controls.find { |c| c['StandardsControlArn'] == finding['ProductFields']['StandardsControlArn'] }
+    end
+
+    def self.supporting_docs(standards:)
+      begin
+        controls = standards.nil? ? nil : standards.map { |s| JSON.parse(s)['Controls'] }.flatten
+      rescue StandardError => e
+        raise "Invalid supporting docs for Security Hub:\nException: #{e}"
+      end
+
+      begin
+        resource_dir = Pathname.new(__FILE__).join('../../../data')
+        aws_config_mapping_file = File.join(resource_dir, 'aws-config-mapping.csv')
+        aws_config_mapping = CSV.read(aws_config_mapping_file, { encoding: 'UTF-8', headers: true, header_converters: :symbol }).map(&:to_hash)
+      rescue StandardError => e
+        raise "Invalid AWS Config mapping file:\nException: #{e}"
+      end
+
+      { controls: controls, aws_config_mapping: aws_config_mapping }
+    end
+
+    def self.finding_id(finding, *, encode:, controls: nil, **)
+      ret = if !controls.nil? && !(control = corresponding_control(controls, finding)).nil?
+              control['ControlId']
+            elsif finding['ProductFields'].member?('ControlId') # check if aws
+              finding['ProductFields']['ControlId']
+            elsif finding['ProductFields'].member?('RuleId') # check if cis
+              finding['ProductFields']['RuleId']
+            else
+              finding['GeneratorId'].split('/')[-1]
+            end
+      encode.call(ret)
+    end
+
+    def self.finding_impact(finding, *, controls: nil, **)
+      if !controls.nil? && !(control = corresponding_control(controls, finding)).nil?
+        imp = control['SeverityRating'].to_sym
+      else
+        # severity is required, but can be either 'label' or 'normalized' internally with 'label' being preferred. other values can be in here too such as the original severity rating.
+        imp = finding['Severity'].key?('Label') ? finding['Severity']['Label'].to_sym : finding['Severity']['Normalized']/100.0
+        # securityhub asff file does not contain accurate severity information by setting things that shouldn't be informational to informational: when additional context, i.e. standards, is not provided, set informational to medium.
+        imp = :MEDIUM if imp.is_a?(Symbol) && imp == :INFORMATIONAL
+      end
+      imp
+    end
+
+    def self.finding_nist_tag(finding, *, aws_config_mapping:, **)
+      return {} unless finding['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule'
+
+      entries = aws_config_mapping.select { |rule| finding['ProductFields']['RelatedAWSResources:0/name'].include? rule[:awsconfigrulename] }
+      entries.map do |rule|
+        tags_joined = rule[:nistid].split('|') # subheadings are joined together in the csv file
+        tags_joined.map do |tag|
+          if (i = tag.index('(')).nil?
+            tag
+          else
+            tag[i..-1].scan(/\(.+?\)/).map { |subheading| "#{tag[0..i-1]}#{subheading}" }
+          end
+        end
+      end.flatten.uniq
+    end
+
+    def self.finding_title(finding, *, encode:, controls: nil, **)
+      ret = if !controls.nil? && !(control = corresponding_control(controls, finding)).nil?
+              control['Title']
+            else
+              finding['Title']
+            end
+      encode.call(ret)
+    end
+
+    def self.product_name(findings, *, encode:, **)
+      # "#{findings[0]['ProductFields']['aws/securityhub/CompanyName']} #{findings[0]['ProductFields']['aws/securityhub/ProductName']}"
+      # not using above due to wanting to provide the standard's name instead
+      if findings[0]['Types'][0].split('/')[-1].gsub(/-/, ' ').downcase == findings[0]['ProductFields']['StandardsControlArn'].split('/')[-4].gsub(/-/, ' ').downcase
+        standardname = findings[0]['Types'][0].split('/')[-1].gsub(/-/, ' ')
+      else
+        standardname = findings[0]['ProductFields']['StandardsControlArn'].split('/')[-4].gsub(/-/, ' ').split.map(&:capitalize).join(' ')
+      end
+      encode.call("#{standardname} v#{findings[0]['ProductFields']['StandardsControlArn'].split('/')[-2]}")
+    end
+  end
+end
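Note on the new file: the subheading expansion in finding_nist_tag exists because NIST tags in aws-config-mapping.csv are pipe-joined, and a tag such as AC-6(1)(2) denotes several subheadings at once. A minimal standalone Ruby sketch of just that step (expand_nist_tags and the sample values are invented for illustration):

def expand_nist_tags(nistid)
  nistid.split('|').map do |tag|
    if (i = tag.index('(')).nil?
      tag # a plain tag, e.g. 'AC-2', passes through unchanged
    else
      # 'AC-6(1)(2)' expands to ['AC-6(1)', 'AC-6(2)']
      tag[i..-1].scan(/\(.+?\)/).map { |subheading| "#{tag[0..i - 1]}#{subheading}" }
    end
  end.flatten.uniq
end

p expand_nist_tags('AC-2|AC-6(1)(2)') # => ["AC-2", "AC-6(1)", "AC-6(2)"]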
data/lib/heimdall_tools/asff_mapper.rb
ADDED
@@ -0,0 +1,232 @@
+require 'json'
+require 'set'
+
+require 'htmlentities'
+
+require 'heimdall_tools/hdf'
+require 'heimdall_tools/asff_compatible_products/firewall_manager'
+require 'heimdall_tools/asff_compatible_products/prowler'
+require 'heimdall_tools/asff_compatible_products/securityhub'
+
+module HeimdallTools
+  DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze
+
+  INSPEC_INPUTS_MAPPING = {
+    string: 'String',
+    numeric: 'Numeric',
+    regexp: 'Regexp',
+    array: 'Array',
+    hash: 'Hash',
+    boolean: 'Boolean',
+    any: 'Any'
+  }.freeze
+
+  # Loading spinner sign
+  $spinner = Enumerator.new do |e|
+    loop do
+      e.yield '|'
+      e.yield '/'
+      e.yield '-'
+      e.yield '\\'
+    end
+  end
+
+  # TODO: use hash.dig and safe navigation operator throughout
+  class ASFFMapper
+    IMPACT_MAPPING = {
+      CRITICAL: 0.9,
+      HIGH: 0.7,
+      MEDIUM: 0.5,
+      LOW: 0.3,
+      INFORMATIONAL: 0.0
+    }.freeze
+
+    PRODUCT_ARN_MAPPING = {
+      %r{arn:.+:securityhub:.+:.*:product/aws/firewall-manager} => FirewallManager,
+      %r{arn:.+:securityhub:.+:.*:product/aws/securityhub} => SecurityHub,
+      %r{arn:.+:securityhub:.+:.*:product/prowler/prowler} => Prowler
+    }.freeze
+
+    def initialize(asff_json, securityhub_standards_json_array: nil, meta: nil)
+      @meta = meta
+
+      @supporting_docs = {}
+      @supporting_docs[SecurityHub] = SecurityHub.supporting_docs({ standards: securityhub_standards_json_array })
+
+      begin
+        asff_required_keys = %w{AwsAccountId CreatedAt Description GeneratorId Id ProductArn Resources SchemaVersion Severity Title Types UpdatedAt}
+        @report = JSON.parse(asff_json)
+        if @report.length == 1 && @report.member?('Findings') && @report['Findings'].each { |finding| asff_required_keys.to_set.difference(finding.keys.to_set).none? }.all?
+          # ideal case that is spec compliant
+          # might need to ensure that the file is utf-8 encoded and remove a BOM if one exists
+        elsif asff_required_keys.to_set.difference(@report.keys.to_set).none?
+          # individual finding so have to add wrapping array
+          @report = { 'Findings' => [@report] }
+        else
+          raise 'Not a findings file nor an individual finding'
+        end
+      rescue StandardError => e
+        raise "Invalid ASFF file provided:\nException: #{e}"
+      end
+
+      @coder = HTMLEntities.new
+    end
+
+    def encode(string)
+      @coder.encode(string, :basic, :named, :decimal)
+    end
+
+    def external_product_handler(product, data, func, default)
+      if (product.is_a?(Regexp) || (arn = PRODUCT_ARN_MAPPING.keys.find { |a| product.match(a) })) && PRODUCT_ARN_MAPPING.key?(arn || product) && PRODUCT_ARN_MAPPING[arn || product].respond_to?(func)
+        keywords = { encode: method(:encode) }
+        keywords = keywords.merge(@supporting_docs[PRODUCT_ARN_MAPPING[arn || product]]) if @supporting_docs.member?(PRODUCT_ARN_MAPPING[arn || product])
+        PRODUCT_ARN_MAPPING[arn || product].send(func, data, **keywords)
+      elsif default.is_a? Proc
+        default.call
+      else
+        default
+      end
+    end
+
+    def nist_tag(finding)
+      tags = external_product_handler(finding['ProductArn'], finding, :finding_nist_tag, {})
+      tags.empty? ? DEFAULT_NIST_TAG : tags
+    end
+
+    def impact(finding)
+      # there can be findings listed that are intentionally ignored due to the underlying control being superceded by a control from a different standard
+      if finding.member?('Workflow') && finding['Workflow'].member?('Status') && finding['Workflow']['Status'] == 'SUPPRESSED'
+        imp = :INFORMATIONAL
+      else
+        # severity is required, but can be either 'label' or 'normalized' internally with 'label' being preferred. other values can be in here too such as the original severity rating.
+        default = proc { finding['Severity'].key?('Label') ? finding['Severity']['Label'].to_sym : finding['Severity']['Normalized']/100.0 }
+        imp = external_product_handler(finding['ProductArn'], finding, :finding_impact, default)
+      end
+      imp.is_a?(Symbol) ? IMPACT_MAPPING[imp] : imp
+    end
+
+    def desc_tags(data, label)
+      { data: data || NA_STRING, label: label || NA_STRING }
+    end
+
+    def subfindings(finding)
+      subfinding = {}
+
+      statusreason = finding['Compliance']['StatusReasons'].map { |reason| reason.flatten.map { |string| encode(string) } }.flatten.join("\n") if finding.key?('Compliance') && finding['Compliance'].key?('StatusReasons')
+      if finding.key?('Compliance') && finding['Compliance'].key?('Status')
+        case finding['Compliance']['Status']
+        when 'PASSED'
+          subfinding['status'] = 'passed'
+          subfinding['message'] = statusreason if statusreason
+        when 'WARNING'
+          subfinding['status'] = 'skipped'
+          subfinding['skip_message'] = statusreason if statusreason
+        when 'FAILED'
+          subfinding['status'] = 'failed'
+          subfinding['message'] = statusreason if statusreason
+        when 'NOT_AVAILABLE'
+          # primary meaning is that the check could not be performed due to a service outage or API error, but it's also overloaded to mean NOT_APPLICABLE so technically 'skipped' or 'error' could be applicable, but AWS seems to do the equivalent of skipped
+          subfinding['status'] = 'skipped'
+          subfinding['skip_message'] = statusreason if statusreason
+        else
+          subfinding['status'] = 'error' # not a valid value for the status enum
+          subfinding['message'] = statusreason if statusreason
+        end
+      else
+        subfinding['status'] = 'skipped' # if no compliance status is provided which is a weird but possible case, then skip
+        subfinding['skip_message'] = statusreason if statusreason
+      end
+
+      subfinding['code_desc'] = external_product_handler(finding['ProductArn'], finding, :subfindings_code_desc, '')
+      subfinding['code_desc'] += '; ' unless subfinding['code_desc'].empty?
+      subfinding['code_desc'] += "Resources: [#{finding['Resources'].map { |r| "Type: #{encode(r['Type'])}, Id: #{encode(r['Id'])}#{", Partition: #{encode(r['Partition'])}" if r.key?('Partition')}#{", Region: #{encode(r['Region'])}" if r.key?('Region')}" }.join(', ')}]"
+
+      subfinding['start_time'] = finding.key?('LastObservedAt') ? finding['LastObservedAt'] : finding['UpdatedAt']
+
+      [subfinding]
+    end
+
+    def to_hdf
+      product_groups = {}
+      @report['Findings'].each do |finding|
+        printf("\rProcessing: %s", $spinner.next)
+
+        external = method(:external_product_handler).curry(4)[finding['ProductArn']][finding]
+
+        # group subfindings by asff productarn and then hdf id
+        item = {}
+        item['id'] = external[:finding_id][encode(finding['GeneratorId'])]
+
+        item['title'] = external[:finding_title][encode(finding['Title'])]
+
+        item['tags'] = { nist: nist_tag(finding) }
+
+        item['impact'] = impact(finding)
+
+        item['desc'] = encode(finding['Description'])
+
+        item['descriptions'] = []
+        item['descriptions'] << desc_tags(finding['Remediation']['Recommendation'].map { |_k, v| encode(v) }.join("\n"), 'fix') if finding.key?('Remediation') && finding['Remediation'].key?('Recommendation')
+
+        item['refs'] = []
+        item['refs'] << { url: finding['SourceUrl'] } if finding.key?('SourceUrl')
+
+        item['source_location'] = NA_HASH
+
+        item['results'] = subfindings(finding)
+
+        arn = PRODUCT_ARN_MAPPING.keys.find { |a| finding['ProductArn'].match(a) }
+        if arn.nil?
+          product_info = finding['ProductArn'].split(':')[-1]
+          arn = Regexp.new "arn:.+:securityhub:.+:.*:product/#{product_info.split('/')[1]}/#{product_info.split('/')[2]}"
+        end
+        product_groups[arn] = {} if product_groups[arn].nil?
+        product_groups[arn][item['id']] = [] if product_groups[arn][item['id']].nil?
+        product_groups[arn][item['id']] << [item, finding]
+      end
+
+      controls = []
+      product_groups.each do |product, id_groups|
+        id_groups.each do |id, data|
+          printf("\rProcessing: %s", $spinner.next)
+
+          external = method(:external_product_handler).curry(4)[product]
+
+          group = data.map { |d| d[0] }
+          findings = data.map { |d| d[1] }
+
+          product_info = findings[0]['ProductArn'].split(':')[-1].split('/')
+          product_name = external[findings][:product_name][encode("#{product_info[1]}/#{product_info[2]}")]
+
+          item = {}
+          # add product name to id if any ids are the same across products
+          item['id'] = product_groups.reject { |pg| pg == product }.values.any? { |ig| ig.keys.include?(id) } ? "[#{product_name}] #{id}" : id
+
+          item['title'] = "#{product_name}: #{group.map { |d| d['title'] }.uniq.join(';')}"
+
+          item['tags'] = { nist: group.map { |d| d['tags'][:nist] }.flatten.uniq }
+
+          item['impact'] = group.map { |d| d['impact'] }.max
+
+          item['desc'] = external[group][:desc][group.map { |d| d['desc'] }.uniq.join("\n")]
+
+          item['descriptions'] = group.map { |d| d['descriptions'] }.flatten.compact.reject(&:empty?).uniq
+
+          item['refs'] = group.map { |d| d['refs'] }.flatten.compact.reject(&:empty?).uniq
+
+          item['source_location'] = NA_HASH
+          item['code'] = JSON.pretty_generate({ Findings: findings })
+
+          item['results'] = group.map { |d| d['results'] }.flatten.uniq
+
+          controls << item
+        end
+      end
+
+      results = HeimdallDataFormat.new(profile_name: @meta&.key?('name') ? @meta['name'] : 'AWS Security Finding Format',
+                                       title: @meta&.key?('title') ? @meta['title'] : 'ASFF findings',
+                                       controls: controls)
+      results.to_hdf
+    end
+  end
+end
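For orientation: the mapper parses a findings file (or wraps a single finding), groups findings by product ARN and then by HDF id, and merges each group into one HDF control. A usage sketch mirroring how cli.rb drives the class below; the file paths are placeholders:

require 'heimdall_tools'

# Convert an ASFF findings file to HDF scan results.
hdf = HeimdallTools::ASFFMapper.new(File.read('findings.asff.json')).to_hdf
File.write('findings.hdf.json', hdf)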
data/lib/heimdall_tools/aws_config_mapper.rb
CHANGED
@@ -8,7 +8,7 @@ RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')
 AWS_CONFIG_MAPPING_FILE = File.join(RESOURCE_DIR, 'aws-config-mapping.csv')
 
 NOT_APPLICABLE_MSG = 'No AWS resources found to evaluate complaince for this rule'.freeze
-INSUFFICIENT_DATA_MSG = 'Not enough data has been
+INSUFFICIENT_DATA_MSG = 'Not enough data has been collected to determine compliance yet.'.freeze
 
 ##
 # HDF mapper for use with AWS Config rules.
data/lib/heimdall_tools/cli.rb
CHANGED
@@ -41,6 +41,15 @@ module HeimdallTools
     File.write(options[:output], hdf)
   end
 
+  desc 'xccdf_results_mapper', 'xccdf_results_mapper translates SCAP client XCCDF-Results XML report to HDF format Json be viewed on Heimdall'
+  long_desc Help.text(:xccdf_results_mapper)
+  option :xml, required: true, aliases: '-x'
+  option :output, required: true, aliases: '-o'
+  def xccdf_results_mapper
+    hdf = HeimdallTools::XCCDFResultsMapper.new(File.read(options[:xml])).to_hdf
+    File.write(options[:output], hdf)
+  end
+
   desc 'nessus_mapper', 'nessus_mapper translates nessus xml report to HDF format Json be viewed on Heimdall'
   long_desc Help.text(:nessus_mapper)
   option :xml, required: true, aliases: '-x'
@@ -61,7 +70,7 @@ module HeimdallTools
   option :output_prefix, required: true, aliases: '-o'
   def snyk_mapper
     hdfs = HeimdallTools::SnykMapper.new(File.read(options[:json]), options[:name]).to_hdf
-    puts "\
+    puts "\rHDF Generated:\n"
     hdfs.each_key do |host|
       File.write("#{options[:output_prefix]}-#{host}.json", hdfs[host])
       puts "#{options[:output_prefix]}-#{host}.json"
@@ -75,7 +84,7 @@ module HeimdallTools
   def nikto_mapper
     hdf = HeimdallTools::NiktoMapper.new(File.read(options[:json])).to_hdf
     File.write(options[:output], hdf)
-    puts "\
+    puts "\rHDF Generated:\n"
     puts options[:output].to_s
   end
 
@@ -86,7 +95,7 @@ module HeimdallTools
   def jfrog_xray_mapper
     hdf = HeimdallTools::JfrogXrayMapper.new(File.read(options[:json])).to_hdf
     File.write(options[:output], hdf)
-    puts "\
+    puts "\rHDF Generated:\n"
     puts options[:output].to_s
   end
 
@@ -97,7 +106,7 @@ module HeimdallTools
   def dbprotect_mapper
     hdf = HeimdallTools::DBProtectMapper.new(File.read(options[:xml])).to_hdf
     File.write(options[:output], hdf)
-    puts "\
+    puts "\rHDF Generated:\n"
     puts options[:output].to_s
   end
 
@@ -108,7 +117,7 @@ module HeimdallTools
   def aws_config_mapper
     hdf = HeimdallTools::AwsConfigMapper.new(options[:custom_mapping]).to_hdf
     File.write(options[:output], hdf)
-    puts "\
+    puts "\rHDF Generated:\n"
    puts options[:output].to_s
   end
 
@@ -119,7 +128,7 @@ module HeimdallTools
   def netsparker_mapper
     hdf = HeimdallTools::NetsparkerMapper.new(File.read(options[:xml])).to_hdf
     File.write(options[:output], hdf)
-    puts "\
+    puts "\rHDF Generated:\n"
     puts options[:output].to_s
   end
 
@@ -131,7 +140,7 @@ module HeimdallTools
   def sarif_mapper
     hdf = HeimdallTools::SarifMapper.new(File.read(options[:json])).to_hdf
     File.write(options[:output], hdf)
-    puts "\
+    puts "\rHDF Generated:\n"
     puts options[:output].to_s
   end
 
@@ -146,6 +155,29 @@ module HeimdallTools
     puts options[:output].to_s
   end
 
+  desc 'asff_mapper', 'asff_mapper translates AWS Security Finding Format results from JSON to HDF-formatted JSON so as to be viewable on Heimdall'
+  long_desc Help.text(:asff_mapper)
+  option :json, required: true, banner: 'ASFF-FINDING-JSON', aliases: ['-i', '--input', '-j']
+  option :securityhub_standards, required: false, type: :array, banner: 'ASFF-SECURITYHUB-STANDARDS-JSON', aliases: ['--sh', '--input-securityhub-standards']
+  option :output, required: true, banner: 'HDF-SCAN-RESULTS-JSON', aliases: '-o'
+  def asff_mapper
+    hdf = HeimdallTools::ASFFMapper.new(File.read(options[:json]), securityhub_standards_json_array: options[:securityhub_standards].nil? ? nil : options[:securityhub_standards].map { |filename| File.read(filename) }).to_hdf
+    File.write(options[:output], hdf)
+    puts "\rHDF Generated:\n"
+    puts options[:output].to_s
+  end
+
+  desc 'prowler_mapper', 'prowler_mapper translates Prowler-derived AWS Security Finding Format results from concatenated JSON blobs to HDF-formatted JSON so as to be viewable on Heimdall'
+  long_desc Help.text(:prowler_mapper)
+  option :json, required: true, banner: 'PROWLER-ASFF-JSON', aliases: ['-i', '--input', '-j']
+  option :output, required: true, banner: 'HDF-SCAN-RESULTS-JSON', aliases: '-o'
+  def prowler_mapper
+    hdf = HeimdallTools::ProwlerMapper.new(File.read(options[:json])).to_hdf
+    File.write(options[:output], hdf)
+    puts "\rHDF Generated:\n"
+    puts options[:output].to_s
+  end
+
   desc 'version', 'prints version'
   def version
     puts VERSION
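Following the option declarations above, the new subcommands are invoked like the existing ones (the asff_mapper lines match the help text added below; the xccdf_results_mapper and prowler_mapper lines are inferred from their declared -x/-i/-o options):

  heimdall_tools xccdf_results_mapper -x <xccdf-results-xml> -o <hdf-scan-results-json>
  heimdall_tools asff_mapper -i <asff-finding-json> -o <hdf-scan-results-json>
  heimdall_tools asff_mapper -i <asff-finding-json> --sh <standard-1-json> ... <standard-n-json> -o <hdf-scan-results-json>
  heimdall_tools prowler_mapper -i <prowler-asff-json> -o <hdf-scan-results-json>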
data/lib/heimdall_tools/fortify_mapper.rb
CHANGED
@@ -58,9 +58,9 @@ module HeimdallTools
   def snippet(snippetid)
     snippet = @snippets.select { |x| x['id'].eql?(snippetid) }.first
     "\nPath: #{snippet['File']}\n" \
-
-
-
+    "StartLine: #{snippet['StartLine']}, " \
+    "EndLine: #{snippet['EndLine']}\n" \
+    "Code:\n#{snippet['Text']['#cdata-section'].strip}" \
   end
 
   def nist_tag(rule)
data/lib/heimdall_tools/help/asff_mapper.md
ADDED
@@ -0,0 +1,6 @@
+asff_mapper translates AWS Security Finding Format results from JSON to HDF-formatted JSON so as to be viewable on Heimdall
+
+Examples:
+
+  heimdall_tools asff_mapper -i <asff-finding-json> -o <hdf-scan-results-json>
+  heimdall_tools asff_mapper -i <asff-finding-json> --sh <standard-1-json> ... <standard-n-json> -o <hdf-scan-results-json>
data/lib/heimdall_tools/nessus_mapper.rb
CHANGED
@@ -25,8 +25,6 @@ DEFAULT_NIST_REV = 'Rev_4'.freeze
 
 NA_PLUGIN_OUTPUT = 'This Nessus Plugin does not provide output message.'.freeze
 
-# rubocop:disable Metrics/AbcSize
-
 # Loading spinner sign
 $spinner = Enumerator.new do |e|
   loop do
@@ -94,11 +92,17 @@ module HeimdallTools
 
   def finding(issue, timestamp)
     finding = {}
-    # if compliance-result field, this is a policy compliance result entry
-    # nessus policy compliance result provides a pass/fail data
-    # For non policy compliance results are defaulted to failed
     if issue['compliance-result']
-
+      case issue['compliance-result']
+      when 'PASSED'
+        finding['status'] = 'passed'
+      when 'ERROR'
+        finding['status'] = 'error'
+      when 'WARNING'
+        finding['status'] = 'skipped'
+      else
+        finding['status'] = 'failed'
+      end
     else
       finding['status'] = 'failed'
     end
@@ -221,8 +225,12 @@ module HeimdallTools
    end
    if item['compliance-reference']
      @item['tags']['nist'] = cci_nist_tag(parse_refs(item['compliance-reference'], 'CCI'))
+     @item['tags']['cci'] = parse_refs(item['compliance-reference'], 'CCI')
+     @item['tags']['rid'] = parse_refs(item['compliance-reference'], 'Rule-ID').join(',')
+     @item['tags']['stig_id'] = parse_refs(item['compliance-reference'], 'STIG-ID').join(',')
    else
      @item['tags']['nist'] = plugin_nist_tag(item['pluginFamily'], item['pluginID'])
+     @item['tags']['rid'] = item['pluginID'].to_s
    end
    if item['compliance-solution']
      @item['descriptions'] << desc_tags(item['compliance-solution'], 'check')
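The new case statement replaces the old pass/fail-only handling with a four-way mapping from Nessus compliance-result values to HDF statuses. Restated as a lookup table for clarity (a sketch; the constant and method names are invented, with 'failed' as the fallthrough just like the else branch):

NESSUS_STATUS_MAPPING = {
  'PASSED' => 'passed',
  'ERROR' => 'error',
  'WARNING' => 'skipped'
}.freeze

def hdf_status(compliance_result)
  # anything unrecognized, e.g. 'FAILED', falls through to 'failed'
  NESSUS_STATUS_MAPPING.fetch(compliance_result, 'failed')
end

p hdf_status('WARNING') # => "skipped"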
data/lib/heimdall_tools/prowler_mapper.rb
ADDED
@@ -0,0 +1,8 @@
+module HeimdallTools
+  class ProwlerMapper < ASFFMapper
+    def initialize(prowler_asff_json)
+      # comes as an asff-json file which is basically all the findings concatenated into one file instead of putting it in the proper wrapper data structure
+      super("{ \"Findings\": [#{prowler_asff_json.split("\n").join(',')}]}", meta: { 'name' => 'Prowler', 'title' => 'Prowler findings' })
+    end
+  end
+end
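Prowler writes one ASFF finding object per line rather than a spec-compliant findings file, so the subclass splices the lines into the { "Findings": [...] } wrapper before delegating to ASFFMapper. A runnable sketch of that transformation with two invented findings:

require 'json'

prowler_output = <<~ASFF
  {"Id": "finding-1", "Title": "t1"}
  {"Id": "finding-2", "Title": "t2"}
ASFF

wrapped = "{ \"Findings\": [#{prowler_output.split("\n").join(',')}]}"
p JSON.parse(wrapped)['Findings'].length # => 2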
data/lib/heimdall_tools/sonarqube_mapper.rb
CHANGED
@@ -158,7 +158,11 @@ class Control
   # OWASP is stated specifically, ex owasp-a1
   #
   # SonarQube is inconsistent with tags (ex some cwe rules don't have cwe number in desc,) as noted below
-
+
+  # rubocop:disable Style/MutableConstant
+  TAG_DATA = {} # NOTE: We count on Ruby to preserve order for TAG_DATA
+  # rubocop:enable Style/MutableConstant
+
   TAG_DATA[:cwe] = {
     # Some rules with cwe tag don't have cwe number in description!
     # Currently only squid:S2658, but it has OWASP tag so we can use that.