heimdall_tools 1.3.45 → 1.3.49
- checksums.yaml +4 -4
- data/README.md +236 -176
- data/lib/data/scoutsuite-nist-mapping.csv +140 -0
- data/lib/heimdall_tools/asff_compatible_products/firewall_manager.rb +11 -0
- data/lib/heimdall_tools/asff_compatible_products/prowler.rb +19 -0
- data/lib/heimdall_tools/asff_compatible_products/securityhub.rb +89 -0
- data/lib/heimdall_tools/asff_mapper.rb +232 -0
- data/lib/heimdall_tools/aws_config_mapper.rb +5 -5
- data/lib/heimdall_tools/cli.rb +61 -6
- data/lib/heimdall_tools/fortify_mapper.rb +3 -3
- data/lib/heimdall_tools/help/asff_mapper.md +6 -0
- data/lib/heimdall_tools/help/prowler_mapper.md +5 -0
- data/lib/heimdall_tools/help/sarif_mapper.md +12 -0
- data/lib/heimdall_tools/help/scoutsuite_mapper.md +7 -0
- data/lib/heimdall_tools/nessus_mapper.rb +14 -6
- data/lib/heimdall_tools/prowler_mapper.rb +8 -0
- data/lib/heimdall_tools/sarif_mapper.rb +198 -0
- data/lib/heimdall_tools/scoutsuite_mapper.rb +180 -0
- data/lib/heimdall_tools/sonarqube_mapper.rb +5 -1
- data/lib/heimdall_tools/xccdf_results_mapper.rb +161 -0
- data/lib/heimdall_tools/zap_mapper.rb +0 -2
- data/lib/heimdall_tools.rb +5 -0
- metadata +46 -4
data/lib/heimdall_tools/help/scoutsuite_mapper.md
ADDED
@@ -0,0 +1,7 @@
+scoutsuite_mapper translates Scout Suite results from Javascript to HDF-formatted JSON so as to be viewable on Heimdall
+
+Note: Currently this mapper only supports AWS.
+
+Examples:
+
+  heimdall_tools scoutsuite_mapper -i <scoutsuite-results-js> -o <hdf-scan-results-json>
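As an illustration (not part of the changeset itself), a concrete run of the new subcommand would follow the pattern above; the input file name here is a placeholder for whatever Scout Suite actually produced, and the output path is arbitrary:

  heimdall_tools scoutsuite_mapper -i scoutsuite_results_aws-123456789012.js -o scoutsuite_hdf.json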
data/lib/heimdall_tools/nessus_mapper.rb
CHANGED
@@ -25,8 +25,6 @@ DEFAULT_NIST_REV = 'Rev_4'.freeze
 
 NA_PLUGIN_OUTPUT = 'This Nessus Plugin does not provide output message.'.freeze
 
-# rubocop:disable Metrics/AbcSize
-
 # Loading spinner sign
 $spinner = Enumerator.new do |e|
   loop do
@@ -94,11 +92,17 @@ module HeimdallTools
 
     def finding(issue, timestamp)
       finding = {}
-      # if compliance-result field, this is a policy compliance result entry
-      # nessus policy compliance result provides a pass/fail data
-      # For non policy compliance results are defaulted to failed
       if issue['compliance-result']
-
+        case issue['compliance-result']
+        when 'PASSED'
+          finding['status'] = 'passed'
+        when 'ERROR'
+          finding['status'] = 'error'
+        when 'WARNING'
+          finding['status'] = 'skipped'
+        else
+          finding['status'] = 'failed'
+        end
       else
         finding['status'] = 'failed'
       end
@@ -221,8 +225,12 @@ module HeimdallTools
       end
       if item['compliance-reference']
         @item['tags']['nist'] = cci_nist_tag(parse_refs(item['compliance-reference'], 'CCI'))
+        @item['tags']['cci'] = parse_refs(item['compliance-reference'], 'CCI')
+        @item['tags']['rid'] = parse_refs(item['compliance-reference'], 'Rule-ID').join(',')
+        @item['tags']['stig_id'] = parse_refs(item['compliance-reference'], 'STIG-ID').join(',')
       else
         @item['tags']['nist'] = plugin_nist_tag(item['pluginFamily'], item['pluginID'])
+        @item['tags']['rid'] = item['pluginID'].to_s
       end
       if item['compliance-solution']
         @item['descriptions'] << desc_tags(item['compliance-solution'], 'check')
data/lib/heimdall_tools/prowler_mapper.rb
ADDED
@@ -0,0 +1,8 @@
+module HeimdallTools
+  class ProwlerMapper < ASFFMapper
+    def initialize(prowler_asff_json)
+      # comes as an asff-json file which is basically all the findings concatenated into one file instead of putting it in the proper wrapper data structure
+      super("{ \"Findings\": [#{prowler_asff_json.split("\n").join(',')}]}", meta: { 'name' => 'Prowler', 'title' => 'Prowler findings' })
+    end
+  end
+end
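As an illustration of the wrapping step above (not from the changeset): the input path is a placeholder for a Prowler export with one ASFF finding object per line, and to_hdf is assumed to be inherited from ASFFMapper (asff_mapper.rb, not shown in this diff):

  require 'heimdall_tools'

  raw = File.read('prowler-output.asff.json')   # placeholder path; one ASFF finding object per line
  # The constructor builds the equivalent of this string and hands it to ASFFMapper via super:
  # "{ \"Findings\": [#{raw.split("\n").join(',')}]}"
  hdf = HeimdallTools::ProwlerMapper.new(raw).to_hdf   # to_hdf assumed to come from ASFFMapper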
data/lib/heimdall_tools/sarif_mapper.rb
ADDED
@@ -0,0 +1,198 @@
+require 'json'
+require 'csv'
+require 'heimdall_tools/hdf'
+
+RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')
+
+CWE_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'cwe-nist-mapping.csv')
+
+IMPACT_MAPPING = {
+  error: 0.7,
+  warning: 0.5,
+  note: 0.3,
+  none: 0.0
+}.freeze
+
+DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze
+
+# Loading spinner sign
+$spinner = Enumerator.new do |e|
+  loop do
+    e.yield '|'
+    e.yield '/'
+    e.yield '-'
+    e.yield '\\'
+  end
+end
+
+module HeimdallTools
+  class SarifMapper
+    def initialize(sarif_json, _name = nil, verbose = false)
+      @sarif_json = sarif_json
+      @verbose = verbose
+      begin
+        @cwe_nist_mapping = parse_mapper
+        @sarif_log = JSON.parse(@sarif_json)
+      rescue StandardError => e
+        raise "Invalid SARIF JSON file provided\n\nException: #{e}"
+      end
+    end
+
+    def extract_scaninfo(sarif_log)
+      info = {}
+      begin
+        info['policy'] = 'SARIF'
+        info['version'] = sarif_log['version']
+        info['projectName'] = 'Static Analysis Results Interchange Format'
+        info['summary'] = NA_STRING
+        info
+      rescue StandardError => e
+        raise "Error extracting project info from SARIF JSON file provided Exception: #{e}"
+      end
+    end
+
+    def finding(result)
+      finding = {}
+      finding['status'] = 'failed'
+      finding['code_desc'] = ''
+      if get_location(result)['uri']
+        finding['code_desc'] += " URL : #{get_location(result)['uri']}"
+      end
+      if get_location(result)['start_line']
+        finding['code_desc'] += " LINE : #{get_location(result)['start_line']}"
+      end
+      if get_location(result)['start_column']
+        finding['code_desc'] += " COLUMN : #{get_location(result)['start_column']}"
+      end
+      finding['code_desc'].strip!
+      finding['run_time'] = NA_FLOAT
+      finding['start_time'] = NA_STRING
+      finding
+    end
+
+    def add_nist_tag_from_cwe(cweid, taxonomy_name, tags_node)
+      entries = @cwe_nist_mapping.select { |x| cweid.include?(x[:cweid].to_s) && !x[:nistid].nil? }
+      tags = entries.map { |x| x[:nistid] }
+      result_tags = tags.empty? ? DEFAULT_NIST_TAG : tags.flatten.uniq
+      if result_tags.count.positive?
+        if !tags_node
+          tags_node = {}
+        end
+        if !tags_node.key?(taxonomy_name)
+          tags_node[taxonomy_name] = []
+        end
+        result_tags.each do |t|
+          tags_node[taxonomy_name] |= [t]
+        end
+      end
+      tags_node
+    end
+
+    def get_location(result)
+      location_info = {}
+      location_info['uri'] = result.dig('locations', 0, 'physicalLocation', 'artifactLocation', 'uri')
+      location_info['start_line'] = result.dig('locations', 0, 'physicalLocation', 'region', 'startLine')
+      location_info['start_column'] = result.dig('locations', 0, 'physicalLocation', 'region', 'startColumn')
+      location_info
+    end
+
+    def get_rule_info(run, result, rule_id)
+      finding = {}
+      driver = run.dig('tool', 'driver')
+      finding['driver_name'] = driver['name']
+      finding['driver_version'] = driver['version']
+      rules = driver['rules']
+      if rules
+        rule = rules.find { |x| x['id'].eql?(rule_id) }
+        if rule
+          finding['rule_name'] = rule&.[]('name')
+          finding['rule_short_description'] = rule&.[]('shortDescription')&.[]('text')
+          finding['rule_tags'] = get_tags(rule)
+          finding['rule_name'] = rule&.[]('messageStrings')&.[]('default')&.[]('text') unless finding['rule_name']
+        end
+      end
+      finding['rule_name'] = result&.[]('message')&.[]('text') unless finding['rule_name']
+      finding
+    end
+
+    def get_tags(rule)
+      result = {}
+      Array(rule&.[]('relationships')).each do |relationship|
+        taxonomy_name = relationship['target']['toolComponent']['name'].downcase
+        taxonomy_id = relationship['target']['id']
+        if !result.key?(taxonomy_name)
+          result[taxonomy_name] = []
+        end
+        result[taxonomy_name] |= [taxonomy_id]
+      end
+      result
+    end
+
+    def parse_identifiers(rule_tags, ref)
+      # Extracting id number from reference style CWE-297
+      rule_tags[ref.downcase].map { |e| e.downcase.split("#{ref.downcase}-")[1] }
+    rescue StandardError
+      []
+    end
+
+    def impact(severity)
+      severity_mapping = IMPACT_MAPPING[severity.to_sym]
+      severity_mapping.nil? ? 0.1 : severity_mapping
+    end
+
+    def parse_mapper
+      csv_data = CSV.read(CWE_NIST_MAPPING_FILE, **{ encoding: 'UTF-8',
+                          headers: true,
+                          header_converters: :symbol,
+                          converters: :all })
+      csv_data.map(&:to_hash)
+    end
+
+    def desc_tags(data, label)
+      { data: data || NA_STRING, label: label || NA_STRING }
+    end
+
+    def process_item(run, result, controls)
+      printf("\rProcessing: %s", $spinner.next)
+      control = controls.find { |x| x['id'].eql?(result['ruleId']) }
+
+      if control
+        control['results'] << finding(result)
+      else
+        rule_info = get_rule_info(run, result, result['ruleId'])
+        item = {}
+        item['tags'] = rule_info['rule_tags']
+        item['descriptions'] = []
+        item['refs'] = NA_ARRAY
+        item['source_location'] = { ref: get_location(result)['uri'], line: get_location(result)['start_line'] }
+        item['descriptions'] = NA_ARRAY
+        item['title'] = rule_info['rule_name'].to_s
+        item['id'] = result['ruleId'].to_s
+        item['desc'] = rule_info['rule_short_description'].to_s
+        item['impact'] = impact(result['level'].to_s)
+        item['code'] = NA_STRING
+        item['results'] = [finding(result)]
+        item['tags'] = add_nist_tag_from_cwe(parse_identifiers(rule_info['rule_tags'], 'CWE'), 'nist', item['tags'])
+        controls << item
+      end
+    end
+
+    def to_hdf
+      controls = []
+      @sarif_log['runs'].each do |run|
+        run['results'].each do |result|
+          process_item(run, result, controls)
+        end
+      end
+
+      scaninfo = extract_scaninfo(@sarif_log)
+      results = HeimdallDataFormat.new(profile_name: scaninfo['policy'],
+                                       version: scaninfo['version'],
+                                       title: scaninfo['projectName'],
+                                       summary: scaninfo['summary'],
+                                       controls: controls,
+                                       target_id: scaninfo['projectName'])
+      results.to_hdf
+    end
+  end
+end
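As an illustration (not from the changeset), converting a SARIF file with the class above could look roughly like this; the file paths are placeholders, and writing the return value to disk assumes to_hdf yields the serialized HDF JSON, as the CLI-style usage in the help files implies:

  require 'heimdall_tools'

  sarif_json = File.read('scan.sarif')                     # placeholder input path
  hdf = HeimdallTools::SarifMapper.new(sarif_json).to_hdf  # _name and verbose take their defaults
  File.write('scan_hdf.json', hdf)                         # placeholder output path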
data/lib/heimdall_tools/scoutsuite_mapper.rb
ADDED
@@ -0,0 +1,180 @@
+require 'json'
+require 'csv'
+require 'heimdall_tools/hdf'
+
+RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')
+
+SCOUTSUITE_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'scoutsuite-nist-mapping.csv')
+
+IMPACT_MAPPING = {
+  danger: 0.7,
+  warning: 0.5
+}.freeze
+
+DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze
+
+INSPEC_INPUTS_MAPPING = {
+  string: 'String',
+  numeric: 'Numeric',
+  regexp: 'Regexp',
+  array: 'Array',
+  hash: 'Hash',
+  boolean: 'Boolean',
+  any: 'Any'
+}.freeze
+
+# Loading spinner sign
+$spinner = Enumerator.new do |e|
+  loop do
+    e.yield '|'
+    e.yield '/'
+    e.yield '-'
+    e.yield '\\'
+  end
+end
+
+module HeimdallTools
+  # currently only tested against an AWS based result, but ScoutSuite supports many other cloud providers such as Azure
+  class ScoutSuiteMapper
+    def initialize(scoutsuite_js)
+      begin
+        @scoutsuite_nist_mapping = parse_mapper
+      rescue StandardError => e
+        raise "Invalid Scout Suite to NIST mapping file:\nException: #{e}"
+      end
+
+      begin
+        @scoutsuite_json = scoutsuite_js.lines[1] # first line is `scoutsuite_results =\n` and second line is json
+        @report = JSON.parse(@scoutsuite_json)
+      rescue StandardError => e
+        raise "Invalid Scout Suite JavaScript file provided:\nException: #{e}"
+      end
+    end
+
+    def parse_mapper
+      csv_data = CSV.read(SCOUTSUITE_NIST_MAPPING_FILE, { encoding: 'UTF-8', headers: true, header_converters: :symbol })
+      csv_data.map(&:to_hash)
+    end
+
+    def create_attribute(name, value, required = nil, sensitive = nil, type = nil)
+      { name: name, options: { value: value, required: required, sensitive: sensitive, type: type }.compact }
+    end
+
+    def extract_scaninfo(report)
+      info = {}
+      begin
+        info['name'] = 'Scout Suite Multi-Cloud Security Auditing Tool'
+        info['version'] = report['last_run']['version']
+        info['title'] = "Scout Suite Report using #{report['last_run']['ruleset_name']} ruleset on #{report['provider_name']} with account #{report['account_id']}"
+        info['target_id'] = "#{report['last_run']['ruleset_name']} ruleset:#{report['provider_name']}:#{report['account_id']}"
+        info['summary'] = report['last_run']['ruleset_about']
+        info['attributes'] = [
+          create_attribute('account_id', report['account_id'], true, false, INSPEC_INPUTS_MAPPING[:string]),
+          create_attribute('environment', report['environment']),
+          create_attribute('ruleset', report['ruleset_name']),
+          # think at least these run_parameters are aws only
+          create_attribute('run_parameters_excluded_regions', report['last_run']['run_parameters']['excluded_regions'].join(', ')),
+          create_attribute('run_parameters_regions', report['last_run']['run_parameters']['regions'].join(', ')),
+          create_attribute('run_parameters_services', report['last_run']['run_parameters']['services'].join(', ')),
+          create_attribute('run_parameters_skipped_services', report['last_run']['run_parameters']['skipped_services'].join(', ')),
+          create_attribute('time', report['last_run']['time']),
+          create_attribute('partition', report['partition']), # think this is aws only
+          create_attribute('provider_code', report['provider_code']),
+          create_attribute('provider_name', report['provider_name']),
+        ]
+
+        info
+      rescue StandardError => e
+        raise "Error extracting report info from Scout Suite JS->JSON file:\nException: #{e}"
+      end
+    end
+
+    def nist_tag(rule)
+      entries = @scoutsuite_nist_mapping.select { |x| rule.eql?(x[:rule].to_s) && !x[:nistid].nil? }
+      tags = entries.map { |x| x[:nistid].split('|') }
+      tags.empty? ? DEFAULT_NIST_TAG : tags.flatten.uniq
+    end
+
+    def impact(severity)
+      IMPACT_MAPPING[severity.to_sym]
+    end
+
+    def desc_tags(data, label)
+      { data: data || NA_STRING, label: label || NA_STRING }
+    end
+
+    def findings(details)
+      finding = {}
+      if (details['checked_items']).zero?
+        finding['status'] = 'skipped'
+        finding['skip_message'] = 'Skipped because no items were checked'
+      elsif (details['flagged_items']).zero?
+        finding['status'] = 'passed'
+        finding['message'] = "0 flagged items out of #{details['checked_items']} checked items"
+      else # there are checked items and things were flagged
+        finding['status'] = 'failed'
+        finding['message'] = "#{details['flagged_items']} flagged items out of #{details['checked_items']} checked items:\n#{details['items'].join("\n")}"
+      end
+      finding['code_desc'] = details['description']
+      finding['start_time'] = @report['last_run']['time']
+      [finding]
+    end
+
+    def compliance(arr)
+      str = 'Compliant with '
+      arr.map do |val|
+        info = "#{val['name']}, reference #{val['reference']}, version #{val['version']}"
+        str + info
+      end.join("\n")
+    end
+
+    def to_hdf
+      controls = []
+      @report['services'].each_key do |service|
+        @report['services'][service]['findings'].each_key do |finding|
+          printf("\rProcessing: %s", $spinner.next)
+
+          finding_id = finding
+          finding_details = @report['services'][service]['findings'][finding]
+
+          item = {}
+          item['id'] = finding_id
+          item['title'] = finding_details['description']
+
+          item['tags'] = { nist: nist_tag(finding_id) }
+
+          item['impact'] = impact(finding_details['level'])
+
+          item['desc'] = finding_details['rationale']
+
+          item['descriptions'] = []
+          item['descriptions'] << desc_tags(finding_details['remediation'], 'fix') unless finding_details['remediation'].nil?
+          item['descriptions'] << desc_tags(finding_details['service'], 'service')
+          item['descriptions'] << desc_tags(finding_details['path'], 'path')
+          item['descriptions'] << desc_tags(finding_details['id_suffix'], 'id_suffix')
+
+          item['refs'] = []
+          item['refs'] += finding_details['references'].map { |link| { url: link } } unless finding_details['references'].nil? || finding_details['references'].empty?
+          item['refs'] << { ref: compliance(finding_details['compliance']) } unless finding_details['compliance'].nil?
+
+          item['source_location'] = NA_HASH
+          item['code'] = NA_STRING
+
+          item['results'] = findings(finding_details)
+
+          controls << item
+        end
+      end
+
+      scaninfo = extract_scaninfo(@report)
+      results = HeimdallDataFormat.new(profile_name: scaninfo['name'],
+                                       version: scaninfo['version'],
+                                       title: scaninfo['title'],
+                                       summary: scaninfo['summary'],
+                                       controls: controls,
+                                       target_id: scaninfo['target_id'],
+                                       attributes: scaninfo['attributes'])
+      results.to_hdf
+    end
+  end
+end
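As an illustration (not from the changeset): the constructor above expects the raw Scout Suite results JavaScript file, whose first line is `scoutsuite_results =` and whose second line is the JSON payload. A rough programmatic sketch, with a placeholder path and the same to_hdf caveat as the SARIF example above:

  require 'heimdall_tools'

  scoutsuite_js = File.read('scoutsuite_results_aws-123456789012.js')  # placeholder path
  hdf = HeimdallTools::ScoutSuiteMapper.new(scoutsuite_js).to_hdf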
data/lib/heimdall_tools/sonarqube_mapper.rb
CHANGED
@@ -158,7 +158,11 @@ class Control
   # OWASP is stated specifically, ex owasp-a1
   #
   # SonarQube is inconsistent with tags (ex some cwe rules don't have cwe number in desc,) as noted below
-
+
+  # rubocop:disable Style/MutableConstant
+  TAG_DATA = {} # NOTE: We count on Ruby to preserve order for TAG_DATA
+  # rubocop:enable Style/MutableConstant
+
   TAG_DATA[:cwe] = {
     # Some rules with cwe tag don't have cwe number in description!
     # Currently only squid:S2658, but it has OWASP tag so we can use that.
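The NOTE in that hunk relies on a guaranteed Ruby behavior: Hash preserves insertion order, so the tag families are processed in the order they are added to TAG_DATA. A minimal illustration (not from the gem):

  h = {}
  h[:cwe] = 'first'
  h[:owasp] = 'second'
  h.keys  # => [:cwe, :owasp] — insertion order is guaranteed by Ruby's Hash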
data/lib/heimdall_tools/xccdf_results_mapper.rb
ADDED
@@ -0,0 +1,161 @@
+require 'json'
+require 'csv'
+require 'heimdall_tools/hdf'
+require 'utilities/xml_to_hash'
+require 'nokogiri'
+
+RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')
+
+# XCCDF mapping for converting SCAP client (SCC or OpenSCAP) outputs to HDF
+# SCC output from the RHEL7 Lockdown image was used for testing
+
+U_CCI_LIST = File.join(RESOURCE_DIR, 'U_CCI_List.xml')
+
+IMPACT_MAPPING = {
+  critical: 0.9,
+  high: 0.7,
+  medium: 0.5,
+  low: 0.3,
+  na: 0.0
+}.freeze
+
+# severity maps to high, medium, low with weights all being 10.0 from the xml
+# it doesn't really look like SCAP or SCC cares about that value, just if its high, med, or low
+
+CWE_REGEX = 'CWE-(\d*):'.freeze
+CCI_REGEX = 'CCI-(\d*)'.freeze
+
+DEFAULT_NIST_TAG = %w{SA-11 RA-5 Rev_4}.freeze
+
+module HeimdallTools
+  class XCCDFResultsMapper
+    def initialize(scap_xml, _name = nil)
+      @scap_xml = scap_xml
+      read_cci_xml
+      begin
+        data = xml_to_hash(scap_xml)
+        @results = data['Benchmark']['TestResult']
+        @benchmarks = data['Benchmark']
+        @groups = data['Benchmark']['Group']
+      rescue StandardError => e
+        raise "Invalid SCAP Client XCCDF output XML file provided Exception: #{e}"
+      end
+    end
+
+    # change for pass/fail based on output Benchmark.rule
+    # Pass/Fail are the only two options included in the output file
+    def finding(issue, count)
+      finding = {}
+      finding['status'] = issue['rule-result'][count]['result'].to_s
+      if finding['status'] == 'pass'
+        finding['status'] = 'passed'
+      end
+      if finding['status'] == 'fail'
+        finding['status'] = 'failed'
+      end
+      finding['code_desc'] = NA_STRING
+      finding['run_time'] = NA_FLOAT
+      finding['start_time'] = issue['start-time']
+      finding['message'] = NA_STRING
+      finding['resource_class'] = NA_STRING
+      [finding]
+    end
+
+    def read_cci_xml
+      @cci_xml = Nokogiri::XML(File.open(U_CCI_LIST))
+      @cci_xml.remove_namespaces!
+    rescue StandardError => e
+      puts "Exception: #{e.message}"
+    end
+
+    def cci_nist_tag(cci_refs)
+      nist_tags = []
+      cci_refs.each do |cci_ref|
+        item_node = @cci_xml.xpath("//cci_list/cci_items/cci_item[@id='#{cci_ref}']")[0] unless @cci_xml.nil?
+        unless item_node.nil?
+          nist_ref = item_node.xpath('./references/reference[not(@version <= preceding-sibling::reference/@version) and not(@version <=following-sibling::reference/@version)]/@index').text
+        end
+        nist_tags << nist_ref
+      end
+      nist_tags
+    end
+
+    def get_impact(severity)
+      IMPACT_MAPPING[severity.to_sym]
+    end
+
+    def parse_refs(refs)
+      refs.map { |ref| ref['text'] if ref['text'].match?(CCI_REGEX) }.reject!(&:nil?)
+    end
+
+    # Clean up output by removing the Satsifies block and the end of the description
+    def satisfies_parse(satisf)
+      temp_satisf = satisf.match('Satisfies: ([^;]*)<\/VulnDiscussion>')
+      return temp_satisf[1].split(',') unless temp_satisf.nil?
+
+      NA_ARRAY
+    end
+
+    def desc_tags(data, label)
+      { data: data || NA_STRING, label: label || NA_STRING }
+    end
+
+    def collapse_duplicates(controls)
+      unique_controls = []
+
+      controls.map { |x| x['id'] }.uniq.each do |id|
+        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
+        unique_control = controls.find { |x| x['id'].eql?(id) }
+        unique_control['results'] = collapsed_results.flatten
+        unique_controls << unique_control
+      end
+      unique_controls
+    end
+
+    def to_hdf
+      controls = []
+      @groups.each_with_index do |group, i|
+        @item = {}
+        @item['id'] = group['Rule']['id'].split('.').last.split('_').drop(2).first.split('r').first.split('S')[1]
+        @item['title'] = group['Rule']['title'].to_s
+        @item['desc'] = group['Rule']['description'].to_s.split('Satisfies').first
+        @item['descriptions'] = []
+        @item['descriptions'] << desc_tags(group['Rule']['description'], 'default')
+        @item['descriptions'] << desc_tags('NA', 'rationale')
+        @item['descriptions'] << desc_tags(group['Rule']['check']['check-content-ref']['name'], 'check')
+        @item['descriptions'] << desc_tags(group['Rule']['fixtext']['text'], 'fix')
+        @item['impact'] = get_impact(group['Rule']['severity'])
+        @item['refs'] = NA_ARRAY
+        @item['tags'] = {}
+        @item['tags']['severity'] = nil
+        @item['tags']['gtitle'] = group['title']
+        @item['tags']['satisfies'] = satisfies_parse(group['Rule']['description'])
+        @item['tags']['gid'] = group['Rule']['id'].split('.').last.split('_').drop(2).first.split('r').first
+        @item['tags']['legacy_id'] = group['Rule']['ident'][2]['text']
+        @item['tags']['rid'] = group['Rule']['ident'][1]['text']
+        @item['tags']['stig_id'] = @benchmarks['id']
+        @item['tags']['fix_id'] = group['Rule']['fix']['id']
+        @item['tags']['cci'] = parse_refs(group['Rule']['ident'])
+        @item['tags']['nist'] = cci_nist_tag(@item['tags']['cci'])
+        @item['code'] = NA_STRING
+        @item['source_location'] = NA_HASH
+        # results were in another location and using the top block "Benchmark" as a starting point caused odd issues. This works for now for the results.
+        @item['results'] = finding(@results, i)
+        controls << @item
+      end
+
+      controls = collapse_duplicates(controls)
+      results = HeimdallDataFormat.new(profile_name: @benchmarks['id'],
+                                       version: @benchmarks['style'],
+                                       duration: NA_FLOAT,
+                                       title: @benchmarks['title'],
+                                       maintainer: @benchmarks['reference']['publisher'],
+                                       summary: @benchmarks['description'],
+                                       license: @benchmarks['notice']['id'],
+                                       copyright: @benchmarks['metadata']['creator'],
+                                       copyright_email: 'disa.stig_spt@mail.mil',
+                                       controls: controls)
+      results.to_hdf
+    end
+  end
+end
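As an illustration (not from the changeset), feeding an SCC or OpenSCAP XCCDF results file through the mapper above could look like this, with a placeholder path and the same to_hdf caveat as before:

  require 'heimdall_tools'

  scap_xml = File.read('xccdf-results.xml')  # placeholder path to SCC/OpenSCAP XCCDF output
  hdf = HeimdallTools::XCCDFResultsMapper.new(scap_xml).to_hdf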
data/lib/heimdall_tools/zap_mapper.rb
CHANGED
@@ -8,8 +8,6 @@ RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')
 CWE_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'cwe-nist-mapping.csv')
 DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze
 
-# rubocop:disable Metrics/AbcSize
-
 module HeimdallTools
   class ZapMapper
     def initialize(zap_json, name)
data/lib/heimdall_tools.rb
CHANGED
@@ -16,4 +16,9 @@ module HeimdallTools
   autoload :DBProtectMapper, 'heimdall_tools/dbprotect_mapper'
   autoload :AwsConfigMapper, 'heimdall_tools/aws_config_mapper'
   autoload :NetsparkerMapper, 'heimdall_tools/netsparker_mapper'
+  autoload :SarifMapper, 'heimdall_tools/sarif_mapper'
+  autoload :ScoutSuiteMapper, 'heimdall_tools/scoutsuite_mapper'
+  autoload :XCCDFResultsMapper, 'heimdall_tools/xccdf_results_mapper'
+  autoload :ASFFMapper, 'heimdall_tools/asff_mapper'
+  autoload :ProwlerMapper, 'heimdall_tools/prowler_mapper'
 end
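These autoload entries register the new mappers lazily: the mapper file is only required the first time its constant is referenced. A minimal illustration (not from the gem):

  require 'heimdall_tools'

  HeimdallTools::SarifMapper  # first reference triggers require 'heimdall_tools/sarif_mapper' via autoload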