heimdall_tools 1.3.37 → 1.3.41
- checksums.yaml +4 -4
- data/README.md +18 -0
- data/lib/heimdall_tools.rb +1 -0
- data/lib/heimdall_tools/aws_config_mapper.rb +26 -26
- data/lib/heimdall_tools/burpsuite_mapper.rb +8 -12
- data/lib/heimdall_tools/cli.rb +19 -8
- data/lib/heimdall_tools/command.rb +0 -2
- data/lib/heimdall_tools/dbprotect_mapper.rb +9 -18
- data/lib/heimdall_tools/fortify_mapper.rb +1 -2
- data/lib/heimdall_tools/hdf.rb +4 -5
- data/lib/heimdall_tools/help/netsparker_mapper.md +7 -0
- data/lib/heimdall_tools/jfrog_xray_mapper.rb +34 -27
- data/lib/heimdall_tools/nessus_mapper.rb +39 -46
- data/lib/heimdall_tools/netsparker_mapper.rb +164 -0
- data/lib/heimdall_tools/nikto_mapper.rb +28 -28
- data/lib/heimdall_tools/snyk_mapper.rb +21 -23
- data/lib/heimdall_tools/sonarqube_mapper.rb +23 -21
- data/lib/heimdall_tools/zap_mapper.rb +4 -5
- data/lib/utilities/xml_to_hash.rb +6 -6
- metadata +40 -24
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 6e2861d76ab4ec09b9a409e934365fdc47935378f99ceeca94ccdf7c1931661d
+  data.tar.gz: f5bc5a6c7339da985cc825216fbb817b070dc94ae31a568955a39c822163a07a
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 53c34b8edc9cd713ee7ae481d6c27c1213ed371a800d1c0e728ae4bfe196561fc3128349547ee839521967fdc96da9478115834541faeba2a83a07cfba1de1a8
+  data.tar.gz: 2f0fb2011e3caa5a9b31fbfe44bc6069284bd35700af2fc6e20a236a2532f328db320c6bd0283930e22ac73d87929ad7dc9306ca5e315fa52c6a7f477b7b6e84
data/README.md
CHANGED

@@ -15,6 +15,7 @@ HeimdallTools supplies several methods to convert output from various tools to "
 - **jfrog_xray_mapper** - package vulnerability scanner
 - **dbprotect_mapper** - database vulnerability scanner
 - **aws_config_mapper** - assess, audit, and evaluate AWS resources
+- **netsparker_mapper** - web application security scanner
 
 Ruby 2.4 or higher (check using "ruby -v")
 
@@ -234,6 +235,23 @@ FLAGS:
 example: heimdall_tools aws_config_mapper -o aws_config_results_hdf.json
 ```
 
+## netsparker_mapper
+
+netsparker_mapper translates an Netsparker XML results file into HDF format JSON to be viewable in Heimdall.
+
+The current iteration only works with Netsparker Enterprise Vulnerabilities Scan.
+
+```
+USAGE: heimdall_tools netsparker_mapper [OPTIONS] -x <netsparker_results_xml> -o <hdf-scan-results.json>
+
+FLAGS:
+    -x <netsparker_results_xml> : path to netsparker results XML file.
+    -o --output <scan-results>  : path to output scan-results json.
+    -V --verbose                : verbose run [optional].
+
+example: heimdall_tools netsparker_mapper -x netsparker_results.xml -o netsparker_hdf.json
+```
+
 ## version
 
 Prints out the gem version
data/lib/heimdall_tools.rb
CHANGED

@@ -15,4 +15,5 @@ module HeimdallTools
   autoload :JfrogXrayMapper, 'heimdall_tools/jfrog_xray_mapper'
   autoload :DBProtectMapper, 'heimdall_tools/dbprotect_mapper'
   autoload :AwsConfigMapper, 'heimdall_tools/aws_config_mapper'
+  autoload :NetsparkerMapper, 'heimdall_tools/netsparker_mapper'
 end
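The new mapper is registered with `Module#autoload`, so its file is only required the first time the constant is referenced. A minimal sketch of that behavior (the results path is a placeholder):

```ruby
# Sketch of Module#autoload: the mapper file is not required at gem load time;
# it is required lazily the first time the constant is referenced.
module HeimdallTools
  autoload :NetsparkerMapper, 'heimdall_tools/netsparker_mapper'
end

# This reference triggers `require 'heimdall_tools/netsparker_mapper'` on demand
# (the path below stands in for a real Netsparker export).
hdf = HeimdallTools::NetsparkerMapper.new(File.read('netsparker_results.xml')).to_hdf
```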
data/lib/heimdall_tools/aws_config_mapper.rb
CHANGED

@@ -13,10 +13,9 @@ INSUFFICIENT_DATA_MSG = 'Not enough data has been collectd to determine complian
 ##
 # HDF mapper for use with AWS Config rules.
 #
-# Ruby AWS Ruby SDK for ConfigService:
+# Ruby AWS Ruby SDK for ConfigService:
 # - https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/ConfigService/Client.html
 #
-# rubocop:disable Metrics/AbcSize, Metrics/ClassLength
 module HeimdallTools
   class AwsConfigMapper
     def initialize(custom_mapping, verbose = false)
@@ -57,8 +56,8 @@ module HeimdallTools
         title: 'AWS Config',
         summary: 'AWS Config',
         controls: controls,
-        statistics: { aws_config_sdk_version: Aws::ConfigService::GEM_VERSION }
-
+        statistics: { aws_config_sdk_version: Aws::ConfigService::GEM_VERSION },
+      )
       results.to_hdf
     end
 
@@ -67,12 +66,12 @@ module HeimdallTools
     ##
     # Read in a config rule -> 800-53 control mapping CSV.
     #
-    # Params:
+    # Params:
     # - path: The file path to the CSV file
     #
     # Returns: A mapped version of the csv in the format { rule_name: row, ... }
     def get_rule_mapping(path)
-
+      CSV.read(path, headers: true).map { |row| [row[0], row] }.to_h
     end
 
     ##
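The filled-in `get_rule_mapping` turns the mapping CSV into a `{ rule_name => row }` lookup. A small sketch of the resulting shape, using a hypothetical two-column mapping file consistent with the `default_mapping_match[1].split('|')` call later in this file:

```ruby
require 'csv'

# Hypothetical mapping file: first column is the AWS Config rule name,
# second column is a pipe-separated list of 800-53 controls.
File.write('aws_config_mapping.csv', <<~CSV)
  AwsConfigRule,NistControls
  iam-password-policy,IA-5(1)|AC-2(f)
  s3-bucket-public-read-prohibited,AC-3
CSV

# Same expression as get_rule_mapping: { rule_name => CSV::Row, ... }
mapping = CSV.read('aws_config_mapping.csv', headers: true)
             .map { |row| [row[0], row] }
             .to_h

puts mapping['iam-password-policy'][1] # => "IA-5(1)|AC-2(f)"
```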
@@ -142,7 +141,7 @@ module HeimdallTools
     end
 
       # Map based on name for easy lookup
-
+      compliance_results.collect { |r| [r.config_rule_name, r.to_h] }.to_h
     end
 
     ##
@@ -192,7 +191,7 @@ module HeimdallTools
       (result[:result_recorded_time] - result[:config_rule_invoked_time]).round(6)
     end
       # status
-      hdf_result['status'] = case result
+      hdf_result['status'] = case result[:compliance_type]
       when 'COMPLIANT'
         'passed'
       when 'NON_COMPLIANT'
@@ -209,19 +208,19 @@ module HeimdallTools
       when 'NOT_APPLICABLE'
         rule[:impact] = 0
         rule[:results] << {
-
-
-
-
-
+          run_time: 0,
+          code_desc: NOT_APPLICABLE_MSG,
+          skip_message: NOT_APPLICABLE_MSG,
+          start_time: DateTime.now.strftime('%Y-%m-%dT%H:%M:%S%:z'),
+          status: 'skipped'
         }
       when 'INSUFFICIENT_DATA'
         rule[:results] << {
-
-
-
-
-
+          run_time: 0,
+          code_desc: INSUFFICIENT_DATA_MSG,
+          skip_message: INSUFFICIENT_DATA_MSG,
+          start_time: DateTime.now.strftime('%Y-%m-%dT%H:%M:%S%:z'),
+          status: 'skipped'
         }
       end
     end
@@ -245,11 +244,11 @@ module HeimdallTools
       # NIST tag
       result['nist'] = []
       default_mapping_match = @default_mapping[config_rule[:config_rule_name]]
-
+
       result['nist'] += default_mapping_match[1].split('|') unless default_mapping_match.nil?
 
       custom_mapping_match = @custom_mapping[config_rule[:config_rule_name]]
-
+
       result['nist'] += custom_mapping_match[1].split('|').map { |name| "#{name} (user provided)" } unless custom_mapping_match.nil?
 
       result['nist'] = ['unmapped'] if result['nist'].empty?
@@ -258,8 +257,10 @@ module HeimdallTools
     end
 
     def check_text(config_rule)
-
-
+      # If no input parameters, then provide an empty JSON array to the JSON
+      # parser because passing nil to JSON.parse throws an exception.
+      params = (JSON.parse(config_rule[:input_parameters] || '[]').map { |key, value| "#{key}: #{value}" }).join('<br/>')
+      check_text = config_rule[:config_rule_arn] || ''
       check_text += "<br/>#{params}" unless params.empty?
       check_text
     end
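The new comment in `check_text` states the reason for the `|| '[]'` fallback: `JSON.parse(nil)` raises a `TypeError`. A quick sketch (the parameter value is a hypothetical AWS Config example):

```ruby
require 'json'

# With no input parameters, fall back to '[]' so JSON.parse has a string to
# work with; JSON.parse(nil) would raise TypeError.
input_parameters = nil
params = JSON.parse(input_parameters || '[]')
             .map { |key, value| "#{key}: #{value}" }
             .join('<br/>')
puts params.inspect # => "" (empty, so check_text skips appending it)

# Hypothetical rule that does define parameters:
input_parameters = '{"MaxPasswordAge":"90"}'
params = JSON.parse(input_parameters || '[]')
             .map { |key, value| "#{key}: #{value}" }
             .join('<br/>')
puts params # => MaxPasswordAge: 90
```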
@@ -274,11 +275,10 @@ module HeimdallTools
     def hdf_descriptions(config_rule)
       [
         {
-
-
-        }
+          label: 'check',
+          data: check_text(config_rule)
+        },
       ]
     end
   end
 end
-# rubocop:enable Metrics/AbcSize, Metrics/ClassLength
data/lib/heimdall_tools/burpsuite_mapper.rb
CHANGED

@@ -16,13 +16,11 @@ IMPACT_MAPPING = {
 CWE_REGEX = 'CWE-(\d*):'.freeze
 
 
-DEFAULT_NIST_TAG =
-
-# rubocop:disable Metrics/AbcSize
+DEFAULT_NIST_TAG = %w{SA-11 RA-5 Rev_4}.freeze
 
 module HeimdallTools
   class BurpSuiteMapper
-    def initialize(burps_xml,
+    def initialize(burps_xml, _name = nil, verbose = false)
       @burps_xml = burps_xml
       @verbose = verbose
 
@@ -33,11 +31,9 @@ module HeimdallTools
       @issues = data['issues']['issue']
       @burpVersion = data['issues']['burpVersion']
       @timestamp = data['issues']['exportTime']
-
     rescue StandardError => e
       raise "Invalid Burpsuite XML file provided Exception: #{e}"
     end
-
   end
 
   def parse_html(block)
@@ -63,7 +59,7 @@ module HeimdallTools
     end
 
     def nist_tag(cweid)
-      entries = @cwe_nist_mapping.select { |x| cweid.include?
+      entries = @cwe_nist_mapping.select { |x| cweid.include?(x[:cweid].to_s) && !x[:nistid].nil? }
       tags = entries.map { |x| [x[:nistid], "Rev_#{x[:rev]}"] }
       tags.empty? ? DEFAULT_NIST_TAG : tags.flatten.uniq
     end
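The completed `nist_tag` filters the CWE-to-NIST mapping by CWE id and drops rows without a NIST id, falling back to `DEFAULT_NIST_TAG`. A standalone sketch with hypothetical mapping rows:

```ruby
DEFAULT_NIST_TAG = %w{SA-11 RA-5 Rev_4}.freeze

# Hypothetical rows of the CWE -> NIST 800-53 mapping the gem loads from CSV.
cwe_nist_mapping = [
  { cweid: 79,  nistid: 'SI-10', rev: 4 },
  { cweid: 89,  nistid: 'SI-10', rev: 4 },
  { cweid: 311, nistid: nil,     rev: 4 }
]

def nist_tag(cweid, cwe_nist_mapping)
  # Keep rows whose CWE id appears in the finding and that carry a NIST id.
  entries = cwe_nist_mapping.select { |x| cweid.include?(x[:cweid].to_s) && !x[:nistid].nil? }
  tags = entries.map { |x| [x[:nistid], "Rev_#{x[:rev]}"] }
  tags.empty? ? DEFAULT_NIST_TAG : tags.flatten.uniq
end

p nist_tag(['79'], cwe_nist_mapping)  # => ["SI-10", "Rev_4"]
p nist_tag(['999'], cwe_nist_mapping) # => ["SA-11", "RA-5", "Rev_4"]
```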
@@ -86,17 +82,17 @@ module HeimdallTools
     end
 
     def desc_tags(data, label)
-      {
+      { data: data || NA_STRING, label: label || NA_STRING }
     end
 
     # Burpsuite report could have multiple issue entries for multiple findings of same issue type.
-    # The meta data is identical across entries
+    # The meta data is identical across entries
     # method collapse_duplicates return unique controls with applicable findings collapsed into it.
     def collapse_duplicates(controls)
       unique_controls = []
 
       controls.map { |x| x['id'] }.uniq.each do |id|
-        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map {|x| x['results']}
+        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
         unique_control = controls.find { |x| x['id'].eql?(id) }
         unique_control['results'] = collapsed_results.flatten
         unique_controls << unique_control
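`collapse_duplicates` (the same helper appears in the DBProtect mapper below) merges controls that share an id into one control carrying all findings. A self-contained sketch with hypothetical controls:

```ruby
# Two findings of the same issue type produce two controls with the same id;
# collapse_duplicates folds them into one control holding both results.
controls = [
  { 'id' => 'CWE-79', 'results' => [{ 'code_desc' => 'XSS on /search' }] },
  { 'id' => 'CWE-79', 'results' => [{ 'code_desc' => 'XSS on /login' }] },
  { 'id' => 'CWE-89', 'results' => [{ 'code_desc' => 'SQLi on /items' }] }
]

def collapse_duplicates(controls)
  unique_controls = []
  controls.map { |x| x['id'] }.uniq.each do |id|
    collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
    unique_control = controls.find { |x| x['id'].eql?(id) }
    unique_control['results'] = collapsed_results.flatten
    unique_controls << unique_control
  end
  unique_controls
end

p collapse_duplicates(controls).map { |c| [c['id'], c['results'].length] }
# => [["CWE-79", 2], ["CWE-89", 1]]
```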
@@ -129,8 +125,8 @@ module HeimdallTools
       controls = collapse_duplicates(controls)
       results = HeimdallDataFormat.new(profile_name: 'BurpSuite Pro Scan',
                                        version: @burpVersion,
-                                       title:
-                                       summary:
+                                       title: 'BurpSuite Pro Scan',
+                                       summary: 'BurpSuite Pro Scan',
                                        controls: controls)
       results.to_hdf
     end
data/lib/heimdall_tools/cli.rb
CHANGED

@@ -54,11 +54,10 @@ module HeimdallTools
       hdfs = HeimdallTools::NessusMapper.new(File.read(options[:xml])).to_hdf
 
       puts "\nHDF Generated:"
-      hdfs.
+      hdfs.each_key do |host|
         File.write("#{options[:output_prefix]}-#{host}.json", hdfs[host])
         puts "#{options[:output_prefix]}-#{host}.json"
       end
-
     end
 
     desc 'snyk_mapper', 'snyk_mapper translates Snyk results Json to HDF format Json be viewed on Heimdall'
@@ -69,7 +68,7 @@ module HeimdallTools
     def snyk_mapper
       hdfs = HeimdallTools::SnykMapper.new(File.read(options[:json]), options[:name]).to_hdf
       puts "\r\HDF Generated:\n"
-      hdfs.
+      hdfs.each_key do |host|
         File.write("#{options[:output_prefix]}-#{host}.json", hdfs[host])
         puts "#{options[:output_prefix]}-#{host}.json"
       end
@@ -84,7 +83,7 @@ module HeimdallTools
       hdf = HeimdallTools::NiktoMapper.new(File.read(options[:json])).to_hdf
       File.write(options[:output], hdf)
       puts "\r\HDF Generated:\n"
-      puts
+      puts options[:output].to_s
     end
 
     desc 'jfrog_xray_mapper', 'jfrog_xray_mapper translates Jfrog Xray results Json to HDF format Json be viewed on Heimdall'
@@ -96,9 +95,9 @@ module HeimdallTools
       hdf = HeimdallTools::JfrogXrayMapper.new(File.read(options[:json])).to_hdf
       File.write(options[:output], hdf)
       puts "\r\HDF Generated:\n"
-      puts
+      puts options[:output].to_s
     end
-
+
     desc 'dbprotect_mapper', 'dbprotect_mapper translates dbprotect results xml to HDF format Json be viewed on Heimdall'
     long_desc Help.text(:dbprotect_mapper)
     option :xml, required: true, aliases: '-x'
@@ -108,7 +107,7 @@ module HeimdallTools
       hdf = HeimdallTools::DBProtectMapper.new(File.read(options[:xml])).to_hdf
       File.write(options[:output], hdf)
       puts "\r\HDF Generated:\n"
-      puts
+      puts options[:output].to_s
     end
 
     desc 'aws_config_mapper', 'aws_config_mapper pulls Ruby AWS SDK data to translate AWS Config Rule results into HDF format Json to be viewable in Heimdall'
@@ -120,7 +119,19 @@ module HeimdallTools
       hdf = HeimdallTools::AwsConfigMapper.new(options[:custom_mapping]).to_hdf
       File.write(options[:output], hdf)
       puts "\r\HDF Generated:\n"
-      puts
+      puts options[:output].to_s
+    end
+
+    desc 'netsparker_mapper', 'netsparker_mapper translates netsparker enterprise results xml to HDF format Json be viewed on Heimdall'
+    long_desc Help.text(:netsparker_mapper)
+    option :xml, required: true, aliases: '-x'
+    option :output, required: true, aliases: '-o'
+    option :verbose, type: :boolean, aliases: '-V'
+    def netsparker_mapper
+      hdf = HeimdallTools::NetsparkerMapper.new(File.read(options[:xml])).to_hdf
+      File.write(options[:output], hdf)
+      puts "\r\HDF Generated:\n"
+      puts options[:output].to_s
     end
 
     desc 'version', 'prints version'
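The new `netsparker_mapper` subcommand is a thin wrapper around the mapper class added in this release. A minimal sketch of the equivalent programmatic call (file paths are examples):

```ruby
require 'heimdall_tools'

# Equivalent of:
#   heimdall_tools netsparker_mapper -x netsparker_results.xml -o netsparker_hdf.json
# using the class wired up in this release (paths are placeholders).
hdf = HeimdallTools::NetsparkerMapper.new(File.read('netsparker_results.xml')).to_hdf
File.write('netsparker_hdf.json', hdf)
puts 'netsparker_hdf.json'
```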
data/lib/heimdall_tools/dbprotect_mapper.rb
CHANGED

@@ -10,21 +10,17 @@ IMPACT_MAPPING = {
   Informational: 0.0
 }.freeze
 
-# rubocop:disable Metrics/AbcSize
-
 module HeimdallTools
   class DBProtectMapper
-    def initialize(xml,
+    def initialize(xml, _name = nil, verbose = false)
       @verbose = verbose
 
       begin
         dataset = xml_to_hash(xml)
         @entries = compile_findings(dataset['dataset'])
-
       rescue StandardError => e
         raise "Invalid DBProtect XML file provided Exception: #{e};\nNote that XML must be of kind `Check Results Details`."
       end
-
     end
 
     def to_hdf
@@ -46,7 +42,7 @@ module HeimdallTools
       end
       controls = collapse_duplicates(controls)
       results = HeimdallDataFormat.new(profile_name: @entries.first['Policy'],
-                                       version:
+                                       version: '',
                                        title: @entries.first['Job Name'],
                                        summary: format_summary(@entries.first),
                                        controls: controls)
@@ -56,16 +52,15 @@ module HeimdallTools
     private
 
     def compile_findings(dataset)
-      keys = dataset['metadata']['item'].map{ |e| e['name']}
-
-      findings
+      keys = dataset['metadata']['item'].map { |e| e['name'] }
+      dataset['data']['row'].map { |e| keys.zip(e['value']).to_h }
     end
 
     def format_desc(entry)
       text = []
       text << "Task : #{entry['Task']}"
       text << "Check Category : #{entry['Check Category']}"
-      text.join(
+      text.join('; ')
     end
 
     def format_summary(entry)
@@ -90,14 +85,12 @@ module HeimdallTools
         finding['status'] = 'skipped'
       when 'Failed'
         finding['status'] = 'failed'
-        finding['backtrace'] = [
+        finding['backtrace'] = ['DB Protect Failed Check']
       when 'Finding'
         finding['status'] = 'failed'
       when 'Not A Finding'
         finding['status'] = 'passed'
-
-        finding['status'] = 'skipped'
-      else
+      else
         finding['status'] = 'skipped'
       end
       [finding]
@@ -108,20 +101,18 @@ module HeimdallTools
     end
 
     # DBProtect report could have multiple issue entries for multiple findings of same issue type.
-    # The meta data is identical across entries
+    # The meta data is identical across entries
     # method collapse_duplicates return unique controls with applicable findings collapsed into it.
     def collapse_duplicates(controls)
       unique_controls = []
 
       controls.map { |x| x['id'] }.uniq.each do |id|
-        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map {|x| x['results']}
+        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
         unique_control = controls.find { |x| x['id'].eql?(id) }
         unique_control['results'] = collapsed_results.flatten
         unique_controls << unique_control
       end
       unique_controls
     end
-
-
   end
 end
data/lib/heimdall_tools/fortify_mapper.rb
CHANGED

@@ -3,7 +3,7 @@ require 'heimdall_tools/hdf'
 require 'utilities/xml_to_hash'
 
 NIST_REFERENCE_NAME = 'Standards Mapping - NIST Special Publication 800-53 Revision 4'.freeze
-DEFAULT_NIST_TAG =
+DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze
 
 module HeimdallTools
   class FortifyMapper
@@ -19,7 +19,6 @@ module HeimdallTools
       @rules = data['FVDL']['Description']
       @uuid = data['FVDL']['UUID']
       @fortify_version = data['FVDL']['EngineData']['EngineVersion']
-
     rescue StandardError => e
       raise "Invalid Fortify FVDL file provided Exception: #{e}"
     end
data/lib/heimdall_tools/hdf.rb
CHANGED
@@ -2,15 +2,14 @@ require 'json'
 require 'heimdall_tools/version'
 require 'openssl'
 
-NA_STRING =
-NA_TAG = nil
+NA_STRING = ''.freeze
+NA_TAG = nil
 NA_ARRAY = [].freeze
 NA_HASH = {}.freeze
-NA_FLOAT = 0.0
+NA_FLOAT = 0.0
 
 PLATFORM_NAME = 'Heimdall Tools'.freeze
 
-
 module HeimdallTools
   class HeimdallDataFormat
     def initialize(profile_name: NA_TAG,
@@ -60,7 +59,7 @@ module HeimdallTools
       profile_block['groups'] = groups
       profile_block['status'] = status
       profile_block['controls'] = controls
-      profile_block['sha256'] = OpenSSL::Digest::SHA256.digest(profile_block.to_s).
+      profile_block['sha256'] = OpenSSL::Digest::SHA256.digest(profile_block.to_s).unpack1('H*')
       @results_json['profiles'] << profile_block
     end
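The completed `sha256` line hex-encodes the raw digest with `unpack1('H*')`, which is equivalent to calling `hexdigest` directly. A quick check:

```ruby
require 'openssl'

profile_block = { 'name' => 'example profile' } # hypothetical block

# digest returns raw bytes; unpack1('H*') renders them as lowercase hex.
hex_via_unpack = OpenSSL::Digest::SHA256.digest(profile_block.to_s).unpack1('H*')
hex_direct     = OpenSSL::Digest::SHA256.hexdigest(profile_block.to_s)

puts hex_via_unpack == hex_direct # => true
```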