heimdall_tools 1.3.40 → 1.3.41

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 1a306a3ebf9a2755760b9cc825f1123a62291723d92d215c03d3e9d958d22497
-  data.tar.gz: 5e551876a20872c32126a6c96d08e3c6cd6acdf73bc31abf1ba918693764d4e9
+  metadata.gz: 6e2861d76ab4ec09b9a409e934365fdc47935378f99ceeca94ccdf7c1931661d
+  data.tar.gz: f5bc5a6c7339da985cc825216fbb817b070dc94ae31a568955a39c822163a07a
 SHA512:
-  metadata.gz: 4ed8a026a5fbbd63d3da4ebb4211d8ee8cc371ae29e57f4deb10f9e188491af29b2988e17524aa5ab4c064f62a44349edfa9b626641c89d6a958ff040df39e40
-  data.tar.gz: 2874bbd8f062e601ed1fb65b53ec53ee5267d7c17457cef91f936dd28936bac6f35b4387c3d452c301c593cdb13e01ea6d587c1885a88b1a6f025ac3ee38bdaa
+  metadata.gz: 53c34b8edc9cd713ee7ae481d6c27c1213ed371a800d1c0e728ae4bfe196561fc3128349547ee839521967fdc96da9478115834541faeba2a83a07cfba1de1a8
+  data.tar.gz: 2f0fb2011e3caa5a9b31fbfe44bc6069284bd35700af2fc6e20a236a2532f328db320c6bd0283930e22ac73d87929ad7dc9306ca5e315fa52c6a7f477b7b6e84
@@ -13,10 +13,9 @@ INSUFFICIENT_DATA_MSG = 'Not enough data has been collectd to determine complian
 ##
 # HDF mapper for use with AWS Config rules.
 #
-# Ruby AWS Ruby SDK for ConfigService:
+# Ruby AWS Ruby SDK for ConfigService:
 # - https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/ConfigService/Client.html
 #
-# rubocop:disable Metrics/AbcSize, Metrics/ClassLength
 module HeimdallTools
   class AwsConfigMapper
     def initialize(custom_mapping, verbose = false)
@@ -57,8 +56,8 @@ module HeimdallTools
                                        title: 'AWS Config',
                                        summary: 'AWS Config',
                                        controls: controls,
-                                       statistics: { aws_config_sdk_version: Aws::ConfigService::GEM_VERSION }
-                                       )
+                                       statistics: { aws_config_sdk_version: Aws::ConfigService::GEM_VERSION },
+                                       )
       results.to_hdf
     end
 
@@ -67,12 +66,12 @@ module HeimdallTools
     ##
     # Read in a config rule -> 800-53 control mapping CSV.
     #
-    # Params:
+    # Params:
     # - path: The file path to the CSV file
     #
     # Returns: A mapped version of the csv in the format { rule_name: row, ... }
     def get_rule_mapping(path)
-      Hash[CSV.read(path, headers: true).map { |row| [row[0], row] }]
+      CSV.read(path, headers: true).map { |row| [row[0], row] }.to_h
     end
 
     ##
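
Note: the Hash[...] to .to_h rewrites in this and the next hunk are behavior-preserving; both forms build a hash from an array of [key, value] pairs. A minimal illustrative sketch with made-up data, not taken from the gem:

    pairs = [['rule-a', 'AC-2'], ['rule-b', 'AU-6']]
    Hash[pairs]  # => {"rule-a"=>"AC-2", "rule-b"=>"AU-6"}
    pairs.to_h   # => the same hash, via Array#to_h (the form newer RuboCop style rules favor)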
@@ -142,7 +141,7 @@ module HeimdallTools
       end
 
       # Map based on name for easy lookup
-      Hash[compliance_results.collect { |r| [r.config_rule_name, r.to_h] }]
+      compliance_results.collect { |r| [r.config_rule_name, r.to_h] }.to_h
     end
 
     ##
@@ -192,7 +191,7 @@ module HeimdallTools
         (result[:result_recorded_time] - result[:config_rule_invoked_time]).round(6)
       end
       # status
-      hdf_result['status'] = case result.dig(:compliance_type)
+      hdf_result['status'] = case result[:compliance_type]
       when 'COMPLIANT'
         'passed'
       when 'NON_COMPLIANT'
@@ -209,19 +208,19 @@ module HeimdallTools
       when 'NOT_APPLICABLE'
         rule[:impact] = 0
         rule[:results] << {
-          'run_time': 0,
-          'code_desc': NOT_APPLICABLE_MSG,
-          'skip_message': NOT_APPLICABLE_MSG,
-          'start_time': DateTime.now.strftime('%Y-%m-%dT%H:%M:%S%:z'),
-          'status': 'skipped'
+          run_time: 0,
+          code_desc: NOT_APPLICABLE_MSG,
+          skip_message: NOT_APPLICABLE_MSG,
+          start_time: DateTime.now.strftime('%Y-%m-%dT%H:%M:%S%:z'),
+          status: 'skipped'
         }
       when 'INSUFFICIENT_DATA'
         rule[:results] << {
-          'run_time': 0,
-          'code_desc': INSUFFICIENT_DATA_MSG,
-          'skip_message': INSUFFICIENT_DATA_MSG,
-          'start_time': DateTime.now.strftime('%Y-%m-%dT%H:%M:%S%:z'),
-          'status': 'skipped'
+          run_time: 0,
+          code_desc: INSUFFICIENT_DATA_MSG,
+          skip_message: INSUFFICIENT_DATA_MSG,
+          start_time: DateTime.now.strftime('%Y-%m-%dT%H:%M:%S%:z'),
+          status: 'skipped'
         }
       end
     end
@@ -245,11 +244,11 @@ module HeimdallTools
       # NIST tag
       result['nist'] = []
      default_mapping_match = @default_mapping[config_rule[:config_rule_name]]
-
+
      result['nist'] += default_mapping_match[1].split('|') unless default_mapping_match.nil?
 
      custom_mapping_match = @custom_mapping[config_rule[:config_rule_name]]
-
+
      result['nist'] += custom_mapping_match[1].split('|').map { |name| "#{name} (user provided)" } unless custom_mapping_match.nil?
 
      result['nist'] = ['unmapped'] if result['nist'].empty?
@@ -258,8 +257,10 @@ module HeimdallTools
     end
 
     def check_text(config_rule)
-      params = (JSON.parse(config_rule[:input_parameters]).map { |key, value| "#{key}: #{value}" }).join('<br/>')
-      check_text = config_rule[:config_rule_arn]
+      # If no input parameters, then provide an empty JSON array to the JSON
+      # parser because passing nil to JSON.parse throws an exception.
+      params = (JSON.parse(config_rule[:input_parameters] || '[]').map { |key, value| "#{key}: #{value}" }).join('<br/>')
+      check_text = config_rule[:config_rule_arn] || ''
       check_text += "<br/>#{params}" unless params.empty?
       check_text
     end
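
Note: the || '[]' fallback added in check_text above matters because JSON.parse raises when handed nil. A standalone sketch, not part of the gem:

    require 'json'

    JSON.parse(nil)          # raises TypeError (nil cannot be converted to a String)
    JSON.parse(nil || '[]')  # => [] , so rules without input_parameters no longer crash the mapper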
@@ -274,11 +275,10 @@ module HeimdallTools
     def hdf_descriptions(config_rule)
       [
         {
-          'label': 'check',
-          'data': check_text(config_rule)
-        }
+          label: 'check',
+          data: check_text(config_rule)
+        },
       ]
     end
   end
 end
-# rubocop:enable Metrics/AbcSize, Metrics/ClassLength
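
Note: dropping the quotes from keys such as 'label': and 'run_time': in the hunks above does not change the resulting hashes; in Ruby, a quoted label followed by a colon already produces a symbol key. A small sketch, independent of the gem:

    { 'label': 'check' } == { label: 'check' }   # => true, both keys are the :label symbol
    { 'label' => 'check' }                       # only the hash-rocket form keeps a String key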
@@ -16,13 +16,11 @@ IMPACT_MAPPING = {
 
 CWE_REGEX = 'CWE-(\d*):'.freeze
 
-DEFAULT_NIST_TAG = ["SA-11", "RA-5", "Rev_4"].freeze
-
-# rubocop:disable Metrics/AbcSize
+DEFAULT_NIST_TAG = %w{SA-11 RA-5 Rev_4}.freeze
 
 module HeimdallTools
   class BurpSuiteMapper
-    def initialize(burps_xml, name=nil, verbose = false)
+    def initialize(burps_xml, _name = nil, verbose = false)
       @burps_xml = burps_xml
       @verbose = verbose
 
@@ -33,11 +31,9 @@ module HeimdallTools
         @issues = data['issues']['issue']
         @burpVersion = data['issues']['burpVersion']
         @timestamp = data['issues']['exportTime']
-
       rescue StandardError => e
         raise "Invalid Burpsuite XML file provided Exception: #{e}"
       end
-
     end
 
     def parse_html(block)
@@ -86,17 +82,17 @@ module HeimdallTools
     end
 
     def desc_tags(data, label)
-      { "data": data || NA_STRING, "label": label || NA_STRING }
+      { data: data || NA_STRING, label: label || NA_STRING }
     end
 
     # Burpsuite report could have multiple issue entries for multiple findings of same issue type.
-    # The meta data is identical across entries
+    # The meta data is identical across entries
     # method collapse_duplicates return unique controls with applicable findings collapsed into it.
     def collapse_duplicates(controls)
       unique_controls = []
 
       controls.map { |x| x['id'] }.uniq.each do |id|
-        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map {|x| x['results']}
+        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
         unique_control = controls.find { |x| x['id'].eql?(id) }
         unique_control['results'] = collapsed_results.flatten
         unique_controls << unique_control
@@ -129,8 +125,8 @@ module HeimdallTools
       controls = collapse_duplicates(controls)
       results = HeimdallDataFormat.new(profile_name: 'BurpSuite Pro Scan',
                                        version: @burpVersion,
-                                       title: "BurpSuite Pro Scan",
-                                       summary: "BurpSuite Pro Scan",
+                                       title: 'BurpSuite Pro Scan',
+                                       summary: 'BurpSuite Pro Scan',
                                        controls: controls)
       results.to_hdf
     end
@@ -54,11 +54,10 @@ module HeimdallTools
       hdfs = HeimdallTools::NessusMapper.new(File.read(options[:xml])).to_hdf
 
       puts "\nHDF Generated:"
-      hdfs.keys.each do | host |
+      hdfs.each_key do |host|
         File.write("#{options[:output_prefix]}-#{host}.json", hdfs[host])
         puts "#{options[:output_prefix]}-#{host}.json"
       end
-
     end
 
     desc 'snyk_mapper', 'snyk_mapper translates Snyk results Json to HDF format Json be viewed on Heimdall'
@@ -69,7 +68,7 @@ module HeimdallTools
     def snyk_mapper
       hdfs = HeimdallTools::SnykMapper.new(File.read(options[:json]), options[:name]).to_hdf
       puts "\r\HDF Generated:\n"
-      hdfs.keys.each do | host |
+      hdfs.each_key do |host|
         File.write("#{options[:output_prefix]}-#{host}.json", hdfs[host])
         puts "#{options[:output_prefix]}-#{host}.json"
       end
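
Note: hdfs.keys.each and hdfs.each_key iterate the same keys; each_key simply avoids building the intermediate key array and is the form RuboCop's Style/HashEachMethods suggests. A sketch with hypothetical data, not from the gem:

    hdfs = { 'host-a' => '{"profiles":[]}', 'host-b' => '{"profiles":[]}' }
    hdfs.keys.each { |host| puts host }  # allocates hdfs.keys first
    hdfs.each_key { |host| puts host }   # same output, no intermediate array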
@@ -84,7 +83,7 @@ module HeimdallTools
       hdf = HeimdallTools::NiktoMapper.new(File.read(options[:json])).to_hdf
       File.write(options[:output], hdf)
       puts "\r\HDF Generated:\n"
-      puts "#{options[:output]}"
+      puts options[:output].to_s
     end
 
     desc 'jfrog_xray_mapper', 'jfrog_xray_mapper translates Jfrog Xray results Json to HDF format Json be viewed on Heimdall'
@@ -96,9 +95,9 @@ module HeimdallTools
       hdf = HeimdallTools::JfrogXrayMapper.new(File.read(options[:json])).to_hdf
       File.write(options[:output], hdf)
       puts "\r\HDF Generated:\n"
-      puts "#{options[:output]}"
+      puts options[:output].to_s
     end
-
+
     desc 'dbprotect_mapper', 'dbprotect_mapper translates dbprotect results xml to HDF format Json be viewed on Heimdall'
     long_desc Help.text(:dbprotect_mapper)
     option :xml, required: true, aliases: '-x'
@@ -108,7 +107,7 @@ module HeimdallTools
       hdf = HeimdallTools::DBProtectMapper.new(File.read(options[:xml])).to_hdf
       File.write(options[:output], hdf)
       puts "\r\HDF Generated:\n"
-      puts "#{options[:output]}"
+      puts options[:output].to_s
     end
 
     desc 'aws_config_mapper', 'aws_config_mapper pulls Ruby AWS SDK data to translate AWS Config Rule results into HDF format Json to be viewable in Heimdall'
@@ -120,9 +119,9 @@ module HeimdallTools
       hdf = HeimdallTools::AwsConfigMapper.new(options[:custom_mapping]).to_hdf
       File.write(options[:output], hdf)
       puts "\r\HDF Generated:\n"
-      puts "#{options[:output]}"
+      puts options[:output].to_s
     end
-
+
     desc 'netsparker_mapper', 'netsparker_mapper translates netsparker enterprise results xml to HDF format Json be viewed on Heimdall'
     long_desc Help.text(:netsparker_mapper)
     option :xml, required: true, aliases: '-x'
@@ -132,7 +131,7 @@ module HeimdallTools
       hdf = HeimdallTools::NetsparkerMapper.new(File.read(options[:xml])).to_hdf
       File.write(options[:output], hdf)
       puts "\r\HDF Generated:\n"
-      puts "#{options[:output]}"
+      puts options[:output].to_s
     end
 
     desc 'version', 'prints version'
@@ -3,8 +3,6 @@ require 'thor'
 # Override thor's long_desc identation behavior
 # https://github.com/erikhuda/thor/issues/398
 
-# rubocop:disable Naming/UncommunicativeMethodParamName
-
 class Thor
   module Shell
     class Basic
@@ -10,21 +10,17 @@ IMPACT_MAPPING = {
   Informational: 0.0
 }.freeze
 
-# rubocop:disable Metrics/AbcSize
-
 module HeimdallTools
   class DBProtectMapper
-    def initialize(xml, name=nil, verbose = false)
+    def initialize(xml, _name = nil, verbose = false)
       @verbose = verbose
 
       begin
         dataset = xml_to_hash(xml)
         @entries = compile_findings(dataset['dataset'])
-
       rescue StandardError => e
         raise "Invalid DBProtect XML file provided Exception: #{e};\nNote that XML must be of kind `Check Results Details`."
       end
-
     end
 
     def to_hdf
@@ -46,7 +42,7 @@ module HeimdallTools
       end
       controls = collapse_duplicates(controls)
       results = HeimdallDataFormat.new(profile_name: @entries.first['Policy'],
-                                       version: "",
+                                       version: '',
                                        title: @entries.first['Job Name'],
                                        summary: format_summary(@entries.first),
                                        controls: controls)
@@ -56,16 +52,15 @@ module HeimdallTools
     private
 
     def compile_findings(dataset)
-      keys = dataset['metadata']['item'].map{ |e| e['name']}
-      findings = dataset['data']['row'].map { |e| Hash[keys.zip(e['value'])] }
-      findings
+      keys = dataset['metadata']['item'].map { |e| e['name'] }
+      dataset['data']['row'].map { |e| keys.zip(e['value']).to_h }
     end
 
     def format_desc(entry)
       text = []
       text << "Task : #{entry['Task']}"
       text << "Check Category : #{entry['Check Category']}"
-      text.join("; ")
+      text.join('; ')
     end
 
     def format_summary(entry)
@@ -90,14 +85,12 @@ module HeimdallTools
         finding['status'] = 'skipped'
       when 'Failed'
         finding['status'] = 'failed'
-        finding['backtrace'] = ["DB Protect Failed Check"]
+        finding['backtrace'] = ['DB Protect Failed Check']
       when 'Finding'
         finding['status'] = 'failed'
       when 'Not A Finding'
         finding['status'] = 'passed'
-      when 'Skipped'
-        finding['status'] = 'skipped'
-      else
+      else
         finding['status'] = 'skipped'
       end
       [finding]
@@ -108,20 +101,18 @@ module HeimdallTools
     end
 
     # DBProtect report could have multiple issue entries for multiple findings of same issue type.
-    # The meta data is identical across entries
+    # The meta data is identical across entries
     # method collapse_duplicates return unique controls with applicable findings collapsed into it.
     def collapse_duplicates(controls)
       unique_controls = []
 
       controls.map { |x| x['id'] }.uniq.each do |id|
-        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map {|x| x['results']}
+        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
         unique_control = controls.find { |x| x['id'].eql?(id) }
         unique_control['results'] = collapsed_results.flatten
         unique_controls << unique_control
       end
       unique_controls
     end
-
-
   end
 end
@@ -3,7 +3,7 @@ require 'heimdall_tools/hdf'
 require 'utilities/xml_to_hash'
 
 NIST_REFERENCE_NAME = 'Standards Mapping - NIST Special Publication 800-53 Revision 4'.freeze
-DEFAULT_NIST_TAG = ["SA-11", "RA-5"].freeze
+DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze
 
 module HeimdallTools
   class FortifyMapper
@@ -19,7 +19,6 @@ module HeimdallTools
       @rules = data['FVDL']['Description']
       @uuid = data['FVDL']['UUID']
       @fortify_version = data['FVDL']['EngineData']['EngineVersion']
-
     rescue StandardError => e
       raise "Invalid Fortify FVDL file provided Exception: #{e}"
     end
@@ -2,15 +2,14 @@ require 'json'
 require 'heimdall_tools/version'
 require 'openssl'
 
-NA_STRING = "".freeze
-NA_TAG = nil.freeze
+NA_STRING = ''.freeze
+NA_TAG = nil
 NA_ARRAY = [].freeze
 NA_HASH = {}.freeze
-NA_FLOAT = 0.0.freeze
+NA_FLOAT = 0.0
 
 PLATFORM_NAME = 'Heimdall Tools'.freeze
 
-
 module HeimdallTools
   class HeimdallDataFormat
     def initialize(profile_name: NA_TAG,
@@ -60,7 +59,7 @@ module HeimdallTools
       profile_block['groups'] = groups
       profile_block['status'] = status
       profile_block['controls'] = controls
-      profile_block['sha256'] = OpenSSL::Digest::SHA256.digest(profile_block.to_s).unpack("H*")[0]
+      profile_block['sha256'] = OpenSSL::Digest::SHA256.digest(profile_block.to_s).unpack1('H*')
       @results_json['profiles'] << profile_block
     end
 
@@ -10,10 +10,10 @@ CWE_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'cwe-nist-mapping.csv')
 IMPACT_MAPPING = {
   high: 0.7,
   medium: 0.5,
-  low: 0.3,
+  low: 0.3
 }.freeze
 
-DEFAULT_NIST_TAG = ["SA-11", "RA-5"].freeze
+DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze
 
 # Loading spinner sign
 $spinner = Enumerator.new do |e|
@@ -27,14 +27,13 @@ end
 
 module HeimdallTools
   class JfrogXrayMapper
-    def initialize(xray_json, name=nil, verbose = false)
+    def initialize(xray_json, _name = nil, verbose = false)
       @xray_json = xray_json
       @verbose = verbose
 
       begin
         @cwe_nist_mapping = parse_mapper
         @project = JSON.parse(xray_json)
-
       rescue StandardError => e
         raise "Invalid JFrog Xray JSON file provided Exception: #{e}"
       end
@@ -44,11 +43,11 @@ module HeimdallTools
       finding = {}
       finding['status'] = 'failed'
       finding['code_desc'] = []
-      finding['code_desc'] << "source_comp_id : #{vulnerability['source_comp_id'].to_s }"
-      finding['code_desc'] << "vulnerable_versions : #{vulnerability['component_versions']['vulnerable_versions'].to_s }"
-      finding['code_desc'] << "fixed_versions : #{vulnerability['component_versions']['fixed_versions'].to_s }"
-      finding['code_desc'] << "issue_type : #{vulnerability['issue_type'].to_s }"
-      finding['code_desc'] << "provider : #{vulnerability['provider'].to_s }"
+      finding['code_desc'] << "source_comp_id : #{vulnerability['source_comp_id']}"
+      finding['code_desc'] << "vulnerable_versions : #{vulnerability['component_versions']['vulnerable_versions']}"
+      finding['code_desc'] << "fixed_versions : #{vulnerability['component_versions']['fixed_versions']}"
+      finding['code_desc'] << "issue_type : #{vulnerability['issue_type']}"
+      finding['code_desc'] << "provider : #{vulnerability['provider']}"
       finding['code_desc'] = finding['code_desc'].join("\n")
       finding['run_time'] = NA_FLOAT
 
@@ -61,8 +60,8 @@ module HeimdallTools
       text = []
       info = vulnerability['component_versions']['more_details']
       text << info['description'].to_s
-      text << "cves: #{info['cves'].to_s }" unless info['cves'].nil?
-      text.join("<br>")
+      text << "cves: #{info['cves']}" unless info['cves'].nil?
+      text.join('<br>')
     end
 
     def nist_tag(cweid)
@@ -73,9 +72,9 @@ module HeimdallTools
 
     def parse_identifiers(vulnerability, ref)
       # Extracting id number from reference style CWE-297
-      vulnerability['component_versions']['more_details']['cves'][0][ref.downcase].map { |e| e.split("#{ref}-")[1] }
-    rescue
-      return []
+      vulnerability['component_versions']['more_details']['cves'][0][ref.downcase].map { |e| e.split("#{ref}-")[1] }
+    rescue StandardError
+      []
     end
 
     def impact(severity)
@@ -91,17 +90,17 @@ module HeimdallTools
     end
 
     def desc_tags(data, label)
-      { "data": data || NA_STRING, "label": label || NA_STRING }
+      { data: data || NA_STRING, label: label || NA_STRING }
     end
 
     # Xray report could have multiple vulnerability entries for multiple findings of same issue type.
-    # The meta data is identical across entries
+    # The meta data is identical across entries
     # method collapse_duplicates return unique controls with applicable findings collapsed into it.
     def collapse_duplicates(controls)
       unique_controls = []
 
       controls.map { |x| x['id'] }.uniq.each do |id|
-        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map {|x| x['results']}
+        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
         unique_control = controls.find { |x| x['id'].eql?(id) }
         unique_control['results'] = collapsed_results.flatten
         unique_controls << unique_control
@@ -112,9 +111,9 @@ module HeimdallTools
     def to_hdf
       controls = []
       vulnerability_count = 0
-      @project['data'].uniq.each do | vulnerability |
+      @project['data'].uniq.each do |vulnerability|
         printf("\rProcessing: %s", $spinner.next)
-
+
         vulnerability_count +=1
         item = {}
         item['tags'] = {}
@@ -123,26 +122,26 @@ module HeimdallTools
         item['source_location'] = NA_HASH
         item['descriptions'] = NA_ARRAY
 
-        # Xray JSONs might note have `id` fields populated.
+        # Xray JSONs might note have `id` fields populated.
         # If thats a case MD5 hash is used to collapse vulnerability findings of the same type.
-        item['id'] = vulnerability['id'].empty? ? OpenSSL::Digest::MD5.digest(vulnerability['summary'].to_s).unpack("H*")[0].to_s : vulnerability['id']
+        item['id'] = vulnerability['id'].empty? ? OpenSSL::Digest::MD5.digest(vulnerability['summary'].to_s).unpack1('H*').to_s : vulnerability['id']
         item['title'] = vulnerability['summary'].to_s
         item['desc'] = format_control_desc(vulnerability)
-        item['impact'] = impact(vulnerability['severity'].to_s)
+        item['impact'] = impact(vulnerability['severity'].to_s)
        item['code'] = NA_STRING
        item['results'] = finding(vulnerability)
 
-        item['tags']['nist'] = nist_tag( parse_identifiers( vulnerability, 'CWE') )
-        item['tags']['cweid'] = parse_identifiers( vulnerability, 'CWE')
+        item['tags']['nist'] = nist_tag(parse_identifiers(vulnerability, 'CWE'))
+        item['tags']['cweid'] = parse_identifiers(vulnerability, 'CWE')
 
        controls << item
      end
 
      controls = collapse_duplicates(controls)
-      results = HeimdallDataFormat.new(profile_name: "JFrog Xray Scan",
+      results = HeimdallDataFormat.new(profile_name: 'JFrog Xray Scan',
                                        version: NA_STRING,
-                                       title: "JFrog Xray Scan",
-                                       summary: "Continuous Security and Universal Artifact Analysis",
+                                       title: 'JFrog Xray Scan',
+                                       summary: 'Continuous Security and Universal Artifact Analysis',
                                        controls: controls)
      results.to_hdf
    end
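
Note: the unpack("H*")[0] to unpack1('H*') changes in the HeimdallDataFormat and Xray hunks above are equivalent ways of hex-encoding a binary digest; String#unpack1 (Ruby 2.4+) returns the first unpacked value directly instead of a one-element array. A minimal sketch, independent of the gem:

    require 'openssl'

    digest = OpenSSL::Digest::SHA256.digest('example')
    digest.unpack('H*')[0]  # hex string, pulled out of a one-element array
    digest.unpack1('H*')    # the same hex string, returned directly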